| lang (stringclasses, 1 value) | license (stringclasses, 13 values) | stderr (stringlengths 0-350) | commit (stringlengths 40) | returncode (int64, 0-128) | repos (stringlengths 7-45.1k) | new_contents (stringlengths 0-1.87M) | new_file (stringlengths 6-292) | old_contents (stringlengths 0-1.87M) | message (stringlengths 6-9.26k) | old_file (stringlengths 6-292) | subject (stringlengths 0-4.45k) |
|---|---|---|---|---|---|---|---|---|---|---|---|
Java
|
apache-2.0
|
c8ed634c7c2a5b99bea48b73b0dd38ea4beb2726
| 0
|
groybal/uPortal,phillips1021/uPortal,timlevett/uPortal,ChristianMurphy/uPortal,groybal/uPortal,ASU-Capstone/uPortal,jl1955/uPortal5,EdiaEducationTechnology/uPortal,GIP-RECIA/esup-uportal,drewwills/uPortal,ASU-Capstone/uPortal-Forked,kole9273/uPortal,stalele/uPortal,jonathanmtran/uPortal,Jasig/uPortal,andrewstuart/uPortal,jl1955/uPortal5,joansmith/uPortal,jl1955/uPortal5,Mines-Albi/esup-uportal,jameswennmacher/uPortal,pspaude/uPortal,kole9273/uPortal,cousquer/uPortal,jhelmer-unicon/uPortal,Jasig/uPortal-start,vertein/uPortal,kole9273/uPortal,joansmith/uPortal,EsupPortail/esup-uportal,ASU-Capstone/uPortal-Forked,jhelmer-unicon/uPortal,MichaelVose2/uPortal,phillips1021/uPortal,Mines-Albi/esup-uportal,vbonamy/esup-uportal,EdiaEducationTechnology/uPortal,jonathanmtran/uPortal,chasegawa/uPortal,EsupPortail/esup-uportal,jhelmer-unicon/uPortal,phillips1021/uPortal,GIP-RECIA/esco-portail,drewwills/uPortal,vbonamy/esup-uportal,drewwills/uPortal,phillips1021/uPortal,andrewstuart/uPortal,mgillian/uPortal,vbonamy/esup-uportal,bjagg/uPortal,Jasig/SSP-Platform,ChristianMurphy/uPortal,stalele/uPortal,pspaude/uPortal,bjagg/uPortal,MichaelVose2/uPortal,Jasig/SSP-Platform,ASU-Capstone/uPortal,apetro/uPortal,doodelicious/uPortal,ASU-Capstone/uPortal,kole9273/uPortal,GIP-RECIA/esup-uportal,jameswennmacher/uPortal,Jasig/SSP-Platform,phillips1021/uPortal,doodelicious/uPortal,stalele/uPortal,chasegawa/uPortal,EsupPortail/esup-uportal,stalele/uPortal,Jasig/uPortal,Jasig/SSP-Platform,jhelmer-unicon/uPortal,joansmith/uPortal,chasegawa/uPortal,pspaude/uPortal,apetro/uPortal,groybal/uPortal,ChristianMurphy/uPortal,Mines-Albi/esup-uportal,mgillian/uPortal,doodelicious/uPortal,cousquer/uPortal,ASU-Capstone/uPortal,jhelmer-unicon/uPortal,mgillian/uPortal,MichaelVose2/uPortal,joansmith/uPortal,GIP-RECIA/esup-uportal,chasegawa/uPortal,Jasig/uPortal,vertein/uPortal,groybal/uPortal,timlevett/uPortal,Mines-Albi/esup-uportal,jl1955/uPortal5,GIP-RECIA/esup-uportal,jameswennmacher/uPortal,apetro/uPortal,kole9273/uPortal,chasegawa/uPortal,EdiaEducationTechnology/uPortal,stalele/uPortal,ASU-Capstone/uPortal-Forked,joansmith/uPortal,EsupPortail/esup-uportal,vbonamy/esup-uportal,groybal/uPortal,EdiaEducationTechnology/uPortal,vbonamy/esup-uportal,vertein/uPortal,timlevett/uPortal,ASU-Capstone/uPortal,andrewstuart/uPortal,ASU-Capstone/uPortal-Forked,MichaelVose2/uPortal,jameswennmacher/uPortal,doodelicious/uPortal,drewwills/uPortal,doodelicious/uPortal,Mines-Albi/esup-uportal,jonathanmtran/uPortal,andrewstuart/uPortal,pspaude/uPortal,EsupPortail/esup-uportal,jl1955/uPortal5,apetro/uPortal,andrewstuart/uPortal,jameswennmacher/uPortal,apetro/uPortal,GIP-RECIA/esco-portail,cousquer/uPortal,vertein/uPortal,Jasig/uPortal-start,ASU-Capstone/uPortal-Forked,timlevett/uPortal,bjagg/uPortal,GIP-RECIA/esup-uportal,MichaelVose2/uPortal,GIP-RECIA/esco-portail,Jasig/SSP-Platform
|
/**
* Copyright 2003 The JA-SIG Collaborative. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. Redistributions of any form whatsoever must retain the following
* acknowledgment:
* "This product includes software developed by the JA-SIG Collaborative
* (http://www.jasig.org/)."
*
* THIS SOFTWARE IS PROVIDED BY THE JA-SIG COLLABORATIVE "AS IS" AND ANY
* EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE JA-SIG COLLABORATIVE OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
package org.jasig.portal.layout;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Vector;
import org.jasig.portal.PortalException;
import org.jasig.portal.layout.restrictions.IUserLayoutRestriction;
import org.jasig.portal.layout.restrictions.PriorityRestriction;
import org.jasig.portal.layout.restrictions.RestrictionTypes;
import org.jasig.portal.utils.CommonUtils;
import org.jasig.portal.utils.GuidGenerator;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;
/**
* The aggregated user layout implementation.
*
* @author <a href="mailto:mvi@immagic.com">Michael Ivanov</a>
* @version $Revision$
*/
public class AggregatedLayout implements IAggregatedLayout {
// The hashtable with the layout nodes
private Hashtable layout = null;
// The layout ID value
private String layoutId;
// The restriction mask
private int restrictionMask = 0;
// The IDs and names of the fragments that the user owns
private Hashtable fragments = null;
// The layout manager
private IAggregatedUserLayoutManager layoutManager = null;
// GUID generator
private static GuidGenerator guid = null;
private String cacheKey = null;
public AggregatedLayout ( String layoutId, IAggregatedUserLayoutManager layoutManager ) throws PortalException {
this ( layoutId );
this.layoutManager = layoutManager;
restrictionMask = layoutManager.getRestrictionMask();
}
public AggregatedLayout ( String layoutId ) throws PortalException {
this.layoutId = layoutId;
try {
if ( guid == null )
guid = new GuidGenerator();
updateCacheKey();
} catch ( Exception e ) {
throw new PortalException(e);
}
}
public void setLayoutData ( Hashtable layout ) throws PortalException {
this.layout = layout;
}
public Hashtable getLayoutData() throws PortalException {
return layout;
}
private void updateCacheKey() {
cacheKey = guid.getNewGuid();
}
private void bindRestrictions( IALNodeDescription nodeDesc, ContentHandler contentHandler ) throws SAXException {
Hashtable restrictions = nodeDesc.getRestrictions();
for ( Enumeration e = restrictions.keys(); e.hasMoreElements(); ) {
IUserLayoutRestriction restriction = (IUserLayoutRestriction ) e.nextElement();
if ( ( restriction.getRestrictionType() & restrictionMask ) > 0 ) {
AttributesImpl paramAttrs = new AttributesImpl();
paramAttrs.addAttribute("","path","path","CDATA",restriction.getRestrictionPath());
// we have to re-scale the priority restriction for the UI
if ( ( restriction.getRestrictionType() & RestrictionTypes.PRIORITY_RESTRICTION ) > 0 ) {
PriorityRestriction priorRestriction = (PriorityRestriction) restriction;
paramAttrs.addAttribute("","value","value","CDATA",((int)priorRestriction.getMinValue()/IAggregatedUserLayoutManager.PRIORITY_COEFF)+"-"+
((int)priorRestriction.getMaxValue()/IAggregatedUserLayoutManager.PRIORITY_COEFF));
} else
paramAttrs.addAttribute("","value","value","CDATA",restriction.getRestrictionExpression());
paramAttrs.addAttribute("","type","type","CDATA",restriction.getRestrictionType()+"");
contentHandler.startElement("",RESTRICTION,RESTRICTION,paramAttrs);
contentHandler.endElement("",RESTRICTION,RESTRICTION);
}
}
}
private ALNode getLayoutNode(String nodeId) {
try {
return (ALNode)layout.get(nodeId);
} catch ( Exception e ) {
return null;
}
}
private ALFolder getLayoutFolder(String folderId) {
try {
return (ALFolder)layout.get(folderId);
} catch (Exception e ) {
return null;
}
}
private ALNode getLastSiblingNode ( String nodeId ) {
ALNode node = null;
for ( String nextId = nodeId; nextId != null; ) {
node = getLayoutNode(nextId);
nextId = node.getNextNodeId();
}
return node;
}
private ALNode getFirstSiblingNode ( String nodeId ) {
ALNode node = null;
for ( String prevId = nodeId; prevId != null; ) {
node = getLayoutNode(prevId);
prevId = node.getPreviousNodeId();
}
return node;
}
private void createMarkingLeaf(ContentHandler contentHandler, String leafName, String parentNodeId, String nextNodeId) throws PortalException {
try {
AttributesImpl attributes = new AttributesImpl();
attributes.addAttribute("","parentID","parentID","CDATA",parentNodeId);
attributes.addAttribute("","nextID","nextID","CDATA",CommonUtils.nvl(nextNodeId));
contentHandler.startElement("",leafName,leafName,attributes);
contentHandler.endElement("",leafName,leafName);
} catch ( SAXException saxe ) {
throw new PortalException(saxe.getMessage());
}
}
private void createMarkingLeaf(Document document, String leafName, String parentNodeId, String nextNodeId, Node node) throws PortalException {
try {
Element markingLeaf = document.createElement(leafName);
markingLeaf.setAttribute("parentID",parentNodeId);
markingLeaf.setAttribute("nextID",nextNodeId);
node.appendChild(markingLeaf);
} catch ( Exception saxe ) {
throw new PortalException(saxe.getMessage());
}
}
private void createFragmentList(Document document, Node rootNode) throws PortalException {
try {
Element alternateLayouts = document.createElement("alternateLayouts");
if ( fragments != null ) {
for ( Enumeration fragEnum = fragments.keys(); fragEnum.hasMoreElements(); ) {
Element alternate = document.createElement("alternate");
String key = (String) fragEnum.nextElement();
alternate.setAttribute("ID",key);
alternate.setAttribute("name",(String) fragments.get(key));
alternateLayouts.appendChild(alternate);
}
}
rootNode.appendChild(alternateLayouts);
} catch ( Exception saxe ) {
throw new PortalException(saxe.getMessage());
}
}
private void createFragmentList(ContentHandler contentHandler) throws PortalException {
try {
contentHandler.startElement("","alternateLayouts","alternateLayouts",new AttributesImpl());
if ( fragments != null ) {
for ( Enumeration fragEnum = fragments.keys(); fragEnum.hasMoreElements(); ) {
AttributesImpl attributes = new AttributesImpl();
String key = (String) fragEnum.nextElement();
attributes.addAttribute("","ID","ID","CDATA",key);
attributes.addAttribute("","name","name","CDATA",(String) fragments.get(key));
contentHandler.startElement("","alternate","alternate",attributes);
contentHandler.endElement("","alternate","alternate");
}
}
contentHandler.endElement("","alternateLayouts","alternateLayouts");
} catch ( SAXException saxe ) {
throw new PortalException(saxe.getMessage());
}
}
/**
* Builds the DOM, consisting of folders and channels, from the internal layout representation.
* @param domLayout a <code>Document</code> user layout document.
* @param node an <code>Element</code> that will be used as the root for the tree construction
* @param nodeId a <code>String</code> nodeId from the user layout internal representation
* @exception PortalException if an error occurs
*/
private void appendDescendants(Document domLayout,Node node, String nodeId) throws PortalException {
ALNode layoutNode = getLayoutNode(nodeId);
IALNodeDescription nodeDesc = layoutNode.getNodeDescription();
Element markingMoveLeaf = null, markingAddLeaf = null;
Element newNode = domLayout.createElement((layoutNode.getNodeType()==IUserLayoutNodeDescription.FOLDER)?FOLDER:CHANNEL);
layoutNode.addNodeAttributes(newNode);
String parentId = layoutNode.getParentNodeId();
String nextId = layoutNode.getNextNodeId();
if ( layoutManager != null && parentId != null && layoutNode.getPreviousNodeId() == null ) {
if ( !layoutNode.getNodeDescription().isHidden() && !getLayoutNode(parentId).getNodeDescription().isHidden() ) {
String moveTargetsNodeId = layoutManager.getNodeBeingMoved().getId();
IALNodeDescription addTargetsNodeDesc = layoutManager.getNodeBeingAdded();
if ( addTargetsNodeDesc != null && layoutManager.canAddNode(addTargetsNodeDesc,parentId,nodeId) )
createMarkingLeaf(domLayout,ADD_TARGET,parentId,nodeId,node);
if ( moveTargetsNodeId != null && layoutManager.canMoveNode(moveTargetsNodeId,parentId,nodeId) )
createMarkingLeaf(domLayout,MOVE_TARGET,parentId,nodeId,node);
}
}
// Appending a new node
node.appendChild(newNode);
if ( parentId != null ) {
boolean isNodeMarkable = false;
if ( nextId != null && !getLayoutNode(nextId).getNodeDescription().isHidden() )
isNodeMarkable = true;
else if ( nextId == null )
isNodeMarkable = true;
if ( layoutManager != null && isNodeMarkable && !getLayoutNode(parentId).getNodeDescription().isHidden() ) {
String moveTargetsNodeId = layoutManager.getNodeBeingMoved().getId();
IALNodeDescription addTargetsNodeDesc = layoutManager.getNodeBeingAdded();
if ( addTargetsNodeDesc != null && layoutManager.canAddNode(addTargetsNodeDesc,parentId,nextId) )
createMarkingLeaf(domLayout,ADD_TARGET,parentId,nextId,node);
if ( moveTargetsNodeId != null && !moveTargetsNodeId.equals(nextId) &&
layoutManager.canMoveNode(moveTargetsNodeId,parentId,nextId) )
createMarkingLeaf(domLayout,MOVE_TARGET,parentId,nextId,node);
}
}
// Adding restrictions to the node
nodeDesc.addRestrictionChildren(newNode,domLayout);
if ( layoutNode.getNodeType() == IUserLayoutNodeDescription.FOLDER ) {
// Loop for all children
String firstChildId = ((ALFolder)layoutNode).getFirstChildNodeId();
for ( String nextNodeId = firstChildId; nextNodeId != null; ) {
// Recurse into the child node
appendDescendants(domLayout,newNode,nextNodeId);
nextNodeId = getLayoutNode(nextNodeId).getNextNodeId();
}
} else if ( layoutNode.getNodeType() == IUserLayoutNodeDescription.CHANNEL ) {
ALChannelDescription channelDesc = (ALChannelDescription) nodeDesc;
// Adding channel parameters
channelDesc.addParameterChildren(newNode,domLayout);
}
}
/**
* Returns a list of fragment Ids existing in the layout.
*
* @return an <code>Enumeration</code> of <code>String</code> fragment Ids.
* @exception PortalException if an error occurs
*/
public Enumeration getFragmentIds() throws PortalException {
return null;
}
/**
* Returns the fragment Id for a given node.
* Returns null if the node is not part of any fragment.
*
* @param nodeId a <code>String</code> value
* @return a <code>String</code> fragment Id
* @exception PortalException if an error occurs
*/
public String getFragmentId(String nodeId) throws PortalException {
return null;
}
/**
* Returns the fragment root Id for a given fragment.
*
* @param fragmentId a <code>String</code> value
* @return a <code>String</code> fragment root Id
* @exception PortalException if an error occurs
*/
public String getFragmentRootId(String fragmentId) throws PortalException {
return null;
}
/**
* Writes user layout content (with appropriate markings) into
* a <code>ContentHandler</code>
*
* @param ch a <code>ContentHandler</code> value
* @exception PortalException if an error occurs
*/
public void writeTo(ContentHandler ch) throws PortalException {
writeTo ( getRootId(), ch );
}
/**
* Writes subtree of a user layout (with appropriate markings) defined by a particular node into
* a <code>ContentHandler</code>
*
* @param nodeId a <code>String</code> a node determining a user layout subtree.
* @param contentHandler a <code>ContentHandler</code> value
* @exception PortalException if an error occurs
*/
public void writeTo(String nodeId, ContentHandler contentHandler ) throws PortalException {
IALFolderDescription folderDescription = null;
IALChannelDescription channelDescription = null;
if ( contentHandler != null && nodeId != null ) {
try {
ALNode node = getLayoutNode(nodeId);
AttributesImpl attributes = new AttributesImpl();
// If we have a folder
if ( node.getNodeType() == IUserLayoutNodeDescription.FOLDER ) {
// Start document if we have the root node
if (nodeId.equals(getRootId())) contentHandler.startDocument();
if (nodeId.equals(getRootId())) {
contentHandler.startElement("",LAYOUT,LAYOUT,new AttributesImpl());
// Create a fragment list that the user owns
createFragmentList(contentHandler);
}
ALFolder folder = (ALFolder) node;
folderDescription = (IALFolderDescription) node.getNodeDescription();
attributes.addAttribute("","ID","ID","ID",nodeId);
attributes.addAttribute("","type","type","CDATA",
IUserLayoutFolderDescription.folderTypeNames[folderDescription.getFolderType()]);
attributes.addAttribute("","hidden","hidden","CDATA",CommonUtils.boolToStr(folderDescription.isHidden()));
attributes.addAttribute("","unremovable","unremovable","CDATA",CommonUtils.boolToStr(folderDescription.isUnremovable()));
attributes.addAttribute("","immutable","immutable","CDATA",CommonUtils.boolToStr(folderDescription.isImmutable()));
attributes.addAttribute("","name","name","CDATA",folderDescription.getName());
contentHandler.startElement("",FOLDER,FOLDER,attributes);
// Loop for all children
String firstChildId = folder.getFirstChildNodeId();
for ( String nextNodeId = firstChildId; nextNodeId != null; ) {
// if necessary we add marking nodes
if ( layoutManager != null ) {
if ( !node.getNodeDescription().isHidden() && !getLayoutNode(nextNodeId).getNodeDescription().isHidden() ) {
String moveTargetsNodeId = layoutManager.getNodeBeingMoved().getId();
IALNodeDescription addTargetsNodeDesc = layoutManager.getNodeBeingAdded();
if ( addTargetsNodeDesc != null && layoutManager.canAddNode(addTargetsNodeDesc,nodeId,nextNodeId) )
createMarkingLeaf(contentHandler,ADD_TARGET,nodeId,nextNodeId);
if ( moveTargetsNodeId != null && !moveTargetsNodeId.equals(nextNodeId) && layoutManager.canMoveNode(moveTargetsNodeId,nodeId,nextNodeId) )
createMarkingLeaf(contentHandler,MOVE_TARGET,nodeId,nextNodeId);
}
}
// Recurse into the child node
writeTo (nextNodeId,contentHandler);
nextNodeId = getLayoutNode(nextNodeId).getNextNodeId();
}
// if necessary we add marking nodes to the end of the sibling line
if ( layoutManager != null && !node.getNodeDescription().isHidden() ) {
String moveTargetsNodeId = layoutManager.getNodeBeingMoved().getId();
IALNodeDescription addTargetsNodeDesc = layoutManager.getNodeBeingAdded();
if ( addTargetsNodeDesc != null && layoutManager.canAddNode(addTargetsNodeDesc,nodeId,null) )
createMarkingLeaf(contentHandler,ADD_TARGET,nodeId,null);
if ( moveTargetsNodeId != null && layoutManager.canMoveNode(moveTargetsNodeId,nodeId,null) )
createMarkingLeaf(contentHandler,MOVE_TARGET,nodeId,null);
}
// Putting restrictions to the content handler
if ( restrictionMask > 0 )
bindRestrictions(folderDescription,contentHandler);
contentHandler.endElement("",FOLDER,FOLDER);
// End the document if we have the root node
if (nodeId.equals(getRootId())) contentHandler.endElement("",LAYOUT,LAYOUT);
if (nodeId.equals(getRootId())) contentHandler.endDocument();
// If we have a channel
} else {
channelDescription = (IALChannelDescription) node.getNodeDescription();
attributes.addAttribute("","ID","ID","ID",nodeId);
attributes.addAttribute("","typeID","typeID","CDATA",channelDescription.getChannelTypeId());
attributes.addAttribute("","hidden","hidden","CDATA",CommonUtils.boolToStr(channelDescription.isHidden()));
attributes.addAttribute("","editable","editable","CDATA",CommonUtils.boolToStr(channelDescription.isEditable()));
attributes.addAttribute("","unremovable","unremovable","CDATA",CommonUtils.boolToStr(channelDescription.isUnremovable()));
attributes.addAttribute("","immutable","immutable","CDATA",CommonUtils.boolToStr(channelDescription.isImmutable()));
attributes.addAttribute("","name","name","CDATA",channelDescription.getName());
attributes.addAttribute("","description","description","CDATA",channelDescription.getDescription());
attributes.addAttribute("","title","title","CDATA",channelDescription.getTitle());
attributes.addAttribute("","class","class","CDATA",channelDescription.getClassName());
attributes.addAttribute("","chanID","chanID","CDATA",channelDescription.getChannelPublishId());
attributes.addAttribute("","fname","fname","CDATA",channelDescription.getFunctionalName());
attributes.addAttribute("","timeout","timeout","CDATA",String.valueOf(channelDescription.getTimeout()));
attributes.addAttribute("","hasHelp","hasHelp","CDATA",CommonUtils.boolToStr(channelDescription.hasHelp()));
attributes.addAttribute("","hasAbout","hasAbout","CDATA",CommonUtils.boolToStr(channelDescription.hasAbout()));
attributes.addAttribute("","secure","secure","CDATA",CommonUtils.boolToStr(channelDescription.isSecure()));
contentHandler.startElement("",CHANNEL,CHANNEL,attributes);
if ( channelDescription.hasParameters() ) {
Enumeration paramNames = channelDescription.getParameterNames();
while ( paramNames.hasMoreElements() ) {
String name = (String) paramNames.nextElement();
String value = channelDescription.getParameterValue(name);
AttributesImpl paramAttrs = new AttributesImpl();
paramAttrs.addAttribute("","name","name","CDATA",name);
paramAttrs.addAttribute("","value","value","CDATA",value);
paramAttrs.addAttribute("","override","override","CDATA",
channelDescription.canOverrideParameter(name)?"yes":"no");
contentHandler.startElement("",PARAMETER,PARAMETER,paramAttrs);
contentHandler.endElement("",PARAMETER,PARAMETER);
}
}
// Putting restrictions to the content handler
if ( restrictionMask > 0 )
bindRestrictions(channelDescription,contentHandler);
contentHandler.endElement("",CHANNEL,CHANNEL);
}
} catch ( SAXException saxe ) {
throw new PortalException(saxe.getMessage());
}
}
}
/**
* Writes user layout content (with appropriate markings) into
* a <code>Document</code> object
*
* @param document a <code>Document</code> value
* @exception PortalException if an error occurs
*/
public void writeTo(Document document) throws PortalException {
writeTo ( getRootId(), document );
}
/**
* Writes subtree of a user layout (with appropriate markings) defined by a particular node into
* a <code>Document</code>
*
* @param nodeId a <code>String</code> a node determining a user layout subtree.
* @param document a <code>Document</code> object
* @exception PortalException if an error occurs
*/
public void writeTo(String nodeId, Document document) throws PortalException {
try {
Element layoutNode = document.createElement(LAYOUT);
document.appendChild(layoutNode);
// Create a fragment list which the user owns
createFragmentList(document,layoutNode);
// Build the DOM
appendDescendants(document,layoutNode,nodeId);
} catch ( Exception e ) {
e.printStackTrace();
throw new PortalException ("Couldn't create the DOM representation: " + e );
}
}
/**
* Obtain a description of a node (channel or a folder) in a given user layout.
*
* @param nodeId a <code>String</code> channel subscribe id or folder id.
* @return an <code>IUserLayoutNodeDescription</code> value
* @exception PortalException if an error occurs
*/
public IUserLayoutNodeDescription getNodeDescription(String nodeId) throws PortalException {
ALNode node = getLayoutNode(nodeId);
if ( node != null )
return node.getNodeDescription();
throw new PortalException ( "The node with nodeID="+nodeId+" does not exist in the layout!" );
}
/**
* Returns a node specified by a node ID.
*
* @param nodeId a <code>String</code> value
* @return a <code>ALNode</code> object
* @exception PortalException if an error occurs
*/
public ALNode getNode( String nodeId) throws PortalException {
return getLayoutNode(nodeId);
}
/**
* Returns an Id of a parent user layout node.
* The user layout root node always has ID="root"
*
* @param nodeId a <code>String</code> value
* @return a <code>String</code> value
* @exception PortalException if an error occurs
*/
public String getParentId(String nodeId) throws PortalException {
ALNode node = getLayoutNode(nodeId);
if ( node != null )
return node.getParentNodeId();
throw new PortalException ( "The node with nodeID="+nodeId+" does not exist in the layout!" );
}
/**
* Returns a list of child node Ids for a given node.
*
* @param nodeId a <code>String</code> value
* @return an <code>Enumeration</code> of <code>String</code> child node Ids.
* @exception PortalException if an error occurs
*/
public Enumeration getChildIds(String nodeId) throws PortalException {
Vector childIds = new Vector();
String firstChildId = getLayoutFolder(nodeId).getFirstChildNodeId();
for ( String nextNodeId = firstChildId; nextNodeId != null; ) {
childIds.add(nextNodeId);
nextNodeId = getLayoutNode(nextNodeId).getNextNodeId();
}
return childIds.elements();
}
/**
* Determine an Id of a next sibling node.
*
* @param nodeId a <code>String</code> value
* @return a <code>String</code> Id value of a next sibling node, or <code>null</code> if this is the last sibling.
* @exception PortalException if an error occurs
*/
public String getNextSiblingId(String nodeId) throws PortalException {
ALNode node = getLayoutNode(nodeId);
if ( node != null )
return node.getNextNodeId();
throw new PortalException ( "The node with nodeID="+nodeId+" does not exist in the layout!" );
}
/**
* Determine an Id of a previous sibling node.
*
* @param nodeId a <code>String</code> value
* @return a <code>String</code> Id value of a previous sibling node, or <code>null</code> if this is the first sibling.
* @exception PortalException if an error occurs
*/
public String getPreviousSiblingId(String nodeId) throws PortalException {
ALNode node = getLayoutNode(nodeId);
if ( node != null )
return node.getPreviousNodeId();
throw new PortalException ( "The node with nodeID="+nodeId+" does not exist in the layout!" );
}
/**
* Returns a cache key uniquely corresponding to the composition and the structure of the user layout.
*
* @return a <code>String</code> value
* @exception PortalException if an error occurs
*/
public String getCacheKey() throws PortalException {
return cacheKey;
}
/**
* Register a layout event listener
*
* @param l a <code>LayoutEventListener</code> object
* @return a <code>boolean</code> success status
*/
public boolean addLayoutEventListener(LayoutEventListener l) {
// TO IMPLEMENT
return false;
}
/**
* Remove a registered layout event listener.
*
* @param l a <code>LayoutEventListener</code> object
* @return a <code>boolean</code> success status
*/
public boolean removeLayoutEventListener(LayoutEventListener l) {
// TO IMPLEMENT
return false;
}
/**
* Returns the layout Id associated with this manager.
*
* @return a <code>String</code> layout Id value
*/
public String getId() {
return layoutId;
}
/**
* Returns a node id associated with the supplied functional name.
*
* @param fname the functional name to lookup
* @return a <code>String</code> subscription id
* @exception PortalException if an error occurs
*/
public String getNodeId(String fname) throws PortalException {
for ( Enumeration nodeIds = layout.keys(); nodeIds.hasMoreElements() ;) {
String nodeId = nodeIds.nextElement().toString();
ALNode node = getLayoutNode(nodeId);
if ( node.getNodeType() == IUserLayoutNodeDescription.CHANNEL ) {
ALChannelDescription channelDesc = (ALChannelDescription) node.getNodeDescription();
if ( fname.equals(channelDesc.getFunctionalName()) )
return node.getId();
}
}
return null;
}
/**
* Returns a list of node Ids in the layout.
*
* @return an <code>Enumeration</code> of node Ids
* @exception PortalException if an error occurs
*/
public Enumeration getNodeIds() throws PortalException {
if ( layout == null )
throw new PortalException ( "The layout is NULL!" );
return layout.keys();
}
/**
* Returns an id of the root node.
*
* @return a <code>String</code> value
*/
public String getRootId() {
return IALFolderDescription.ROOT_FOLDER_ID;
}
}
|
source/org/jasig/portal/layout/AggregatedLayout.java
|
/**
* Copyright 2002 The JA-SIG Collaborative. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. Redistributions of any form whatsoever must retain the following
* acknowledgment:
* "This product includes software developed by the JA-SIG Collaborative
* (http://www.jasig.org/)."
*
* THIS SOFTWARE IS PROVIDED BY THE JA-SIG COLLABORATIVE "AS IS" AND ANY
* EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE JA-SIG COLLABORATIVE OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
package org.jasig.portal.layout;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Vector;
import org.jasig.portal.PortalException;
import org.jasig.portal.layout.restrictions.IUserLayoutRestriction;
import org.jasig.portal.layout.restrictions.PriorityRestriction;
import org.jasig.portal.layout.restrictions.RestrictionTypes;
import org.jasig.portal.services.LogService;
import org.jasig.portal.utils.CommonUtils;
import org.jasig.portal.utils.GuidGenerator;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;
/**
* The aggregated user layout implementation.
*
* @author <a href="mailto:mvi@immagic.com">Michael Ivanov</a>
* @version 1.1
*/
public class AggregatedLayout implements IAggregatedLayout {
// The hashtable with the layout nodes
private Hashtable layout = null;
// The layout ID value
private String layoutId;
// The restriction mask
private int restrictionMask = 0;
// The IDs and names of the fragments that the user owns
private Hashtable fragments = null;
// The layout manager
private IAggregatedUserLayoutManager layoutManager = null;
// GUID generator
private static GuidGenerator guid = null;
private String cacheKey = null;
public AggregatedLayout ( String layoutId, IAggregatedUserLayoutManager layoutManager ) throws PortalException {
this ( layoutId );
this.layoutManager = layoutManager;
restrictionMask = layoutManager.getRestrictionMask();
}
public AggregatedLayout ( String layoutId ) throws PortalException {
this.layoutId = layoutId;
try {
if ( guid == null )
guid = new GuidGenerator();
updateCacheKey();
} catch ( Exception e ) {
throw new PortalException(e);
}
}
public void setLayoutData ( Hashtable layout ) throws PortalException {
this.layout = layout;
}
public Hashtable getLayoutData() throws PortalException {
return layout;
}
private void updateCacheKey() {
cacheKey = guid.getNewGuid();
}
private void bindRestrictions( IALNodeDescription nodeDesc, ContentHandler contentHandler ) throws SAXException {
Hashtable restrictions = nodeDesc.getRestrictions();
for ( Enumeration e = restrictions.keys(); e.hasMoreElements(); ) {
IUserLayoutRestriction restriction = (IUserLayoutRestriction ) e.nextElement();
if ( ( restriction.getRestrictionType() & restrictionMask ) > 0 ) {
AttributesImpl paramAttrs = new AttributesImpl();
paramAttrs.addAttribute("","path","path","CDATA",restriction.getRestrictionPath());
// we have to re-scale the priority restriction for the UI
if ( ( restriction.getRestrictionType() & RestrictionTypes.PRIORITY_RESTRICTION ) > 0 ) {
PriorityRestriction priorRestriction = (PriorityRestriction) restriction;
paramAttrs.addAttribute("","value","value","CDATA",((int)priorRestriction.getMinValue()/IAggregatedUserLayoutManager.PRIORITY_COEFF)+"-"+
((int)priorRestriction.getMaxValue()/IAggregatedUserLayoutManager.PRIORITY_COEFF));
} else
paramAttrs.addAttribute("","value","value","CDATA",restriction.getRestrictionExpression());
paramAttrs.addAttribute("","type","type","CDATA",restriction.getRestrictionType()+"");
contentHandler.startElement("",RESTRICTION,RESTRICTION,paramAttrs);
contentHandler.endElement("",RESTRICTION,RESTRICTION);
}
}
}
private ALNode getLayoutNode(String nodeId) {
try {
return (ALNode)layout.get(nodeId);
} catch ( Exception e ) {
return null;
}
}
private ALFolder getLayoutFolder(String folderId) {
try {
return (ALFolder)layout.get(folderId);
} catch (Exception e ) {
return null;
}
}
private ALNode getLastSiblingNode ( String nodeId ) {
ALNode node = null;
for ( String nextId = nodeId; nextId != null; ) {
node = getLayoutNode(nextId);
nextId = node.getNextNodeId();
}
return node;
}
private ALNode getFirstSiblingNode ( String nodeId ) {
ALNode node = null;
for ( String prevId = nodeId; prevId != null; ) {
node = getLayoutNode(prevId);
prevId = node.getPreviousNodeId();
}
return node;
}
private void createMarkingLeaf(ContentHandler contentHandler, String leafName, String parentNodeId, String nextNodeId) throws PortalException {
try {
AttributesImpl attributes = new AttributesImpl();
attributes.addAttribute("","parentID","parentID","CDATA",parentNodeId);
attributes.addAttribute("","nextID","nextID","CDATA",CommonUtils.nvl(nextNodeId));
contentHandler.startElement("",leafName,leafName,attributes);
contentHandler.endElement("",leafName,leafName);
} catch ( SAXException saxe ) {
throw new PortalException(saxe.getMessage());
}
}
private void createMarkingLeaf(Document document, String leafName, String parentNodeId, String nextNodeId, Node node) throws PortalException {
try {
Element markingLeaf = document.createElement(leafName);
markingLeaf.setAttribute("parentID",parentNodeId);
markingLeaf.setAttribute("nextID",nextNodeId);
node.appendChild(markingLeaf);
} catch ( Exception saxe ) {
throw new PortalException(saxe.getMessage());
}
}
private void createFragmentList(Document document, Node rootNode) throws PortalException {
try {
Element alternateLayouts = document.createElement("alternateLayouts");
if ( fragments != null ) {
for ( Enumeration fragEnum = fragments.keys(); fragEnum.hasMoreElements(); ) {
Element alternate = document.createElement("alternate");
String key = (String) fragEnum.nextElement();
alternate.setAttribute("ID",key);
alternate.setAttribute("name",(String) fragments.get(key));
alternateLayouts.appendChild(alternate);
}
}
rootNode.appendChild(alternateLayouts);
} catch ( Exception saxe ) {
throw new PortalException(saxe.getMessage());
}
}
private void createFragmentList(ContentHandler contentHandler) throws PortalException {
try {
contentHandler.startElement("","alternateLayouts","alternateLayouts",new AttributesImpl());
if ( fragments != null ) {
for ( Enumeration fragEnum = fragments.keys(); fragEnum.hasMoreElements(); ) {
AttributesImpl attributes = new AttributesImpl();
String key = (String) fragEnum.nextElement();
attributes.addAttribute("","ID","ID","CDATA",key);
attributes.addAttribute("","name","name","CDATA",(String) fragments.get(key));
contentHandler.startElement("","alternate","alternate",attributes);
contentHandler.endElement("","alternate","alternate");
}
}
contentHandler.endElement("","alternateLayouts","alternateLayouts");
} catch ( SAXException saxe ) {
throw new PortalException(saxe.getMessage());
}
}
/**
* Builds the DOM, consisting of folders and channels, from the internal layout representation.
* @param domLayout a <code>Document</code> user layout document.
* @param node an <code>Element</code> that will be used as the root for the tree construction
* @param nodeId a <code>String</code> nodeId from the user layout internal representation
* @exception PortalException if an error occurs
*/
private void appendDescendants(Document domLayout,Node node, String nodeId) throws PortalException {
ALNode layoutNode = getLayoutNode(nodeId);
IALNodeDescription nodeDesc = layoutNode.getNodeDescription();
Element markingMoveLeaf = null, markingAddLeaf = null;
Element newNode = domLayout.createElement((layoutNode.getNodeType()==IUserLayoutNodeDescription.FOLDER)?FOLDER:CHANNEL);
layoutNode.addNodeAttributes(newNode);
String parentId = layoutNode.getParentNodeId();
String nextId = layoutNode.getNextNodeId();
if ( layoutManager != null && parentId != null && layoutNode.getPreviousNodeId() == null ) {
if ( !layoutNode.getNodeDescription().isHidden() && !getLayoutNode(parentId).getNodeDescription().isHidden() ) {
String moveTargetsNodeId = layoutManager.getNodeBeingMoved().getId();
IALNodeDescription addTargetsNodeDesc = layoutManager.getNodeBeingAdded();
if ( addTargetsNodeDesc != null && layoutManager.canAddNode(addTargetsNodeDesc,parentId,nodeId) )
createMarkingLeaf(domLayout,ADD_TARGET,parentId,nodeId,node);
if ( moveTargetsNodeId != null && layoutManager.canMoveNode(moveTargetsNodeId,parentId,nodeId) )
createMarkingLeaf(domLayout,MOVE_TARGET,parentId,nodeId,node);
}
}
// Appending a new node
node.appendChild(newNode);
if ( parentId != null ) {
boolean isNodeMarkable = false;
if ( nextId != null && !getLayoutNode(nextId).getNodeDescription().isHidden() )
isNodeMarkable = true;
else if ( nextId == null )
isNodeMarkable = true;
if ( layoutManager != null && isNodeMarkable && !getLayoutNode(parentId).getNodeDescription().isHidden() ) {
String moveTargetsNodeId = layoutManager.getNodeBeingMoved().getId();
IALNodeDescription addTargetsNodeDesc = layoutManager.getNodeBeingAdded();
if ( addTargetsNodeDesc != null && layoutManager.canAddNode(addTargetsNodeDesc,parentId,nextId) )
createMarkingLeaf(domLayout,ADD_TARGET,parentId,nextId,node);
if ( moveTargetsNodeId != null && !moveTargetsNodeId.equals(nextId) &&
layoutManager.canMoveNode(moveTargetsNodeId,parentId,nextId) )
createMarkingLeaf(domLayout,MOVE_TARGET,parentId,nextId,node);
}
}
// Adding restrictions to the node
nodeDesc.addRestrictionChildren(newNode,domLayout);
if ( layoutNode.getNodeType() == IUserLayoutNodeDescription.FOLDER ) {
// Loop for all children
String firstChildId = ((ALFolder)layoutNode).getFirstChildNodeId();
for ( String nextNodeId = firstChildId; nextNodeId != null; ) {
// Recurse into the child node
appendDescendants(domLayout,newNode,nextNodeId);
nextNodeId = getLayoutNode(nextNodeId).getNextNodeId();
}
} else if ( layoutNode.getNodeType() == IUserLayoutNodeDescription.CHANNEL ) {
ALChannelDescription channelDesc = (ALChannelDescription) nodeDesc;
// Adding channel parameters
channelDesc.addParameterChildren(newNode,domLayout);
}
}
/**
* Returns a list of fragment Ids existing in the layout.
*
* @return an <code>Enumeration</code> of <code>String</code> fragment Ids.
* @exception PortalException if an error occurs
*/
public Enumeration getFragmentIds() throws PortalException {
return null;
}
/**
* Returns the fragment Id for a given node.
* Returns null if the node is not part of any fragment.
*
* @param nodeId a <code>String</code> value
* @return a <code>String</code> fragment Id
* @exception PortalException if an error occurs
*/
public String getFragmentId(String nodeId) throws PortalException {
return null;
}
/**
* Returns the fragment root Id for a given fragment.
*
* @param fragmentId a <code>String</code> value
* @return a <code>String</code> fragment root Id
* @exception PortalException if an error occurs
*/
public String getFragmentRootId(String fragmentId) throws PortalException {
return null;
}
/**
* Writes user layout content (with appropriate markings) into
* a <code>ContentHandler</code>
*
* @param ch a <code>ContentHandler</code> value
* @exception PortalException if an error occurs
*/
public void writeTo(ContentHandler ch) throws PortalException {
writeTo ( getRootId(), ch );
}
/**
* Writes subtree of a user layout (with appropriate markings) defined by a particular node into
* a <code>ContentHandler</code>
*
* @param nodeId a <code>String</code> a node determining a user layout subtree.
* @param contentHandler a <code>ContentHandler</code> value
* @exception PortalException if an error occurs
*/
public void writeTo(String nodeId, ContentHandler contentHandler ) throws PortalException {
IALFolderDescription folderDescription = null;
IALChannelDescription channelDescription = null;
if ( contentHandler != null && nodeId != null ) {
try {
ALNode node = getLayoutNode(nodeId);
AttributesImpl attributes = new AttributesImpl();
// If we have a folder
if ( node.getNodeType() == IUserLayoutNodeDescription.FOLDER ) {
// Start document if we have the root node
if (nodeId.equals(getRootId())) contentHandler.startDocument();
if (nodeId.equals(getRootId())) {
contentHandler.startElement("",LAYOUT,LAYOUT,new AttributesImpl());
// Create a fragment list that the user owns
createFragmentList(contentHandler);
}
ALFolder folder = (ALFolder) node;
folderDescription = (IALFolderDescription) node.getNodeDescription();
attributes.addAttribute("","ID","ID","ID",nodeId);
attributes.addAttribute("","type","type","CDATA",
IUserLayoutFolderDescription.folderTypeNames[folderDescription.getFolderType()]);
attributes.addAttribute("","hidden","hidden","CDATA",CommonUtils.boolToStr(folderDescription.isHidden()));
attributes.addAttribute("","unremovable","unremovable","CDATA",CommonUtils.boolToStr(folderDescription.isUnremovable()));
attributes.addAttribute("","immutable","immutable","CDATA",CommonUtils.boolToStr(folderDescription.isImmutable()));
attributes.addAttribute("","name","name","CDATA",folderDescription.getName());
contentHandler.startElement("",FOLDER,FOLDER,attributes);
// Loop for all children
String firstChildId = folder.getFirstChildNodeId();
for ( String nextNodeId = firstChildId; nextNodeId != null; ) {
// if necessary we add marking nodes
if ( layoutManager != null ) {
if ( !node.getNodeDescription().isHidden() && !getLayoutNode(nextNodeId).getNodeDescription().isHidden() ) {
String moveTargetsNodeId = layoutManager.getNodeBeingMoved().getId();
IALNodeDescription addTargetsNodeDesc = layoutManager.getNodeBeingAdded();
if ( addTargetsNodeDesc != null && layoutManager.canAddNode(addTargetsNodeDesc,nodeId,nextNodeId) )
createMarkingLeaf(contentHandler,ADD_TARGET,nodeId,nextNodeId);
if ( moveTargetsNodeId != null && !moveTargetsNodeId.equals(nextNodeId) && layoutManager.canMoveNode(moveTargetsNodeId,nodeId,nextNodeId) )
createMarkingLeaf(contentHandler,MOVE_TARGET,nodeId,nextNodeId);
}
}
// Recurse into the child node
writeTo (nextNodeId,contentHandler);
nextNodeId = getLayoutNode(nextNodeId).getNextNodeId();
}
// if necessary we add marking nodes to the end of the sibling line
if ( layoutManager != null && !node.getNodeDescription().isHidden() ) {
String moveTargetsNodeId = layoutManager.getNodeBeingMoved().getId();
IALNodeDescription addTargetsNodeDesc = layoutManager.getNodeBeingAdded();
if ( addTargetsNodeDesc != null && layoutManager.canAddNode(addTargetsNodeDesc,nodeId,null) )
createMarkingLeaf(contentHandler,ADD_TARGET,nodeId,null);
if ( moveTargetsNodeId != null && layoutManager.canMoveNode(moveTargetsNodeId,nodeId,null) )
createMarkingLeaf(contentHandler,MOVE_TARGET,nodeId,null);
}
// Putting restrictions to the content handler
if ( restrictionMask > 0 )
bindRestrictions(folderDescription,contentHandler);
contentHandler.endElement("",FOLDER,FOLDER);
// End the document if we have the root node
if (nodeId.equals(getRootId())) contentHandler.endElement("",LAYOUT,LAYOUT);
if (nodeId.equals(getRootId())) contentHandler.endDocument();
// If we have a channel
} else {
channelDescription = (IALChannelDescription) node.getNodeDescription();
attributes.addAttribute("","ID","ID","ID",nodeId);
attributes.addAttribute("","typeID","typeID","CDATA",channelDescription.getChannelTypeId());
attributes.addAttribute("","hidden","hidden","CDATA",CommonUtils.boolToStr(channelDescription.isHidden()));
attributes.addAttribute("","editable","editable","CDATA",CommonUtils.boolToStr(channelDescription.isEditable()));
attributes.addAttribute("","unremovable","unremovable","CDATA",CommonUtils.boolToStr(channelDescription.isUnremovable()));
attributes.addAttribute("","immutable","immutable","CDATA",CommonUtils.boolToStr(channelDescription.isImmutable()));
attributes.addAttribute("","name","name","CDATA",channelDescription.getName());
attributes.addAttribute("","description","description","CDATA",channelDescription.getDescription());
attributes.addAttribute("","title","title","CDATA",channelDescription.getTitle());
attributes.addAttribute("","class","class","CDATA",channelDescription.getClassName());
attributes.addAttribute("","chanID","chanID","CDATA",channelDescription.getChannelPublishId());
attributes.addAttribute("","fname","fname","CDATA",channelDescription.getFunctionalName());
attributes.addAttribute("","timeout","timeout","CDATA",String.valueOf(channelDescription.getTimeout()));
attributes.addAttribute("","hasHelp","hasHelp","CDATA",CommonUtils.boolToStr(channelDescription.hasHelp()));
attributes.addAttribute("","hasAbout","hasAbout","CDATA",CommonUtils.boolToStr(channelDescription.hasAbout()));
attributes.addAttribute("","secure","secure","CDATA",CommonUtils.boolToStr(channelDescription.isSecure()));
contentHandler.startElement("",CHANNEL,CHANNEL,attributes);
if ( channelDescription.hasParameters() ) {
Enumeration paramNames = channelDescription.getParameterNames();
while ( paramNames.hasMoreElements() ) {
String name = (String) paramNames.nextElement();
String value = channelDescription.getParameterValue(name);
AttributesImpl paramAttrs = new AttributesImpl();
paramAttrs.addAttribute("","name","name","CDATA",name);
paramAttrs.addAttribute("","value","value","CDATA",value);
paramAttrs.addAttribute("","override","override","CDATA",
channelDescription.canOverrideParameter(name)?"yes":"no");
contentHandler.startElement("",PARAMETER,PARAMETER,paramAttrs);
contentHandler.endElement("",PARAMETER,PARAMETER);
}
}
// Putting restrictions to the content handler
if ( restrictionMask > 0 )
bindRestrictions(channelDescription,contentHandler);
contentHandler.endElement("",CHANNEL,CHANNEL);
}
} catch ( SAXException saxe ) {
throw new PortalException(saxe.getMessage());
}
}
}
/**
* Writes user layout content (with appropriate markings) into
* a <code>Document</code> object
*
* @param document a <code>Document</code> value
* @exception PortalException if an error occurs
*/
public void writeTo(Document document) throws PortalException {
writeTo ( getRootId(), document );
}
/**
* Writes subtree of a user layout (with appropriate markings) defined by a particular node into
* a <code>Document</code>
*
* @param nodeId a <code>String</code> a node determining a user layout subtree.
* @param document a <code>Document</code> object
* @exception PortalException if an error occurs
*/
public void writeTo(String nodeId, Document document) throws PortalException {
try {
Element layoutNode = document.createElement(LAYOUT);
document.appendChild(layoutNode);
// Create a fragment list which the user owns
createFragmentList(document,layoutNode);
// Build the DOM
appendDescendants(document,layoutNode,nodeId);
} catch ( Exception e ) {
e.printStackTrace();
throw new PortalException ("Couldn't create the DOM representation: " + e );
}
}
/**
* Obtain a description of a node (channel or a folder) in a given user layout.
*
* @param nodeId a <code>String</code> channel subscribe id or folder id.
* @return an <code>IUserLayoutNodeDescription</code> value
* @exception PortalException if an error occurs
*/
public IUserLayoutNodeDescription getNodeDescription(String nodeId) throws PortalException {
ALNode node = getLayoutNode(nodeId);
if ( node != null )
return node.getNodeDescription();
throw new PortalException ( "The node with nodeID="+nodeId+" does not exist in the layout!" );
}
/**
* Returns a node specified by a node ID.
*
* @param nodeId a <code>String</code> value
* @return a <code>ALNode</code> object
* @exception PortalException if an error occurs
*/
public ALNode getNode( String nodeId) throws PortalException {
return getLayoutNode(nodeId);
}
/**
* Returns an Id of a parent user layout node.
* The user layout root node always has ID="root"
*
* @param nodeId a <code>String</code> value
* @return a <code>String</code> value
* @exception PortalException if an error occurs
*/
public String getParentId(String nodeId) throws PortalException {
ALNode node = getLayoutNode(nodeId);
if ( node != null )
return node.getParentNodeId();
throw new PortalException ( "The node with nodeID="+nodeId+" does not exist in the layout!" );
}
/**
* Returns a list of child node Ids for a given node.
*
* @param nodeId a <code>String</code> value
* @return an <code>Enumeration</code> of <code>String</code> child node Ids.
* @exception PortalException if an error occurs
*/
public Enumeration getChildIds(String nodeId) throws PortalException {
Vector childIds = new Vector();
String firstChildId = getLayoutFolder(nodeId).getFirstChildNodeId();
for ( String nextNodeId = firstChildId; nextNodeId != null; ) {
childIds.add(nextNodeId);
nextNodeId = getLayoutNode(nextNodeId).getNextNodeId();
}
return childIds.elements();
}
/**
* Determine an Id of a next sibling node.
*
* @param nodeId a <code>String</code> value
* @return a <code>String</code> Id value of a next sibling node, or <code>null</code> if this is the last sibling.
* @exception PortalException if an error occurs
*/
public String getNextSiblingId(String nodeId) throws PortalException {
ALNode node = getLayoutNode(nodeId);
if ( node != null )
return node.getNextNodeId();
throw new PortalException ( "The node with nodeID="+nodeId+" does not exist in the layout!" );
}
/**
* Determine an Id of a previous sibling node.
*
* @param nodeId a <code>String</code> value
* @return a <code>String</code> Id value of a previous sibling node, or <code>null</code> if this is the first sibling.
* @exception PortalException if an error occurs
*/
public String getPreviousSiblingId(String nodeId) throws PortalException {
ALNode node = getLayoutNode(nodeId);
if ( node != null )
return node.getPreviousNodeId();
throw new PortalException ( "The node with nodeID="+nodeId+" does not exist in the layout!" );
}
/**
* Returns a cache key uniquely corresponding to the composition and the structure of the user layout.
*
* @return a <code>String</code> value
* @exception PortalException if an error occurs
*/
public String getCacheKey() throws PortalException {
return cacheKey;
}
/**
* Register a layout event listener
*
* @param l a <code>LayoutEventListener</code> object
* @return a <code>boolean</code> success status
*/
public boolean addLayoutEventListener(LayoutEventListener l) {
// TO IMPLEMENT
return false;
}
/**
* Remove a registered layout event listener.
*
* @param l a <code>LayoutEventListener</code> object
* @return a <code>boolean</code> success status
*/
public boolean removeLayoutEventListener(LayoutEventListener l) {
// TO IMPLEMENT
return false;
}
/**
* Returns the layout Id associated with this manager.
*
* @return a <code>String</code> layout Id value
*/
public String getId() {
return layoutId;
}
/**
* Returns a node id associated with the supplied functional name.
*
* @param fname the functional name to lookup
* @return a <code>String</code> subscription id
* @exception PortalException if an error occurs
*/
public String getNodeId(String fname) throws PortalException {
for ( Enumeration nodeIds = layout.keys(); nodeIds.hasMoreElements() ;) {
String nodeId = nodeIds.nextElement().toString();
ALNode node = getLayoutNode(nodeId);
if ( node.getNodeType() == IUserLayoutNodeDescription.CHANNEL ) {
ALChannelDescription channelDesc = (ALChannelDescription) node.getNodeDescription();
if ( fname.equals(channelDesc.getFunctionalName()) )
return node.getId();
}
}
return null;
}
/**
* Returns a list of node Ids in the layout.
*
* @return an <code>Enumeration</code> of node Ids
* @exception PortalException if an error occurs
*/
public Enumeration getNodeIds() throws PortalException {
if ( layout == null )
throw new PortalException ( "The layout is NULL!" );
return layout.keys();
}
/**
* Returns an id of the root node.
*
* @return a <code>String</code> value
*/
public String getRootId() {
return IALFolderDescription.ROOT_FOLDER_ID;
}
}
|
Organized imports, added cvs revision keyword expansion.
git-svn-id: 477788cc2a8229a747c5b8073e47c1d0f6ec0604@7881 f5dbab47-78f9-eb45-b975-e544023573eb
|
source/org/jasig/portal/layout/AggregatedLayout.java
|
Organized imports, added cvs revision keyword expansion.
|
|
Java
|
apache-2.0
|
eb1918bffa945d796289a2f861cef6828fd9f052
| 0
|
matsprea/phonegap-googlemaps-plugin,denisbabineau/phonegap-googlemaps-plugin,denisbabineau/phonegap-googlemaps-plugin,dukhanov/cordova-plugin-googlemaps,cabify/cordova-plugin-googlemaps,hitoci/phonegap-googlemaps-plugin,mapsplugin/cordova-plugin-googlemaps,athiradandira/phonegap-googlemaps-plugin,smcpjames/cordova-plugin-googlemaps,quiuquio/cordova-plugin-googlemaps,joewoodhouse/phonegap-googlemaps-plugin,wf9a5m75/phonegap-googlemaps-plugin,athiradandira/phonegap-googlemaps-plugin,smcpjames/cordova-plugin-googlemaps,wf9a5m75/phonegap-googlemaps-plugin,edivancamargo/phonegap-googlemaps-plugin,hungdoan2/phonegap-googlemaps-plugin,quiuquio/cordova-plugin-googlemaps,EmilianStankov/cordova-plugin-googlemaps,hitoci/phonegap-googlemaps-plugin,wf9a5m75/phonegap-googlemaps-plugin,pinkbike/phonegap-googlemaps-plugin,EmilianStankov/cordova-plugin-googlemaps,LouisMazel/cordova-plugin-googlemaps,denisbabineau/cordova-plugin-googlemaps,alexislg2/phonegap-googlemaps-plugin,matsprea/phonegap-googlemaps-plugin,mapsplugin/cordova-plugin-googlemaps,hungdoan2/phonegap-googlemaps-plugin,pinkbike/phonegap-googlemaps-plugin,hitoci/phonegap-googlemaps-plugin,smcpjames/cordova-plugin-googlemaps,hitoci/phonegap-googlemaps-plugin,blanedaze/phonegap-googlemaps-plugin,rynomster/cordova-plugin-googlemaps,alexislg2/phonegap-googlemaps-plugin,blanedaze/phonegap-googlemaps-plugin,denisbabineau/phonegap-googlemaps-plugin,athiradandira/phonegap-googlemaps-plugin,joewoodhouse/phonegap-googlemaps-plugin,denisbabineau/cordova-plugin-googlemaps,alexislg2/phonegap-googlemaps-plugin,cabify/cordova-plugin-googlemaps,denisbabineau/cordova-plugin-googlemaps,quiuquio/cordova-plugin-googlemaps,edivancamargo/phonegap-googlemaps-plugin,edivancamargo/phonegap-googlemaps-plugin,rynomster/cordova-plugin-googlemaps,hungdoan2/phonegap-googlemaps-plugin,EmilianStankov/cordova-plugin-googlemaps,cabify/cordova-plugin-googlemaps,mapsplugin/cordova-plugin-googlemaps,pinkbike/phonegap-googlemaps-plugin,athiradandira/phonegap-googlemaps-plugin,dukhanov/cordova-plugin-googlemaps,edivancamargo/phonegap-googlemaps-plugin,LouisMazel/cordova-plugin-googlemaps,joewoodhouse/phonegap-googlemaps-plugin,rynomster/cordova-plugin-googlemaps,dukhanov/cordova-plugin-googlemaps,blanedaze/phonegap-googlemaps-plugin,matsprea/phonegap-googlemaps-plugin,matsprea/phonegap-googlemaps-plugin,hungdoan2/phonegap-googlemaps-plugin,LouisMazel/cordova-plugin-googlemaps
|
package plugin.google.maps;
import org.apache.cordova.CallbackContext;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import com.google.android.gms.maps.model.TileOverlay;
import com.google.android.gms.maps.model.TileOverlayOptions;
public class PluginTileOverlay extends MyPlugin implements MyPluginInterface {
/**
* Create tile overlay
*
* @param args
* @param callbackContext
* @throws JSONException
*/
@SuppressWarnings("unused")
private void createTileOverlay(final JSONArray args,
final CallbackContext callbackContext) throws JSONException {
JSONObject opts = args.getJSONObject(1);
int tileSize = opts.getInt("tileSize");
final String tileUrlFormat = opts.getString("tileUrlFormat");
double opacity = 1.0;
if (opts.has("opacity")) {
opacity = opts.getDouble("opacity");
}
PluginTileProvider tileProvider = new PluginTileProvider(tileUrlFormat, opacity, tileSize);
TileOverlayOptions options = new TileOverlayOptions();
options.tileProvider(tileProvider);
if (opts.has("zIndex")) {
options.zIndex((float)opts.getDouble("zIndex"));
}
if (opts.has("visible")) {
options.visible(opts.getBoolean("visible"));
}
TileOverlay tileOverlay = this.map.addTileOverlay(options);
String id = "tile_" + tileOverlay.getId();
this.objects.put(id, tileOverlay);
this.objects.put("tileProvider_" + id.replace("tile_", "tileProvider_"), tileProvider);
JSONObject result = new JSONObject();
result.put("hashCode", tileOverlay.hashCode());
result.put("id", id);
callbackContext.success(result);
}
/**
* set z-index
* @param args
* @param callbackContext
* @throws JSONException
*/
@SuppressWarnings("unused")
private void setZIndex(final JSONArray args, final CallbackContext callbackContext) throws JSONException {
String id = args.getString(1);
float zIndex = (float) args.getDouble(2);
this.setFloat("setZIndex", id, zIndex, callbackContext);
}
/**
* Set visibility for the object
* @param args
* @param callbackContext
* @throws JSONException
*/
protected void setVisible(JSONArray args, CallbackContext callbackContext) throws JSONException {
boolean visible = args.getBoolean(2);
String id = args.getString(1);
this.setBoolean("setVisible", id, visible, callbackContext);
}
/**
* Remove this tile layer
* @param args
* @param callbackContext
* @throws JSONException
*/
protected void remove(JSONArray args, CallbackContext callbackContext) throws JSONException {
String id = args.getString(1);
TileOverlay tileOverlay = (TileOverlay)this.objects.get(id);
if (tileOverlay == null) {
this.sendNoResult(callbackContext);
return;
}
tileOverlay.remove();
tileOverlay.clearTileCache();
id = id.replace("tile_", "tileProvider_");
this.objects.put(id, null);
this.objects.remove(id);
this.sendNoResult(callbackContext);
}
/**
* Clear cache
* @param args
* @param callbackContext
* @throws JSONException
*/
protected void clearTileCache(JSONArray args, CallbackContext callbackContext) throws JSONException {
String id = args.getString(1);
TileOverlay tileOverlay = (TileOverlay)this.objects.get(id);
tileOverlay.clearTileCache();
this.sendNoResult(callbackContext);
}
/**
* Set fadeIn for the object
* @param args
* @param callbackContext
* @throws JSONException
*/
protected void setFadeIn(JSONArray args, CallbackContext callbackContext) throws JSONException {
boolean visible = args.getBoolean(2);
String id = args.getString(1);
this.setBoolean("setFadeIn", id, visible, callbackContext);
}
/**
* Set opacity for the tile layer
* @param args
* @param callbackContext
* @throws JSONException
*/
protected void setOpacity(JSONArray args, CallbackContext callbackContext) throws JSONException {
double opacity = args.getDouble(2);
String id = args.getString(1);
id = id.replace("tile_", "tileProvider_");
PluginTileProvider tileProvider = (PluginTileProvider)this.objects.get(id);
tileProvider.setOpacity(opacity);
this.sendNoResult(callbackContext);
}
}
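
The fix above hinges on the overlay and its tile provider being stored under matching keys in the shared objects map, so that remove() and setOpacity() can resolve the provider again later. A minimal, self-contained sketch of that key convention follows; the map, ids and placeholder objects are illustrative and not part of the plugin:

import java.util.HashMap;
import java.util.Map;

class TileKeyConventionSketch {
  public static void main(String[] args) {
    Map<String, Object> objects = new HashMap<String, Object>();

    // On create: the overlay key gets a "tile_" prefix and the provider key
    // is derived from it by swapping the prefix.
    String overlayKey = "tile_to42"; // hypothetical TileOverlay id
    String providerKey = overlayKey.replace("tile_", "tileProvider_");
    objects.put(overlayKey, new Object());   // stands in for the TileOverlay
    objects.put(providerKey, new Object());  // stands in for the PluginTileProvider

    // On remove: the same derivation must produce the same key,
    // otherwise the provider entry is never cleaned up.
    objects.remove(overlayKey);
    objects.remove(overlayKey.replace("tile_", "tileProvider_"));
    System.out.println(objects.isEmpty()); // prints true when the keys are consistent
  }
}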
|
src/android/plugin/google/maps/PluginTileOverlay.java
|
package plugin.google.maps;
import org.apache.cordova.CallbackContext;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import com.google.android.gms.maps.model.TileOverlay;
import com.google.android.gms.maps.model.TileOverlayOptions;
public class PluginTileOverlay extends MyPlugin implements MyPluginInterface {
/**
* Create tile overlay
*
* @param args
* @param callbackContext
* @throws JSONException
*/
@SuppressWarnings("unused")
private void createTileOverlay(final JSONArray args,
final CallbackContext callbackContext) throws JSONException {
JSONObject opts = args.getJSONObject(1);
int tileSize = opts.getInt("tileSize");
final String tileUrlFormat = opts.getString("tileUrlFormat");
double opacity = 1.0;
if (opts.has("opacity")) {
opacity = opts.getDouble("opacity");
}
PluginTileProvider tileProvider = new PluginTileProvider(tileUrlFormat, opacity, tileSize);
TileOverlayOptions options = new TileOverlayOptions();
options.tileProvider(tileProvider);
if (opts.has("zIndex")) {
options.zIndex((float)opts.getDouble("zIndex"));
}
if (opts.has("visible")) {
options.visible(opts.getBoolean("visible"));
}
TileOverlay tileOverlay = this.map.addTileOverlay(options);
String id = "tile_" + tileOverlay.getId();
this.objects.put("tileProvider_" + id, tileProvider);
JSONObject result = new JSONObject();
result.put("hashCode", tileOverlay.hashCode());
result.put("id", id);
callbackContext.success(result);
}
/**
* set z-index
* @param args
* @param callbackContext
* @throws JSONException
*/
@SuppressWarnings("unused")
private void setZIndex(final JSONArray args, final CallbackContext callbackContext) throws JSONException {
String id = args.getString(1);
float zIndex = (float) args.getDouble(2);
this.setFloat("setZIndex", id, zIndex, callbackContext);
}
/**
* Set visibility for the object
* @param args
* @param callbackContext
* @throws JSONException
*/
protected void setVisible(JSONArray args, CallbackContext callbackContext) throws JSONException {
boolean visible = args.getBoolean(2);
String id = args.getString(1);
this.setBoolean("setVisible", id, visible, callbackContext);
}
/**
* Remove this tile layer
* @param args
* @param callbackContext
* @throws JSONException
*/
protected void remove(JSONArray args, CallbackContext callbackContext) throws JSONException {
String id = args.getString(1);
TileOverlay tileOverlay = (TileOverlay)this.objects.get(id);
if (tileOverlay == null) {
this.sendNoResult(callbackContext);
return;
}
tileOverlay.remove();
tileOverlay.clearTileCache();
id = id.replace("tile_", "tileProvider_");
this.objects.put(id, null);
this.objects.remove(id);
this.sendNoResult(callbackContext);
}
/**
* Clear cache
* @param args
* @param callbackContext
* @throws JSONException
*/
protected void clearTileCache(JSONArray args, CallbackContext callbackContext) throws JSONException {
String id = args.getString(1);
TileOverlay tileOverlay = (TileOverlay)this.objects.get(id);
tileOverlay.clearTileCache();
this.sendNoResult(callbackContext);
}
/**
* Set fadeIn for the object
* @param args
* @param callbackContext
* @throws JSONException
*/
protected void setFadeIn(JSONArray args, CallbackContext callbackContext) throws JSONException {
boolean visible = args.getBoolean(2);
String id = args.getString(1);
this.setBoolean("setFadeIn", id, visible, callbackContext);
}
/**
* Set opacity for the tile layer
* @param args
* @param callbackContext
* @throws JSONException
*/
protected void setOpacity(JSONArray args, CallbackContext callbackContext) throws JSONException {
double opacity = args.getDouble(2);
String id = args.getString(1);
id = id.replace("tile_", "tileProvider_");
PluginTileProvider tileProvider = (PluginTileProvider)this.objects.get(id);
tileProvider.setOpacity(opacity);
}
}
|
fix TileOverlay remove bug
|
src/android/plugin/google/maps/PluginTileOverlay.java
|
fix TileOverlay remove bug
|
|
Java
|
apache-2.0
|
fb535e0f18a12cc5faacae1d01fcf0c195a428a3
| 0
|
Codigami/pinterest4j
|
/*
* Copyright (c) 2017 Aniket Divekar
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package pinterest4j.entity;
import pinterest4j.util.http.HttpResponse;
import java.util.List;
import java.util.Map;
/**
* Entity representing Rate limit status for Pinterest API
*
* Created by Aniket Divekar.
*/
public class RateLimitStatus {
private int limit;
private int remaining;
private RateLimitStatus(int limit, int remaining) {
this.limit = limit;
this.remaining = remaining;
}
public static RateLimitStatus createRateLimitStatus(HttpResponse res) {
if (null == res || res.getResponseHeaderFields() == null) {
return null;
}
Map<String, List<String>> headerFields = res.getResponseHeaderFields();
List<String> limits = headerFields.get("X-Ratelimit-Limit");
List<String> remaining = headerFields.get("X-Ratelimit-Remaining");
if (limits == null || remaining == null || limits.isEmpty() || remaining.isEmpty()) {
return null;
}
return new RateLimitStatus(Integer.valueOf(limits.get(0)), Integer.valueOf(remaining.get(0)));
}
public int getLimit() {
return limit;
}
public int getRemaining() {
return remaining;
}
}
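
The factory above reads the limit and remaining counts from the X-Ratelimit-Limit and X-Ratelimit-Remaining response headers. A self-contained sketch of the same parsing approach using only java.util types (Java 9+); the class name and header values are illustrative and not part of pinterest4j:

import java.util.Collections;
import java.util.List;
import java.util.Map;

class RateLimitHeaderSketch {
  public static void main(String[] args) {
    // Headers in the shape java.net connections expose them: name -> list of values.
    Map<String, List<String>> headers = Map.of(
        "X-Ratelimit-Limit", List.of("1000"),
        "X-Ratelimit-Remaining", List.of("998"));

    List<String> limits = headers.getOrDefault("X-Ratelimit-Limit", Collections.emptyList());
    List<String> remaining = headers.getOrDefault("X-Ratelimit-Remaining", Collections.emptyList());

    if (!limits.isEmpty() && !remaining.isEmpty()) {
      int limit = Integer.parseInt(limits.get(0));
      int left = Integer.parseInt(remaining.get(0));
      System.out.println(limit + " requests allowed per window, " + left + " remaining");
    }
  }
}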
|
src/main/java/pinterest4j/entity/RateLimitStatus.java
|
/*
* Copyright (c) 2017 Aniket Divekar
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package pinterest4j.entity;
import pinterest4j.util.http.HttpResponse;
import java.util.List;
import java.util.Map;
/**
* Entity representing Rate limit status for Pinterest API
*
* Created by Aniket Divekar.
*/
public class RateLimitStatus {
private int limit;
private int remaining;
private RateLimitStatus(int limit, int remaining) {
this.limit = limit;
this.remaining = remaining;
}
static RateLimitStatus createRateLimitStatus(HttpResponse res) {
if (null == res || res.getResponseHeaderFields() == null) {
return null;
}
Map<String, List<String>> headerFields = res.getResponseHeaderFields();
List<String> limits = headerFields.get("X-Ratelimit-Limit");
List<String> remaining = headerFields.get("X-Ratelimit-Remaining");
if (limits == null || remaining == null || limits.isEmpty() || remaining.isEmpty()) {
return null;
}
return new RateLimitStatus(Integer.valueOf(limits.get(0)), Integer.valueOf(remaining.get(0)));
}
public void init (int limit, int remaining) {
this.limit = limit;
this.remaining = remaining;
}
public int getLimit() {
return limit;
}
public int getRemaining() {
return remaining;
}
}
|
giving `createRateLimitStatus` public access instead of `package` level access.
|
src/main/java/pinterest4j/entity/RateLimitStatus.java
|
giving `createRateLimitStatus` public access instead of `package` level access.
|
|
Java
|
apache-2.0
|
4a50ade7a8a9cad2832b00658b99a0cc8411efb2
| 0
|
gnodet/camel,gnodet/camel,tadayosi/camel,cunningt/camel,tdiesler/camel,christophd/camel,davidkarlsen/camel,nicolaferraro/camel,Fabryprog/camel,davidkarlsen/camel,pax95/camel,tdiesler/camel,ullgren/camel,nicolaferraro/camel,pmoerenhout/camel,Fabryprog/camel,pax95/camel,zregvart/camel,alvinkwekel/camel,DariusX/camel,CodeSmell/camel,CodeSmell/camel,pmoerenhout/camel,davidkarlsen/camel,CodeSmell/camel,apache/camel,tdiesler/camel,nicolaferraro/camel,alvinkwekel/camel,adessaigne/camel,adessaigne/camel,davidkarlsen/camel,adessaigne/camel,mcollovati/camel,CodeSmell/camel,cunningt/camel,zregvart/camel,pmoerenhout/camel,DariusX/camel,tadayosi/camel,pax95/camel,DariusX/camel,zregvart/camel,pax95/camel,Fabryprog/camel,objectiser/camel,tdiesler/camel,pmoerenhout/camel,cunningt/camel,mcollovati/camel,ullgren/camel,christophd/camel,tadayosi/camel,adessaigne/camel,christophd/camel,nikhilvibhav/camel,nikhilvibhav/camel,pmoerenhout/camel,apache/camel,tadayosi/camel,objectiser/camel,DariusX/camel,cunningt/camel,objectiser/camel,nicolaferraro/camel,pmoerenhout/camel,gnodet/camel,zregvart/camel,tdiesler/camel,tadayosi/camel,adessaigne/camel,mcollovati/camel,alvinkwekel/camel,tadayosi/camel,adessaigne/camel,ullgren/camel,apache/camel,apache/camel,mcollovati/camel,alvinkwekel/camel,christophd/camel,apache/camel,apache/camel,pax95/camel,gnodet/camel,gnodet/camel,nikhilvibhav/camel,nikhilvibhav/camel,pax95/camel,christophd/camel,cunningt/camel,Fabryprog/camel,tdiesler/camel,ullgren/camel,objectiser/camel,cunningt/camel,christophd/camel
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.mongodb.gridfs;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;
import com.mongodb.ReadPreference;
import com.mongodb.WriteConcern;
import com.mongodb.gridfs.GridFS;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.CamelContextHelper;
import org.apache.camel.support.DefaultEndpoint;
/**
* Component for working with MongoDB GridFS.
*/
@UriEndpoint(firstVersion = "2.18.0", scheme = "mongodb-gridfs", title = "MongoDB GridFS", syntax = "mongodb-gridfs:connectionBean", label = "database,nosql")
public class GridFsEndpoint extends DefaultEndpoint {
public static final String GRIDFS_OPERATION = "gridfs.operation";
public static final String GRIDFS_METADATA = "gridfs.metadata";
public static final String GRIDFS_CHUNKSIZE = "gridfs.chunksize";
public static final String GRIDFS_FILE_ID_PRODUCED = "gridfs.fileid";
@UriPath @Metadata(required = true)
private String connectionBean;
@UriParam @Metadata(required = true)
private String database;
@UriParam(defaultValue = GridFS.DEFAULT_BUCKET)
private String bucket;
@UriParam(enums = "ACKNOWLEDGED,W1,W2,W3,UNACKNOWLEDGED,JOURNALED,MAJORITY,SAFE")
private WriteConcern writeConcern;
@UriParam
private WriteConcern writeConcernRef;
@UriParam
private ReadPreference readPreference;
@UriParam(label = "producer")
private String operation;
@UriParam(label = "consumer")
private String query;
@UriParam(label = "consumer", defaultValue = "1000")
private long initialDelay = 1000;
@UriParam(label = "consumer", defaultValue = "500")
private long delay = 500;
@UriParam(label = "consumer", defaultValue = "TimeStamp")
private QueryStrategy queryStrategy = QueryStrategy.TimeStamp;
@UriParam(label = "consumer", defaultValue = "camel-timestamps")
private String persistentTSCollection = "camel-timestamps";
@UriParam(label = "consumer", defaultValue = "camel-timestamp")
private String persistentTSObject = "camel-timestamp";
@UriParam(label = "consumer", defaultValue = "camel-processed")
private String fileAttributeName = "camel-processed";
private Mongo mongoConnection;
private DB db;
private GridFS gridFs;
private DBCollection filesCollection;
public GridFsEndpoint(String uri, GridFsComponent component) {
super(uri, component);
}
@Override
public Producer createProducer() throws Exception {
initializeConnection();
return new GridFsProducer(this);
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
initializeConnection();
return new GridFsConsumer(this, processor);
}
public void initializeConnection() throws Exception {
log.info("Initialize GridFS endpoint: {}", this);
if (database == null) {
throw new IllegalStateException("Missing required endpoint configuration: database");
}
db = mongoConnection.getDB(database);
if (db == null) {
throw new IllegalStateException("Could not initialize GridFsComponent. Database " + database + " does not exist.");
}
gridFs = new GridFS(db, bucket == null ? GridFS.DEFAULT_BUCKET : bucket) {
{
filesCollection = getFilesCollection();
}
};
}
@Override
protected void doStart() throws Exception {
if (writeConcern != null && writeConcernRef != null) {
String msg = "Cannot set both writeConcern and writeConcernRef at the same time. Respective values: " + writeConcern
+ ", " + writeConcernRef + ". Aborting initialization.";
throw new IllegalArgumentException(msg);
}
mongoConnection = CamelContextHelper.mandatoryLookup(getCamelContext(), connectionBean, MongoClient.class);
log.debug("Resolved the connection with the name {} as {}", connectionBean, mongoConnection);
setWriteReadOptionsOnConnection();
super.doStart();
}
@Override
protected void doStop() throws Exception {
super.doStop();
if (mongoConnection != null) {
log.debug("Closing connection");
mongoConnection.close();
}
}
private void setWriteReadOptionsOnConnection() {
// Set the WriteConcern
if (writeConcern != null) {
mongoConnection.setWriteConcern(writeConcern);
} else if (writeConcernRef != null) {
mongoConnection.setWriteConcern(writeConcernRef);
}
// Set the ReadPreference
if (readPreference != null) {
mongoConnection.setReadPreference(readPreference);
}
}
// ======= Getters and setters ===============================================
public String getConnectionBean() {
return connectionBean;
}
/**
* Name of {@link com.mongodb.Mongo} to use.
*/
public void setConnectionBean(String connectionBean) {
this.connectionBean = connectionBean;
}
public Mongo getMongoConnection() {
return mongoConnection;
}
/**
* Sets the Mongo instance that represents the backing connection
*
* @param mongoConnection the connection to the database
*/
public void setMongoConnection(Mongo mongoConnection) {
this.mongoConnection = mongoConnection;
}
public DB getDB() {
return db;
}
public String getDatabase() {
return database;
}
/**
* Sets the name of the MongoDB database to target
*
* @param database name of the MongoDB database
*/
public void setDatabase(String database) {
this.database = database;
}
public String getBucket() {
return bucket;
}
/**
* Sets the name of the GridFS bucket within the database. Default is "fs".
*
* @param bucket name of the GridFS bucket
*/
public void setBucket(String bucket) {
this.bucket = bucket;
}
public String getQuery() {
return query;
}
/**
* Additional query parameters (in JSON) that are used to configure the query used for finding
* files in the GridFsConsumer
* @param query
*/
public void setQuery(String query) {
this.query = query;
}
public long getDelay() {
return delay;
}
/**
* Sets the delay between polls within the Consumer. Default is 500ms
* @param delay
*/
public void setDelay(long delay) {
this.delay = delay;
}
public long getInitialDelay() {
return initialDelay;
}
/**
* Sets the initialDelay before the consumer will start polling. Default is 1000ms
* @param initialDelay
*/
public void setInitialDelay(long initialDelay) {
this.initialDelay = initialDelay;
}
/**
* Sets the QueryStrategy that is used for polling for new files. Default is Timestamp
* @see QueryStrategy
* @param s
*/
public void setQueryStrategy(String s) {
queryStrategy = QueryStrategy.valueOf(s);
}
public QueryStrategy getQueryStrategy() {
return queryStrategy;
}
/**
* If the QueryType uses a persistent timestamp, this sets the name of the collection within
* the DB to store the timestamp.
* @param s
*/
public void setPersistentTSCollection(String s) {
persistentTSCollection = s;
}
public String getPersistentTSCollection() {
return persistentTSCollection;
}
/**
* If the QueryType uses a persistent timestamp, this is the ID of the object in the collection
* to store the timestamp.
* @param id
*/
public void setPersistentTSObject(String id) {
persistentTSObject = id;
}
public String getPersistentTSObject() {
return persistentTSObject;
}
/**
* If the QueryType uses a FileAttribute, this sets the name of the attribute that is used. Default is "camel-processed".
* @param f
*/
public void setFileAttributeName(String f) {
fileAttributeName = f;
}
public String getFileAttributeName() {
return fileAttributeName;
}
/**
* Set the {@link WriteConcern} for write operations on MongoDB using the standard ones.
* Resolved from the fields of the WriteConcern class by calling the {@link WriteConcern#valueOf(String)} method.
*
* @param writeConcern the standard name of the WriteConcern
* @see <a href="http://api.mongodb.org/java/current/com/mongodb/WriteConcern.html#valueOf(java.lang.String)">possible options</a>
*/
public void setWriteConcern(String writeConcern) {
this.writeConcern = WriteConcern.valueOf(writeConcern);
}
public WriteConcern getWriteConcern() {
return writeConcern;
}
/**
* Set the {@link WriteConcern} for write operations on MongoDB, passing in the bean ref to a custom WriteConcern which exists in the Registry.
* You can also use standard WriteConcerns by passing in their key. See the {@link #setWriteConcern(String) setWriteConcern} method.
*
* @param writeConcernRef the name of the bean in the registry that represents the WriteConcern to use
*/
public void setWriteConcernRef(String writeConcernRef) {
WriteConcern wc = this.getCamelContext().getRegistry().lookupByNameAndType(writeConcernRef, WriteConcern.class);
if (wc == null) {
String msg = "Camel MongoDB component could not find the WriteConcern in the Registry. Verify that the "
+ "provided bean name (" + writeConcernRef + ") is correct. Aborting initialization.";
throw new IllegalArgumentException(msg);
}
this.writeConcernRef = wc;
}
public WriteConcern getWriteConcernRef() {
return writeConcernRef;
}
/**
* Sets a MongoDB {@link ReadPreference} on the Mongo connection. Read preferences set directly on the connection will be
* overridden by this setting.
* <p/>
* The {@link com.mongodb.ReadPreference#valueOf(String)} utility method is used to resolve the passed {@code readPreference}
* value. Some examples for the possible values are {@code nearest}, {@code primary} or {@code secondary} etc.
*
* @param readPreference the name of the read preference to set
*/
public void setReadPreference(String readPreference) {
this.readPreference = ReadPreference.valueOf(readPreference);
}
public ReadPreference getReadPreference() {
return readPreference;
}
/**
* Sets the operation this endpoint will execute against GridFS.
*/
public void setOperation(String operation) {
this.operation = operation;
}
public String getOperation() {
return operation;
}
public GridFS getGridFs() {
return gridFs;
}
public void setGridFs(GridFS gridFs) {
this.gridFs = gridFs;
}
public DBCollection getFilesCollection() {
return filesCollection;
}
}
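
For orientation, a hedged sketch of how a route might address this endpoint; the registry bean name, database, bucket and operation values are illustrative, and it assumes a com.mongodb.MongoClient is bound in the Camel registry under that name:

import org.apache.camel.builder.RouteBuilder;

// Illustrative only: "myMongo" is a hypothetical MongoClient bean in the registry,
// and the chosen options map onto the @UriParam fields declared above.
class GridFsRouteSketch extends RouteBuilder {
  @Override
  public void configure() {
    from("file://target/gridfs-inbox")
        .to("mongodb-gridfs:myMongo?database=myDb&bucket=fs&operation=create");
  }
}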
|
components/camel-mongodb-gridfs/src/main/java/org/apache/camel/component/mongodb/gridfs/GridFsEndpoint.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.mongodb.gridfs;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.Mongo;
import com.mongodb.MongoClient;
import com.mongodb.ReadPreference;
import com.mongodb.WriteConcern;
import com.mongodb.gridfs.GridFS;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.CamelContextHelper;
import org.apache.camel.support.DefaultEndpoint;
/**
* Component for working with MongoDB GridFS.
*/
@UriEndpoint(firstVersion = "2.18.0", scheme = "mongodb-gridfs", title = "MongoDB GridFS", syntax = "mongodb-gridfs:connectionBean", label = "database,nosql")
public class GridFsEndpoint extends DefaultEndpoint {
public static final String GRIDFS_OPERATION = "gridfs.operation";
public static final String GRIDFS_METADATA = "gridfs.metadata";
public static final String GRIDFS_CHUNKSIZE = "gridfs.chunksize";
public static final String GRIDFS_FILE_ID_PRODUCED = "gridfs.fileid";
@UriPath @Metadata(required = true)
private String connectionBean;
@UriParam @Metadata(required = true)
private String database;
@UriParam(defaultValue = GridFS.DEFAULT_BUCKET)
private String bucket;
@UriParam(enums = "ACKNOWLEDGED,W1,W2,W3,UNACKNOWLEDGED,JOURNALED,MAJORITY,SAFE")
private WriteConcern writeConcern;
@UriParam
private WriteConcern writeConcernRef;
@UriParam
private ReadPreference readPreference;
@UriParam(label = "producer")
private String operation;
@UriParam(label = "consumer")
private String query;
@UriParam(label = "consumer", defaultValue = "1000")
private long initialDelay = 1000;
@UriParam(label = "consumer", defaultValue = "500")
private long delay = 500;
@UriParam(label = "consumer", defaultValue = "TimeStamp")
private QueryStrategy queryStrategy = QueryStrategy.TimeStamp;
@UriParam(label = "consumer", defaultValue = "camel-timestamps")
private String persistentTSCollection = "camel-timestamps";
@UriParam(label = "consumer", defaultValue = "camel-timestamp")
private String persistentTSObject = "camel-timestamp";
@UriParam(label = "consumer", defaultValue = "camel-processed")
private String fileAttributeName = "camel-processed";
private Mongo mongoConnection;
private DB db;
private GridFS gridFs;
private DBCollection filesCollection;
public GridFsEndpoint(String uri, GridFsComponent component) {
super(uri, component);
}
@Override
public Producer createProducer() throws Exception {
initializeConnection();
return new GridFsProducer(this);
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
initializeConnection();
return new GridFsConsumer(this, processor);
}
public void initializeConnection() throws Exception {
log.info("Initialize GridFS endpoint: {}", this);
if (database == null) {
throw new IllegalStateException("Missing required endpoint configuration: database");
}
db = mongoConnection.getDB(database);
if (db == null) {
throw new IllegalStateException("Could not initialize GridFsComponent. Database " + database + " does not exist.");
}
gridFs = new GridFS(db, bucket == null ? GridFS.DEFAULT_BUCKET : bucket) {
{
filesCollection = getFilesCollection();
}
};
}
@Override
protected void doStart() throws Exception {
if (writeConcern != null && writeConcernRef != null) {
String msg = "Cannot set both writeConcern and writeConcernRef at the same time. Respective values: " + writeConcern
+ ", " + writeConcernRef + ". Aborting initialization.";
throw new IllegalArgumentException(msg);
}
mongoConnection = CamelContextHelper.mandatoryLookup(getCamelContext(), connectionBean, MongoClient.class);
log.debug("Resolved the connection with the name {} as {}", connectionBean, mongoConnection);
setWriteReadOptionsOnConnection();
super.doStart();
}
@Override
protected void doStop() throws Exception {
super.doStop();
if (mongoConnection != null) {
log.debug("Closing connection");
mongoConnection.close();
}
}
private void setWriteReadOptionsOnConnection() {
// Set the WriteConcern
if (writeConcern != null) {
mongoConnection.setWriteConcern(writeConcern);
} else if (writeConcernRef != null) {
mongoConnection.setWriteConcern(writeConcernRef);
}
// Set the ReadPreference
if (readPreference != null) {
mongoConnection.setReadPreference(readPreference);
}
}
// ======= Getters and setters ===============================================
public String getConnectionBean() {
return connectionBean;
}
/**
* Name of {@link com.mongodb.Mongo} to use.
*/
public void setConnectionBean(String connectionBean) {
this.connectionBean = connectionBean;
}
public Mongo getMongoConnection() {
return mongoConnection;
}
/**
* Sets the Mongo instance that represents the backing connection
*
* @param mongoConnection the connection to the database
*/
public void setMongoConnection(Mongo mongoConnection) {
this.mongoConnection = mongoConnection;
}
public DB getDB() {
return db;
}
public String getDatabase() {
return database;
}
/**
* Sets the name of the MongoDB database to target
*
* @param database name of the MongoDB database
*/
public void setDatabase(String database) {
this.database = database;
}
/**
* Sets the name of the GridFS bucket within the database. Default is "fs".
*
* @param database name of the MongoDB database
*/
public String getBucket() {
return bucket;
}
public void setBucket(String bucket) {
this.bucket = bucket;
}
public String getQuery() {
return query;
}
/**
* Additional query parameters (in JSON) that are used to configure the query used for finding
* files in the GridFsConsumer
* @param query
*/
public void setQuery(String query) {
this.query = query;
}
public long getDelay() {
return delay;
}
/**
* Sets the delay between polls within the Consumer. Default is 500ms
* @param delay
*/
public void setDelay(long delay) {
this.delay = delay;
}
public long getInitialDelay() {
return initialDelay;
}
/**
* Sets the initialDelay before the consumer will start polling. Default is 1000ms
* @param initialDelay
*/
public void setInitialDelay(long initialDelay) {
this.initialDelay = initialDelay;
}
/**
* Sets the QueryStrategy that is used for polling for new files. Default is Timestamp
* @see QueryStrategy
* @param s
*/
public void setQueryStrategy(String s) {
queryStrategy = QueryStrategy.valueOf(s);
}
public QueryStrategy getQueryStrategy() {
return queryStrategy;
}
/**
* If the QueryType uses a persistent timestamp, this sets the name of the collection within
* the DB to store the timestamp.
* @param s
*/
public void setPersistentTSCollection(String s) {
persistentTSCollection = s;
}
public String getPersistentTSCollection() {
return persistentTSCollection;
}
/**
* If the QueryType uses a persistent timestamp, this is the ID of the object in the collection
* to store the timestamp.
* @param s
*/
public void setPersistentTSObject(String id) {
persistentTSObject = id;
}
public String getPersistentTSObject() {
return persistentTSObject;
}
/**
* If the QueryType uses a FileAttribute, this sets the name of the attribute that is used. Default is "camel-processed".
* @param f
*/
public void setFileAttributeName(String f) {
fileAttributeName = f;
}
public String getFileAttributeName() {
return fileAttributeName;
}
/**
* Set the {@link WriteConcern} for write operations on MongoDB using the standard ones.
* Resolved from the fields of the WriteConcern class by calling the {@link WriteConcern#valueOf(String)} method.
*
* @param writeConcern the standard name of the WriteConcern
* @see <a href="http://api.mongodb.org/java/current/com/mongodb/WriteConcern.html#valueOf(java.lang.String)">possible options</a>
*/
public void setWriteConcern(String writeConcern) {
this.writeConcern = WriteConcern.valueOf(writeConcern);
}
public WriteConcern getWriteConcern() {
return writeConcern;
}
/**
* Set the {@link WriteConcern} for write operations on MongoDB, passing in the bean ref to a custom WriteConcern which exists in the Registry.
* You can also use standard WriteConcerns by passing in their key. See the {@link #setWriteConcern(String) setWriteConcern} method.
*
* @param writeConcernRef the name of the bean in the registry that represents the WriteConcern to use
*/
public void setWriteConcernRef(String writeConcernRef) {
WriteConcern wc = this.getCamelContext().getRegistry().lookupByNameAndType(writeConcernRef, WriteConcern.class);
if (wc == null) {
String msg = "Camel MongoDB component could not find the WriteConcern in the Registry. Verify that the "
+ "provided bean name (" + writeConcernRef + ") is correct. Aborting initialization.";
throw new IllegalArgumentException(msg);
}
this.writeConcernRef = wc;
}
public WriteConcern getWriteConcernRef() {
return writeConcernRef;
}
/**
* Sets a MongoDB {@link ReadPreference} on the Mongo connection. Read preferences set directly on the connection will be
* overridden by this setting.
* <p/>
* The {@link com.mongodb.ReadPreference#valueOf(String)} utility method is used to resolve the passed {@code readPreference}
* value. Some examples for the possible values are {@code nearest}, {@code primary} or {@code secondary} etc.
*
* @param readPreference the name of the read preference to set
*/
public void setReadPreference(String readPreference) {
this.readPreference = ReadPreference.valueOf(readPreference);
}
public ReadPreference getReadPreference() {
return readPreference;
}
/**
* Sets the operation this endpoint will execute against GridFS.
*/
public void setOperation(String operation) {
this.operation = operation;
}
public String getOperation() {
return operation;
}
public GridFS getGridFs() {
return gridFs;
}
public void setGridFs(GridFS gridFs) {
this.gridFs = gridFs;
}
public DBCollection getFilesCollection() {
return filesCollection;
}
}
|
Fixed CS for Camel-MongoDB-Gridfs
|
components/camel-mongodb-gridfs/src/main/java/org/apache/camel/component/mongodb/gridfs/GridFsEndpoint.java
|
Fixed CS for Camel-MongoDB-Gridfs
|
|
Java
|
apache-2.0
|
8583e36d447b1afe8333aa7bd52b373144359ba2
| 0
|
fengbaicanhe/intellij-community,Lekanich/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,retomerz/intellij-community,pwoodworth/intellij-community,gnuhub/intellij-community,TangHao1987/intellij-community,Lekanich/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,fnouama/intellij-community,hurricup/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,dslomov/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,allotria/intellij-community,blademainer/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,idea4bsd/idea4bsd,nicolargo/intellij-community,jagguli/intellij-community,dslomov/intellij-community,slisson/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,ftomassetti/intellij-community,caot/intellij-community,FHannes/intellij-community,kool79/intellij-community,supersven/intellij-community,Distrotech/intellij-community,petteyg/intellij-community,amith01994/intellij-community,fnouama/intellij-community,tmpgit/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,holmes/intellij-community,joewalnes/idea-community,SerCeMan/intellij-community,ivan-fedorov/intellij-community,ftomassetti/intellij-community,michaelgallacher/intellij-community,MER-GROUP/intellij-community,michaelgallacher/intellij-community,jexp/idea2,mglukhikh/intellij-community,blademainer/intellij-community,adedayo/intellij-community,ftomassetti/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,akosyakov/intellij-community,supersven/intellij-community,slisson/intellij-community,wreckJ/intellij-community,apixandru/intellij-community,jexp/idea2,MER-GROUP/intellij-community,ibinti/intellij-community,TangHao1987/intellij-community,retomerz/intellij-community,samthor/intellij-community,muntasirsyed/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,dslomov/intellij-community,FHannes/intellij-community,diorcety/intellij-community,apixandru/intellij-community,jexp/idea2,dslomov/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,Distrotech/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,petteyg/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,ivan-fedorov/intellij-community,supersven/intellij-community,da1z/intellij-community,ernestp/consulo,ernestp/consulo,alphafoobar/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,da1z/intellij-community,robovm/robovm-studio,holmes/intellij-community,fnouama/intellij-community,tmpgit/intellij-community,izonder/intellij-community,fitermay/intellij-community,clumsy/intellij-community,tmpgit/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,supersven/intellij-community,consulo/consulo,ibinti/intellij-community,xfournet/intellij-community,supersven/intellij-community,da1z/intellij-community,holmes/intellij-community,wreckJ/intellij-community,fnouama/intellij-community,fnouama/intellij-community,ahb0327/in
tellij-community,clumsy/intellij-community,signed/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,amith01994/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,clumsy/intellij-community,samthor/intellij-community,ryano144/intellij-community,ol-loginov/intellij-community,lucafavatella/intellij-community,ivan-fedorov/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,xfournet/intellij-community,allotria/intellij-community,robovm/robovm-studio,hurricup/intellij-community,samthor/intellij-community,hurricup/intellij-community,jexp/idea2,MER-GROUP/intellij-community,pwoodworth/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,amith01994/intellij-community,ryano144/intellij-community,blademainer/intellij-community,alphafoobar/intellij-community,fengbaicanhe/intellij-community,semonte/intellij-community,diorcety/intellij-community,ivan-fedorov/intellij-community,izonder/intellij-community,fengbaicanhe/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,caot/intellij-community,michaelgallacher/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,amith01994/intellij-community,supersven/intellij-community,ahb0327/intellij-community,allotria/intellij-community,Distrotech/intellij-community,kdwink/intellij-community,diorcety/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,Distrotech/intellij-community,blademainer/intellij-community,consulo/consulo,lucafavatella/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,robovm/robovm-studio,ryano144/intellij-community,pwoodworth/intellij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,clumsy/intellij-community,fengbaicanhe/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,caot/intellij-community,izonder/intellij-community,semonte/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,samthor/intellij-community,robovm/robovm-studio,gnuhub/intellij-community,fitermay/intellij-community,petteyg/intellij-community,MichaelNedzelsky/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,orekyuu/intellij-community,samthor/intellij-community,xfournet/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,amith01994/intellij-community,diorcety/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,amith01994/intellij-community,slisson/intellij-community,slisson/intellij-community,hurricup/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,adedayo/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,diorcety/intellij-community,kool79/intellij-community,fengbaicanhe/intellij-community,ahb0327/intellij-community,blademainer/intellij-community,alphafoobar/intellij-community,ibinti/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,ol-loginov/intellij-community,amith01994/intellij-community,ivan-fedorov/in
tellij-community,izonder/intellij-community,muntasirsyed/intellij-community,tmpgit/intellij-community,caot/intellij-community,jexp/idea2,samthor/intellij-community,vladmm/intellij-community,FHannes/intellij-community,kool79/intellij-community,wreckJ/intellij-community,suncycheng/intellij-community,dslomov/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,wreckJ/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,orekyuu/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,ryano144/intellij-community,wreckJ/intellij-community,idea4bsd/idea4bsd,tmpgit/intellij-community,alphafoobar/intellij-community,allotria/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,joewalnes/idea-community,TangHao1987/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,kool79/intellij-community,ryano144/intellij-community,fitermay/intellij-community,xfournet/intellij-community,dslomov/intellij-community,da1z/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,amith01994/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,alphafoobar/intellij-community,signed/intellij-community,signed/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,samthor/intellij-community,signed/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,signed/intellij-community,da1z/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,caot/intellij-community,nicolargo/intellij-community,fnouama/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,blademainer/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,retomerz/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,consulo/consulo,asedunov/intellij-community,slisson/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,signed/intellij-community,slisson/intellij-community,kdwink/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,ivan-fedorov/intellij-community,fitermay/intellij-community,alphafoobar/intellij-community,xfournet/intellij-community,diorcety/intellij-community,kool79/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,kool79/intellij-community,vladmm/intellij-community,semonte/intellij-community,caot/intellij-community,holmes/intellij-community,vladmm/intellij-community,caot/intellij-community,retomerz/intellij-community,petteyg/intellij-community,clumsy/intellij-community,ernestp/consulo,samthor/intellij-community,fitermay/intellij-community,tmpgit/intellij-community,signed/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,joewalnes/idea-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,fengbaicanhe/intelli
j-community,mglukhikh/intellij-community,holmes/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,supersven/intellij-community,vvv1559/intellij-community,MER-GROUP/intellij-community,wreckJ/intellij-community,youdonghai/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,fnouama/intellij-community,blademainer/intellij-community,clumsy/intellij-community,ryano144/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,kdwink/intellij-community,signed/intellij-community,consulo/consulo,kdwink/intellij-community,izonder/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,izonder/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,petteyg/intellij-community,semonte/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,nicolargo/intellij-community,FHannes/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,FHannes/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,samthor/intellij-community,consulo/consulo,youdonghai/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,fnouama/intellij-community,jexp/idea2,nicolargo/intellij-community,asedunov/intellij-community,retomerz/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,clumsy/intellij-community,idea4bsd/idea4bsd,vladmm/intellij-community,wreckJ/intellij-community,adedayo/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,semonte/intellij-community,hurricup/intellij-community,Lekanich/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,slisson/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,TangHao1987/intellij-community,fengbaicanhe/intellij-community,adedayo/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,ftomassetti/intellij-community,signed/intellij-community,semonte/intellij-community,ibinti/intellij-community,semonte/intellij-community,salguarnieri/intellij-community,supersven/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,adedayo/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,adedayo/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,joewalnes/idea-community,diorcety/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,fnouama/intellij-community,TangHao1987/intellij-community,akosyakov/intellij-community,jagguli/intellij-comm
unity,MER-GROUP/intellij-community,vladmm/intellij-community,da1z/intellij-community,ahb0327/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,jagguli/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,Distrotech/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,allotria/intellij-community,clumsy/intellij-community,ernestp/consulo,vvv1559/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,TangHao1987/intellij-community,robovm/robovm-studio,adedayo/intellij-community,ernestp/consulo,blademainer/intellij-community,fitermay/intellij-community,kool79/intellij-community,jagguli/intellij-community,MichaelNedzelsky/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,dslomov/intellij-community,idea4bsd/idea4bsd,pwoodworth/intellij-community,joewalnes/idea-community,idea4bsd/idea4bsd,joewalnes/idea-community,Distrotech/intellij-community,akosyakov/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,tmpgit/intellij-community,signed/intellij-community,lucafavatella/intellij-community,izonder/intellij-community,clumsy/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,izonder/intellij-community,adedayo/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,youdonghai/intellij-community,fengbaicanhe/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,vladmm/intellij-community,ftomassetti/intellij-community,SerCeMan/intellij-community,MER-GROUP/intellij-community,allotria/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,TangHao1987/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,orekyuu/intellij-community,blademainer/intellij-community,ftomassetti/intellij-community,kool79/intellij-community,asedunov/intellij-community,orekyuu/intellij-community,petteyg/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,jexp/idea2,consulo/consulo,ivan-fedorov/intellij-community,FHannes/intellij-community,dslomov/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,da1z/intellij-community,holmes/intellij-community,youdonghai/intellij-community,jexp/idea2,da1z/intellij-community,semonte/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,jagguli/intellij-community,orekyuu/intellij-community,apixandru/intellij-community,kool79/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,adedayo/intellij-community,jagguli/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,robovm/robovm-studio,samthor/intellij-community,da1z/intellij-community,akosyakov/intellij-community,muntasirsyed/intellij-community,Distrotech
/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,MichaelNedzelsky/intellij-community,dslomov/intellij-community,ibinti/intellij-community,pwoodworth/intellij-community,slisson/intellij-community,alphafoobar/intellij-community,retomerz/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,apixandru/intellij-community,fengbaicanhe/intellij-community,ryano144/intellij-community,semonte/intellij-community,asedunov/intellij-community,gnuhub/intellij-community,supersven/intellij-community,caot/intellij-community,ibinti/intellij-community,FHannes/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,kool79/intellij-community,retomerz/intellij-community,diorcety/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,pwoodworth/intellij-community,michaelgallacher/intellij-community,joewalnes/idea-community,holmes/intellij-community,supersven/intellij-community,robovm/robovm-studio,allotria/intellij-community,petteyg/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,da1z/intellij-community,akosyakov/intellij-community,tmpgit/intellij-community,holmes/intellij-community,retomerz/intellij-community,akosyakov/intellij-community,supersven/intellij-community,ryano144/intellij-community,diorcety/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,izonder/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,amith01994/intellij-community,FHannes/intellij-community,slisson/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,vladmm/intellij-community,robovm/robovm-studio,akosyakov/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,petteyg/intellij-community,semonte/intellij-community,retomerz/intellij-community,izonder/intellij-community,ahb0327/intellij-community,allotria/intellij-community,apixandru/intellij-community,dslomov/intellij-community,ryano144/intellij-community,allotria/intellij-community,petteyg/intellij-community,FHannes/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,pwoodworth/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,caot/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,joewalnes/idea-community,Distrotech/intellij-community,vvv1559/intellij-community,wreckJ/intellij-community,ryano144/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,nicolargo/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,da1z/intellij-community,FHannes/intellij-community,izonder/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,caot/intellij-community,alphafoobar/intellij-community,SerCeMan/intellij-community,holmes/intellij-community,fitermay/intellij-community,caot/intellij-community,joewalnes/idea-community,mglukhikh/intellij-community,ibinti/intellij-community,muntasirsyed/intellij-community,salguarnieri/intellij-community,ivan-fedorov/intellij-community,xfournet/intellij-community,clumsy/intellij-community,kdwink/intellij-community,retomerz/intellij-community,petteyg/intellij-community,muntasirsyed/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,kool79/intellij-communi
ty,petteyg/intellij-community,vladmm/intellij-community,vladmm/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,allotria/intellij-community,signed/intellij-community,ernestp/consulo,SerCeMan/intellij-community,kdwink/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,apixandru/intellij-community
|
package com.intellij.localvcs;
import java.util.ArrayList;
import java.util.List;
public class LocalVcs {
private Storage myStorage;
private ChangeList myChangeList;
private RootEntry myRoot;
private Integer myEntryCounter;
private List<Change> myPendingChanges = new ArrayList<Change>();
public LocalVcs(Storage s) {
myStorage = s;
load();
}
private void load() {
myChangeList = myStorage.loadChangeList();
myRoot = myStorage.loadRootEntry();
myEntryCounter = myStorage.loadCounter();
}
protected void store() {
myStorage.storeChangeList(myChangeList);
myStorage.storeRootEntry(myRoot);
myStorage.storeCounter(myEntryCounter);
}
public boolean hasEntry(Path path) {
return myRoot.hasEntry(path);
}
public Entry getEntry(Path path) {
return myRoot.getEntry(path);
}
public void createFile(Path path, String content) {
myPendingChanges.add(new CreateFileChange(path, content, getNextId()));
}
public void createDirectory(Path path) {
myPendingChanges.add(new CreateDirectoryChange(path, getNextId()));
}
private Integer getNextId() {
return myEntryCounter++;
}
public void changeFileContent(Path path, String content) {
myPendingChanges.add(new ChangeFileContentChange(path, content));
}
public void rename(Path path, String newName) {
myPendingChanges.add(new RenameChange(path, newName));
}
public void move(Path path, Path newParent) {
myPendingChanges.add(new MoveChange(path, newParent));
}
public void delete(Path path) {
myPendingChanges.add(new DeleteChange(path));
}
public Boolean isClean() {
return myPendingChanges.isEmpty();
}
private void clearPendingChanges() {
myPendingChanges = new ArrayList<Change>();
}
public void apply() {
ChangeSet cs = new ChangeSet(myPendingChanges);
myRoot = myChangeList.applyChangeSetOn(myRoot, cs);
clearPendingChanges();
store();
}
public void revert() {
clearPendingChanges();
myRoot = myChangeList.revertOn(myRoot);
}
public void putLabel(String label) {
myChangeList.setLabel(myRoot, label);
}
public RootEntry getSnapshot(String label) {
// todo rename me
for (RootEntry r : getHistory()) {
if (label.equals(myChangeList.getLabel(r))) return r;
}
throw new LocalVcsException();
}
public List<Entry> getEntryHistory(Path path) {
// todo optimize me and clean up this mess
if (!hasEntry(path)) throw new LocalVcsException();
List<Entry> result = new ArrayList<Entry>();
Integer id = getEntry(path).getObjectId();
for (RootEntry r : getHistory()) {
if (!r.hasEntry(id)) break;
result.add(r.getEntry(id));
}
return result;
}
public List<RootEntry> getHistory() {
List<RootEntry> result = new ArrayList<RootEntry>();
RootEntry r = myRoot;
while (r.canBeReverted()) {
result.add(r);
r = myChangeList.revertOn(r);
}
return result;
}
}
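For readability, a minimal usage sketch of the API above follows; it is not part of the original file. It assumes a no-argument Storage constructor and a Path(String) constructor, neither of which is shown in this snippet.
// Hypothetical usage sketch for the LocalVcs API above; not part of the original file.
// The Storage() and Path(String) constructors are assumptions made for illustration only.
class LocalVcsUsageSketch {
    static void demo() {
        LocalVcs vcs = new LocalVcs(new Storage());                    // assumed constructor
        vcs.createDirectory(new Path("docs"));                         // queue pending changes
        vcs.createFile(new Path("docs/readme.txt"), "hello");
        vcs.apply();                                                   // build a change set, apply it, and persist
        vcs.putLabel("initial import");
        vcs.changeFileContent(new Path("docs/readme.txt"), "hello, world");
        vcs.apply();
        RootEntry labeled = vcs.getSnapshot("initial import");         // root entry as of the label
        java.util.List<Entry> history = vcs.getEntryHistory(new Path("docs/readme.txt"));
    }
}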
|
LocalVcs/src/com/intellij/localvcs/LocalVcs.java
|
package com.intellij.localvcs;
import java.util.ArrayList;
import java.util.List;
public class LocalVcs {
private Storage myStorage;
private ChangeList myChangeList;
private RootEntry myRoot;
private Integer myEntryCounter;
private List<Change> myPendingChanges = new ArrayList<Change>();
public LocalVcs(Storage s) {
myStorage = s;
load();
}
private void load() {
myChangeList = myStorage.loadChangeList();
myRoot = myStorage.loadRootEntry();
myEntryCounter = myStorage.loadCounter();
}
protected void store() {
myStorage.storeChangeList(myChangeList);
myStorage.storeRootEntry(myRoot);
myStorage.storeCounter(myEntryCounter);
}
public boolean hasEntry(Path path) {
return myRoot.hasEntry(path);
}
public Entry getEntry(Path path) {
return myRoot.getEntry(path);
}
public List<Entry> getEntryHistory(Path path) {
// todo optimize me and clean up this mess
if (!hasEntry(path)) throw new LocalVcsException();
List<Entry> result = new ArrayList<Entry>();
Integer id = getEntry(path).getObjectId();
for (RootEntry r : getHistory()) {
if (!r.hasEntry(id)) break;
result.add(r.getEntry(id));
}
return result;
}
public void createFile(Path path, String content) {
myPendingChanges.add(new CreateFileChange(path, content, getNextId()));
}
public void createDirectory(Path path) {
myPendingChanges.add(new CreateDirectoryChange(path, getNextId()));
}
private Integer getNextId() {
return myEntryCounter++;
}
public void changeFileContent(Path path, String content) {
myPendingChanges.add(new ChangeFileContentChange(path, content));
}
public void rename(Path path, String newName) {
myPendingChanges.add(new RenameChange(path, newName));
}
public void move(Path path, Path newParent) {
myPendingChanges.add(new MoveChange(path, newParent));
}
public void delete(Path path) {
myPendingChanges.add(new DeleteChange(path));
}
public Boolean isClean() {
return myPendingChanges.isEmpty();
}
private void clearPendingChanges() {
myPendingChanges = new ArrayList<Change>();
}
public void apply() {
ChangeSet cs = new ChangeSet(myPendingChanges);
myRoot = myChangeList.applyChangeSetOn(myRoot, cs);
clearPendingChanges();
store();
}
public void revert() {
clearPendingChanges();
myRoot = myChangeList.revertOn(myRoot);
}
public void putLabel(String label) {
myChangeList.setLabel(myRoot, label);
}
public RootEntry getSnapshot(String label) {
// todo rename me
for (RootEntry r : getHistory()) {
if (label.equals(myChangeList.getLabel(r))) return r;
}
throw new LocalVcsException();
}
public List<RootEntry> getHistory() {
List<RootEntry> result = new ArrayList<RootEntry>();
RootEntry r = myRoot;
while (r.canBeReverted()) {
result.add(r);
r = myChangeList.revertOn(r);
}
return result;
}
}
|
(no message)
|
LocalVcs/src/com/intellij/localvcs/LocalVcs.java
|
(no message)
|
|
Java
|
apache-2.0
|
9f3e916bec7be30d038961faab968f2d1ff9af4b
| 0
|
EightBitBoy/hijacr,EightBitBoy/hijacr
|
package de.eightbitboy.hijacr.fragments;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
public class PagerFragmentAdapter extends FragmentPagerAdapter {
public class Pages {
public static final int COMIC_LIST = 0;
public static final int COMIC_VIEWER = 1;
private static final int NUMBER_ITEMS = 2;
}
public PagerFragmentAdapter(FragmentManager fragmentManager) {
super(fragmentManager);
}
@Override
public Fragment getItem(int position) {
switch (position) {
case Pages.COMIC_LIST:
return new ComicListFragment();
case Pages.COMIC_VIEWER:
return new ComicViewerFragment();
default:
return null;
}
}
@Override
public int getCount() {
return Pages.NUMBER_ITEMS;
}
//TODO improve this
@Override
public CharSequence getPageTitle(int position) {
if (position == 0) {
return "List";
}
if (position == 1) {
return "Viewer";
}
return "Page " + position;
}
}
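As a point of reference, the adapter above is typically attached to a ViewPager from a host activity. The sketch below is illustrative only; the layout resource and view id (R.layout.activity_main, R.id.pager) are assumptions, not part of the original project.
// Hypothetical host activity showing how the adapter might be wired up.
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.support.v4.view.ViewPager;

public class PagerHostActivitySketch extends FragmentActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);                  // assumed layout containing the pager
        ViewPager pager = (ViewPager) findViewById(R.id.pager);  // assumed view id
        pager.setAdapter(new PagerFragmentAdapter(getSupportFragmentManager()));
        pager.setCurrentItem(PagerFragmentAdapter.Pages.COMIC_LIST);
    }
}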
|
app/src/main/java/de/eightbitboy/hijacr/fragments/PagerFragmentAdapter.java
|
package de.eightbitboy.hijacr.fragments;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
public class PagerFragmentAdapter extends FragmentPagerAdapter {
public class Pages {
public static final int COMIC_LIST = 0;
public static final int COMIC_VIEWER = 1;
private static final int NUMBER_ITEMS = 2;
}
public PagerFragmentAdapter(FragmentManager fragmentManager) {
super(fragmentManager);
}
@Override
public Fragment getItem(int position) {
switch (position) {
case Pages.COMIC_LIST:
return new ComicListFragment();
case Pages.COMIC_VIEWER:
return new ComicViewerFragment();
default:
return null;
}
}
@Override
public int getCount() {
return Pages.NUMBER_ITEMS;
}
@Override
public CharSequence getPageTitle(int position) {
return "Page " + position;
}
}
|
Adjust tab titles
|
app/src/main/java/de/eightbitboy/hijacr/fragments/PagerFragmentAdapter.java
|
Adjust tab titles
|
|
Java
|
apache-2.0
|
6b07f42ecd90148fce31aaa9910c1a3bede92a87
| 0
|
lindong28/kafka,lindong28/kafka,lindong28/kafka,guozhangwang/kafka,guozhangwang/kafka,apache/kafka,apache/kafka,guozhangwang/kafka,guozhangwang/kafka,lindong28/kafka,apache/kafka,apache/kafka
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.clients.producer;
import org.apache.kafka.clients.ClientDnsLookup;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.internals.DefaultPartitioner;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.common.config.SecurityConfig;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import static org.apache.kafka.common.config.ConfigDef.Range.atLeast;
import static org.apache.kafka.common.config.ConfigDef.Range.between;
import static org.apache.kafka.common.config.ConfigDef.ValidString.in;
/**
* Configuration for the Kafka Producer. Documentation for these configurations can be found in the <a
* href="http://kafka.apache.org/documentation.html#producerconfigs">Kafka documentation</a>
*/
public class ProducerConfig extends AbstractConfig {
private static final Logger log = LoggerFactory.getLogger(ProducerConfig.class);
/*
* NOTE: DO NOT CHANGE EITHER CONFIG STRINGS OR THEIR JAVA VARIABLE NAMES AS THESE ARE PART OF THE PUBLIC API AND
* CHANGE WILL BREAK USER CODE.
*/
private static final ConfigDef CONFIG;
/** <code>bootstrap.servers</code> */
public static final String BOOTSTRAP_SERVERS_CONFIG = CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG;
/** <code>client.dns.lookup</code> */
public static final String CLIENT_DNS_LOOKUP_CONFIG = CommonClientConfigs.CLIENT_DNS_LOOKUP_CONFIG;
/** <code>metadata.max.age.ms</code> */
public static final String METADATA_MAX_AGE_CONFIG = CommonClientConfigs.METADATA_MAX_AGE_CONFIG;
private static final String METADATA_MAX_AGE_DOC = CommonClientConfigs.METADATA_MAX_AGE_DOC;
/** <code>metadata.max.idle.ms</code> */
public static final String METADATA_MAX_IDLE_CONFIG = "metadata.max.idle.ms";
private static final String METADATA_MAX_IDLE_DOC =
"Controls how long the producer will cache metadata for a topic that's idle. If the elapsed " +
"time since a topic was last produced to exceeds the metadata idle duration, then the topic's " +
"metadata is forgotten and the next access to it will force a metadata fetch request.";
/** <code>batch.size</code> */
public static final String BATCH_SIZE_CONFIG = "batch.size";
private static final String BATCH_SIZE_DOC = "The producer will attempt to batch records together into fewer requests whenever multiple records are being sent"
+ " to the same partition. This helps performance on both the client and the server. This configuration controls the "
+ "default batch size in bytes. "
+ "<p>"
+ "No attempt will be made to batch records larger than this size. "
+ "<p>"
+ "Requests sent to brokers will contain multiple batches, one for each partition with data available to be sent. "
+ "<p>"
+ "A small batch size will make batching less common and may reduce throughput (a batch size of zero will disable "
+ "batching entirely). A very large batch size may use memory a bit more wastefully as we will always allocate a "
+ "buffer of the specified batch size in anticipation of additional records."
+ "<p>"
+ "Note: This setting gives the upper bound of the batch size to be sent. If we have fewer than this many bytes accumulated "
+ "for this partition, we will 'linger' for the <code>linger.ms</code> time waiting for more records to show up. "
+ "This <code>linger.ms</code> setting defaults to 0, which means we'll immediately send out a record even the accumulated "
+ "batch size is under this <code>batch.size</code> setting.";
/** <code>acks</code> */
public static final String ACKS_CONFIG = "acks";
private static final String ACKS_DOC = "The number of acknowledgments the producer requires the leader to have received before considering a request complete. This controls the "
+ " durability of records that are sent. The following settings are allowed: "
+ " <ul>"
+ " <li><code>acks=0</code> If set to zero then the producer will not wait for any acknowledgment from the"
+ " server at all. The record will be immediately added to the socket buffer and considered sent. No guarantee can be"
+ " made that the server has received the record in this case, and the <code>retries</code> configuration will not"
+ " take effect (as the client won't generally know of any failures). The offset given back for each record will"
+ " always be set to <code>-1</code>."
+ " <li><code>acks=1</code> This will mean the leader will write the record to its local log but will respond"
+ " without awaiting full acknowledgement from all followers. In this case should the leader fail immediately after"
+ " acknowledging the record but before the followers have replicated it then the record will be lost."
+ " <li><code>acks=all</code> This means the leader will wait for the full set of in-sync replicas to"
+ " acknowledge the record. This guarantees that the record will not be lost as long as at least one in-sync replica"
+ " remains alive. This is the strongest available guarantee. This is equivalent to the acks=-1 setting."
+ "</ul>"
+ "<p>"
+ "Note that enabling idempotence requires this config value to be 'all'."
+ " If conflicting configurations are set and idempotence is not explicitly enabled, idempotence is disabled.";
/** <code>linger.ms</code> */
public static final String LINGER_MS_CONFIG = "linger.ms";
private static final String LINGER_MS_DOC = "The producer groups together any records that arrive in between request transmissions into a single batched request. "
+ "Normally this occurs only under load when records arrive faster than they can be sent out. However in some circumstances the client may want to "
+ "reduce the number of requests even under moderate load. This setting accomplishes this by adding a small amount "
+ "of artificial delay—that is, rather than immediately sending out a record, the producer will wait for up to "
+ "the given delay to allow other records to be sent so that the sends can be batched together. This can be thought "
+ "of as analogous to Nagle's algorithm in TCP. This setting gives the upper bound on the delay for batching: once "
+ "we get <code>" + BATCH_SIZE_CONFIG + "</code> worth of records for a partition it will be sent immediately regardless of this "
+ "setting, however if we have fewer than this many bytes accumulated for this partition we will 'linger' for the "
+ "specified time waiting for more records to show up. This setting defaults to 0 (i.e. no delay). Setting <code>" + LINGER_MS_CONFIG + "=5</code>, "
+ "for example, would have the effect of reducing the number of requests sent but would add up to 5ms of latency to records sent in the absence of load.";
/** <code>request.timeout.ms</code> */
public static final String REQUEST_TIMEOUT_MS_CONFIG = CommonClientConfigs.REQUEST_TIMEOUT_MS_CONFIG;
private static final String REQUEST_TIMEOUT_MS_DOC = CommonClientConfigs.REQUEST_TIMEOUT_MS_DOC
+ " This should be larger than <code>replica.lag.time.max.ms</code> (a broker configuration)"
+ " to reduce the possibility of message duplication due to unnecessary producer retries.";
/** <code>delivery.timeout.ms</code> */
public static final String DELIVERY_TIMEOUT_MS_CONFIG = "delivery.timeout.ms";
private static final String DELIVERY_TIMEOUT_MS_DOC = "An upper bound on the time to report success or failure "
+ "after a call to <code>send()</code> returns. This limits the total time that a record will be delayed "
+ "prior to sending, the time to await acknowledgement from the broker (if expected), and the time allowed "
+ "for retriable send failures. The producer may report failure to send a record earlier than this config if "
+ "either an unrecoverable error is encountered, the retries have been exhausted, "
+ "or the record is added to a batch which reached an earlier delivery expiration deadline. "
+ "The value of this config should be greater than or equal to the sum of <code>" + REQUEST_TIMEOUT_MS_CONFIG + "</code> "
+ "and <code>" + LINGER_MS_CONFIG + "</code>.";
/** <code>client.id</code> */
public static final String CLIENT_ID_CONFIG = CommonClientConfigs.CLIENT_ID_CONFIG;
/** <code>send.buffer.bytes</code> */
public static final String SEND_BUFFER_CONFIG = CommonClientConfigs.SEND_BUFFER_CONFIG;
/** <code>receive.buffer.bytes</code> */
public static final String RECEIVE_BUFFER_CONFIG = CommonClientConfigs.RECEIVE_BUFFER_CONFIG;
/** <code>max.request.size</code> */
public static final String MAX_REQUEST_SIZE_CONFIG = "max.request.size";
private static final String MAX_REQUEST_SIZE_DOC =
"The maximum size of a request in bytes. This setting will limit the number of record " +
"batches the producer will send in a single request to avoid sending huge requests. " +
"This is also effectively a cap on the maximum uncompressed record batch size. Note that the server " +
"has its own cap on the record batch size (after compression if compression is enabled) which may be different from this.";
/** <code>reconnect.backoff.ms</code> */
public static final String RECONNECT_BACKOFF_MS_CONFIG = CommonClientConfigs.RECONNECT_BACKOFF_MS_CONFIG;
/** <code>reconnect.backoff.max.ms</code> */
public static final String RECONNECT_BACKOFF_MAX_MS_CONFIG = CommonClientConfigs.RECONNECT_BACKOFF_MAX_MS_CONFIG;
/** <code>max.block.ms</code> */
public static final String MAX_BLOCK_MS_CONFIG = "max.block.ms";
private static final String MAX_BLOCK_MS_DOC = "The configuration controls how long the <code>KafkaProducer</code>'s <code>send()</code>, <code>partitionsFor()</code>, "
+ "<code>initTransactions()</code>, <code>sendOffsetsToTransaction()</code>, <code>commitTransaction()</code> "
+ "and <code>abortTransaction()</code> methods will block. "
+ "For <code>send()</code> this timeout bounds the total time waiting for both metadata fetch and buffer allocation "
+ "(blocking in the user-supplied serializers or partitioner is not counted against this timeout). "
+ "For <code>partitionsFor()</code> this timeout bounds the time spent waiting for metadata if it is unavailable. "
+ "The transaction-related methods always block, but may timeout if "
+ "the transaction coordinator could not be discovered or did not respond within the timeout.";
/** <code>buffer.memory</code> */
public static final String BUFFER_MEMORY_CONFIG = "buffer.memory";
private static final String BUFFER_MEMORY_DOC = "The total bytes of memory the producer can use to buffer records waiting to be sent to the server. If records are "
+ "sent faster than they can be delivered to the server the producer will block for <code>" + MAX_BLOCK_MS_CONFIG + "</code> after which it will throw an exception."
+ "<p>"
+ "This setting should correspond roughly to the total memory the producer will use, but is not a hard bound since "
+ "not all memory the producer uses is used for buffering. Some additional memory will be used for compression (if "
+ "compression is enabled) as well as for maintaining in-flight requests.";
/** <code>retry.backoff.ms</code> */
public static final String RETRY_BACKOFF_MS_CONFIG = CommonClientConfigs.RETRY_BACKOFF_MS_CONFIG;
/** <code>compression.type</code> */
public static final String COMPRESSION_TYPE_CONFIG = "compression.type";
private static final String COMPRESSION_TYPE_DOC = "The compression type for all data generated by the producer. The default is none (i.e. no compression). Valid "
+ " values are <code>none</code>, <code>gzip</code>, <code>snappy</code>, <code>lz4</code>, or <code>zstd</code>. "
+ "Compression is of full batches of data, so the efficacy of batching will also impact the compression ratio (more batching means better compression).";
/** <code>metrics.sample.window.ms</code> */
public static final String METRICS_SAMPLE_WINDOW_MS_CONFIG = CommonClientConfigs.METRICS_SAMPLE_WINDOW_MS_CONFIG;
/** <code>metrics.num.samples</code> */
public static final String METRICS_NUM_SAMPLES_CONFIG = CommonClientConfigs.METRICS_NUM_SAMPLES_CONFIG;
/**
* <code>metrics.recording.level</code>
*/
public static final String METRICS_RECORDING_LEVEL_CONFIG = CommonClientConfigs.METRICS_RECORDING_LEVEL_CONFIG;
/** <code>metric.reporters</code> */
public static final String METRIC_REPORTER_CLASSES_CONFIG = CommonClientConfigs.METRIC_REPORTER_CLASSES_CONFIG;
// max.in.flight.requests.per.connection should be less than or equal to 5 when the idempotent producer is enabled, to ensure message ordering
private static final int MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_FOR_IDEMPOTENCE = 5;
/** <code>max.in.flight.requests.per.connection</code> */
public static final String MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION = "max.in.flight.requests.per.connection";
private static final String MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_DOC = "The maximum number of unacknowledged requests the client will send on a single connection before blocking."
+ " Note that if this config is set to be greater than 1 and <code>enable.idempotence</code> is set to false, there is a risk of"
+ " message re-ordering after a failed send due to retries (i.e., if retries are enabled)."
+ " Additionally, enabling idempotence requires this config value to be less than or equal to " + MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_FOR_IDEMPOTENCE + "."
+ " If conflicting configurations are set and idempotence is not explicitly enabled, idempotence is disabled.";
/** <code>retries</code> */
public static final String RETRIES_CONFIG = CommonClientConfigs.RETRIES_CONFIG;
private static final String RETRIES_DOC = "Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient error."
+ " Note that this retry is no different than if the client resent the record upon receiving the error."
+ " Produce requests will be failed before the number of retries has been exhausted if the timeout configured by"
+ " <code>" + DELIVERY_TIMEOUT_MS_CONFIG + "</code> expires first before successful acknowledgement. Users should generally"
+ " prefer to leave this config unset and instead use <code>" + DELIVERY_TIMEOUT_MS_CONFIG + "</code> to control"
+ " retry behavior."
+ "<p>"
+ "Enabling idempotence requires this config value to be greater than 0."
+ " If conflicting configurations are set and idempotence is not explicitly enabled, idempotence is disabled."
+ "<p>"
+ "Allowing retries while setting <code>enable.idempotence</code> to <code>false</code> and <code>" + MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION + "</code> to 1 will potentially change the"
+ " ordering of records because if two batches are sent to a single partition, and the first fails and is retried but the second"
+ " succeeds, then the records in the second batch may appear first.";
/** <code>key.serializer</code> */
public static final String KEY_SERIALIZER_CLASS_CONFIG = "key.serializer";
public static final String KEY_SERIALIZER_CLASS_DOC = "Serializer class for key that implements the <code>org.apache.kafka.common.serialization.Serializer</code> interface.";
/** <code>value.serializer</code> */
public static final String VALUE_SERIALIZER_CLASS_CONFIG = "value.serializer";
public static final String VALUE_SERIALIZER_CLASS_DOC = "Serializer class for value that implements the <code>org.apache.kafka.common.serialization.Serializer</code> interface.";
/** <code>socket.connection.setup.timeout.ms</code> */
public static final String SOCKET_CONNECTION_SETUP_TIMEOUT_MS_CONFIG = CommonClientConfigs.SOCKET_CONNECTION_SETUP_TIMEOUT_MS_CONFIG;
/** <code>socket.connection.setup.timeout.max.ms</code> */
public static final String SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_CONFIG = CommonClientConfigs.SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_CONFIG;
/** <code>connections.max.idle.ms</code> */
public static final String CONNECTIONS_MAX_IDLE_MS_CONFIG = CommonClientConfigs.CONNECTIONS_MAX_IDLE_MS_CONFIG;
/** <code>partitioner.class</code> */
public static final String PARTITIONER_CLASS_CONFIG = "partitioner.class";
private static final String PARTITIONER_CLASS_DOC = "A class to use to determine which partition to be send to when produce the records. Available options are:" +
"<ul>" +
"<li><code>org.apache.kafka.clients.producer.internals.DefaultPartitioner</code>: The default partitioner. " +
"This strategy will try sticking to a partition until the batch is full, or <code>linger.ms</code> is up. It works with the strategy:" +
"<ul>" +
"<li>If no partition is specified but a key is present, choose a partition based on a hash of the key</li>" +
"<li>If no partition or key is present, choose the sticky partition that changes when the batch is full, or <code>linger.ms</code> is up.</li>" +
"</ul>" +
"</li>" +
"<li><code>org.apache.kafka.clients.producer.RoundRobinPartitioner</code>: This partitioning strategy is that " +
"each record in a series of consecutive records will be sent to a different partition(no matter if the 'key' is provided or not), " +
"until we run out of partitions and start over again. Note: There's a known issue that will cause uneven distribution when new batch is created. " +
"Please check KAFKA-9965 for more detail." +
"</li>" +
"<li><code>org.apache.kafka.clients.producer.UniformStickyPartitioner</code>: This partitioning strategy will " +
"try sticking to a partition(no matter if the 'key' is provided or not) until the batch is full, or <code>linger.ms</code> is up." +
"</li>" +
"</ul>" +
"<p>Implementing the <code>org.apache.kafka.clients.producer.Partitioner</code> interface allows you to plug in a custom partitioner.";
/** <code>interceptor.classes</code> */
public static final String INTERCEPTOR_CLASSES_CONFIG = "interceptor.classes";
public static final String INTERCEPTOR_CLASSES_DOC = "A list of classes to use as interceptors. "
+ "Implementing the <code>org.apache.kafka.clients.producer.ProducerInterceptor</code> interface allows you to intercept (and possibly mutate) the records "
+ "received by the producer before they are published to the Kafka cluster. By default, there are no interceptors.";
/** <code>enable.idempotence</code> */
public static final String ENABLE_IDEMPOTENCE_CONFIG = "enable.idempotence";
public static final String ENABLE_IDEMPOTENCE_DOC = "When set to 'true', the producer will ensure that exactly one copy of each message is written in the stream. If 'false', producer "
+ "retries due to broker failures, etc., may write duplicates of the retried message in the stream. "
+ "Note that enabling idempotence requires <code>" + MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION + "</code> to be less than or equal to " + MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_FOR_IDEMPOTENCE
+ " (with message ordering preserved for any allowable value), <code>" + RETRIES_CONFIG + "</code> to be greater than 0, and <code>"
+ ACKS_CONFIG + "</code> must be 'all'. "
+ "<p>"
+ "Idempotence is enabled by default if no conflicting configurations are set. "
+ "If conflicting configurations are set and idempotence is not explicitly enabled, idempotence is disabled. "
+ "If idempotence is explicitly enabled and conflicting configurations are set, a <code>ConfigException</code> is thrown.";
/** <code> transaction.timeout.ms </code> */
public static final String TRANSACTION_TIMEOUT_CONFIG = "transaction.timeout.ms";
public static final String TRANSACTION_TIMEOUT_DOC = "The maximum amount of time in ms that the transaction coordinator will wait for a transaction status update from the producer before proactively aborting the ongoing transaction." +
"If this value is larger than the transaction.max.timeout.ms setting in the broker, the request will fail with a <code>InvalidTxnTimeoutException</code> error.";
/** <code> transactional.id </code> */
public static final String TRANSACTIONAL_ID_CONFIG = "transactional.id";
public static final String TRANSACTIONAL_ID_DOC = "The TransactionalId to use for transactional delivery. This enables reliability semantics which span multiple producer sessions since it allows the client to guarantee that transactions using the same TransactionalId have been completed prior to starting any new transactions. If no TransactionalId is provided, then the producer is limited to idempotent delivery. " +
"If a TransactionalId is configured, <code>enable.idempotence</code> is implied. " +
"By default the TransactionId is not configured, which means transactions cannot be used. " +
"Note that, by default, transactions require a cluster of at least three brokers which is the recommended setting for production; for development you can change this, by adjusting broker setting <code>transaction.state.log.replication.factor</code>.";
/**
* <code>security.providers</code>
*/
public static final String SECURITY_PROVIDERS_CONFIG = SecurityConfig.SECURITY_PROVIDERS_CONFIG;
private static final String SECURITY_PROVIDERS_DOC = SecurityConfig.SECURITY_PROVIDERS_DOC;
private static final AtomicInteger PRODUCER_CLIENT_ID_SEQUENCE = new AtomicInteger(1);
static {
CONFIG = new ConfigDef().define(BOOTSTRAP_SERVERS_CONFIG, Type.LIST, Collections.emptyList(), new ConfigDef.NonNullValidator(), Importance.HIGH, CommonClientConfigs.BOOTSTRAP_SERVERS_DOC)
.define(CLIENT_DNS_LOOKUP_CONFIG,
Type.STRING,
ClientDnsLookup.USE_ALL_DNS_IPS.toString(),
in(ClientDnsLookup.USE_ALL_DNS_IPS.toString(),
ClientDnsLookup.RESOLVE_CANONICAL_BOOTSTRAP_SERVERS_ONLY.toString()),
Importance.MEDIUM,
CommonClientConfigs.CLIENT_DNS_LOOKUP_DOC)
.define(BUFFER_MEMORY_CONFIG, Type.LONG, 32 * 1024 * 1024L, atLeast(0L), Importance.HIGH, BUFFER_MEMORY_DOC)
.define(RETRIES_CONFIG, Type.INT, Integer.MAX_VALUE, between(0, Integer.MAX_VALUE), Importance.HIGH, RETRIES_DOC)
.define(ACKS_CONFIG,
Type.STRING,
"all",
in("all", "-1", "0", "1"),
Importance.LOW,
ACKS_DOC)
.define(COMPRESSION_TYPE_CONFIG, Type.STRING, CompressionType.NONE.name, in(Utils.enumOptions(CompressionType.class)), Importance.HIGH, COMPRESSION_TYPE_DOC)
.define(BATCH_SIZE_CONFIG, Type.INT, 16384, atLeast(0), Importance.MEDIUM, BATCH_SIZE_DOC)
.define(LINGER_MS_CONFIG, Type.LONG, 0, atLeast(0), Importance.MEDIUM, LINGER_MS_DOC)
.define(DELIVERY_TIMEOUT_MS_CONFIG, Type.INT, 120 * 1000, atLeast(0), Importance.MEDIUM, DELIVERY_TIMEOUT_MS_DOC)
.define(CLIENT_ID_CONFIG, Type.STRING, "", Importance.MEDIUM, CommonClientConfigs.CLIENT_ID_DOC)
.define(SEND_BUFFER_CONFIG, Type.INT, 128 * 1024, atLeast(CommonClientConfigs.SEND_BUFFER_LOWER_BOUND), Importance.MEDIUM, CommonClientConfigs.SEND_BUFFER_DOC)
.define(RECEIVE_BUFFER_CONFIG, Type.INT, 32 * 1024, atLeast(CommonClientConfigs.RECEIVE_BUFFER_LOWER_BOUND), Importance.MEDIUM, CommonClientConfigs.RECEIVE_BUFFER_DOC)
.define(MAX_REQUEST_SIZE_CONFIG,
Type.INT,
1024 * 1024,
atLeast(0),
Importance.MEDIUM,
MAX_REQUEST_SIZE_DOC)
.define(RECONNECT_BACKOFF_MS_CONFIG, Type.LONG, 50L, atLeast(0L), Importance.LOW, CommonClientConfigs.RECONNECT_BACKOFF_MS_DOC)
.define(RECONNECT_BACKOFF_MAX_MS_CONFIG, Type.LONG, 1000L, atLeast(0L), Importance.LOW, CommonClientConfigs.RECONNECT_BACKOFF_MAX_MS_DOC)
.define(RETRY_BACKOFF_MS_CONFIG, Type.LONG, 100L, atLeast(0L), Importance.LOW, CommonClientConfigs.RETRY_BACKOFF_MS_DOC)
.define(MAX_BLOCK_MS_CONFIG,
Type.LONG,
60 * 1000,
atLeast(0),
Importance.MEDIUM,
MAX_BLOCK_MS_DOC)
.define(REQUEST_TIMEOUT_MS_CONFIG,
Type.INT,
30 * 1000,
atLeast(0),
Importance.MEDIUM,
REQUEST_TIMEOUT_MS_DOC)
.define(METADATA_MAX_AGE_CONFIG, Type.LONG, 5 * 60 * 1000, atLeast(0), Importance.LOW, METADATA_MAX_AGE_DOC)
.define(METADATA_MAX_IDLE_CONFIG,
Type.LONG,
5 * 60 * 1000,
atLeast(5000),
Importance.LOW,
METADATA_MAX_IDLE_DOC)
.define(METRICS_SAMPLE_WINDOW_MS_CONFIG,
Type.LONG,
30000,
atLeast(0),
Importance.LOW,
CommonClientConfigs.METRICS_SAMPLE_WINDOW_MS_DOC)
.define(METRICS_NUM_SAMPLES_CONFIG, Type.INT, 2, atLeast(1), Importance.LOW, CommonClientConfigs.METRICS_NUM_SAMPLES_DOC)
.define(METRICS_RECORDING_LEVEL_CONFIG,
Type.STRING,
Sensor.RecordingLevel.INFO.toString(),
in(Sensor.RecordingLevel.INFO.toString(), Sensor.RecordingLevel.DEBUG.toString(), Sensor.RecordingLevel.TRACE.toString()),
Importance.LOW,
CommonClientConfigs.METRICS_RECORDING_LEVEL_DOC)
.define(METRIC_REPORTER_CLASSES_CONFIG,
Type.LIST,
Collections.emptyList(),
new ConfigDef.NonNullValidator(),
Importance.LOW,
CommonClientConfigs.METRIC_REPORTER_CLASSES_DOC)
.define(MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION,
Type.INT,
5,
atLeast(1),
Importance.LOW,
MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_DOC)
.define(KEY_SERIALIZER_CLASS_CONFIG,
Type.CLASS,
Importance.HIGH,
KEY_SERIALIZER_CLASS_DOC)
.define(VALUE_SERIALIZER_CLASS_CONFIG,
Type.CLASS,
Importance.HIGH,
VALUE_SERIALIZER_CLASS_DOC)
.define(SOCKET_CONNECTION_SETUP_TIMEOUT_MS_CONFIG,
Type.LONG,
CommonClientConfigs.DEFAULT_SOCKET_CONNECTION_SETUP_TIMEOUT_MS,
Importance.MEDIUM,
CommonClientConfigs.SOCKET_CONNECTION_SETUP_TIMEOUT_MS_DOC)
.define(SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_CONFIG,
Type.LONG,
CommonClientConfigs.DEFAULT_SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS,
Importance.MEDIUM,
CommonClientConfigs.SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_DOC)
/* default is set to be a bit lower than the server default (10 min), to avoid both client and server closing connection at same time */
.define(CONNECTIONS_MAX_IDLE_MS_CONFIG,
Type.LONG,
9 * 60 * 1000,
Importance.MEDIUM,
CommonClientConfigs.CONNECTIONS_MAX_IDLE_MS_DOC)
.define(PARTITIONER_CLASS_CONFIG,
Type.CLASS,
DefaultPartitioner.class,
Importance.MEDIUM, PARTITIONER_CLASS_DOC)
.define(INTERCEPTOR_CLASSES_CONFIG,
Type.LIST,
Collections.emptyList(),
new ConfigDef.NonNullValidator(),
Importance.LOW,
INTERCEPTOR_CLASSES_DOC)
.define(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG,
Type.STRING,
CommonClientConfigs.DEFAULT_SECURITY_PROTOCOL,
Importance.MEDIUM,
CommonClientConfigs.SECURITY_PROTOCOL_DOC)
.define(SECURITY_PROVIDERS_CONFIG,
Type.STRING,
null,
Importance.LOW,
SECURITY_PROVIDERS_DOC)
.withClientSslSupport()
.withClientSaslSupport()
.define(ENABLE_IDEMPOTENCE_CONFIG,
Type.BOOLEAN,
true,
Importance.LOW,
ENABLE_IDEMPOTENCE_DOC)
.define(TRANSACTION_TIMEOUT_CONFIG,
Type.INT,
60000,
Importance.LOW,
TRANSACTION_TIMEOUT_DOC)
.define(TRANSACTIONAL_ID_CONFIG,
Type.STRING,
null,
new ConfigDef.NonEmptyString(),
Importance.LOW,
TRANSACTIONAL_ID_DOC);
}
@Override
protected Map<String, Object> postProcessParsedConfig(final Map<String, Object> parsedValues) {
Map<String, Object> refinedConfigs = CommonClientConfigs.postProcessReconnectBackoffConfigs(this, parsedValues);
postProcessAndValidateIdempotenceConfigs(refinedConfigs);
maybeOverrideClientId(refinedConfigs);
return refinedConfigs;
}
private void maybeOverrideClientId(final Map<String, Object> configs) {
String refinedClientId;
boolean userConfiguredClientId = this.originals().containsKey(CLIENT_ID_CONFIG);
if (userConfiguredClientId) {
refinedClientId = this.getString(CLIENT_ID_CONFIG);
} else {
String transactionalId = this.getString(TRANSACTIONAL_ID_CONFIG);
refinedClientId = "producer-" + (transactionalId != null ? transactionalId : PRODUCER_CLIENT_ID_SEQUENCE.getAndIncrement());
}
configs.put(CLIENT_ID_CONFIG, refinedClientId);
}
private void postProcessAndValidateIdempotenceConfigs(final Map<String, Object> configs) {
final Map<String, Object> originalConfigs = this.originals();
final String acksStr = parseAcks(this.getString(ACKS_CONFIG));
configs.put(ACKS_CONFIG, acksStr);
final boolean userConfiguredIdempotence = this.originals().containsKey(ENABLE_IDEMPOTENCE_CONFIG);
boolean idempotenceEnabled = this.getBoolean(ENABLE_IDEMPOTENCE_CONFIG);
boolean shouldDisableIdempotence = false;
// For idempotent producers, the values of `retries`, `acks`, and `max.in.flight.requests.per.connection` need validation
if (idempotenceEnabled) {
final int retries = this.getInt(RETRIES_CONFIG);
if (retries == 0) {
if (userConfiguredIdempotence) {
throw new ConfigException("Must set " + RETRIES_CONFIG + " to non-zero when using the idempotent producer.");
}
log.info("Idempotence will be disabled because {} is set to 0.", RETRIES_CONFIG);
shouldDisableIdempotence = true;
}
final short acks = Short.parseShort(acksStr);
if (acks != (short) -1) {
if (userConfiguredIdempotence) {
throw new ConfigException("Must set " + ACKS_CONFIG + " to all in order to use the idempotent " +
"producer. Otherwise we cannot guarantee idempotence.");
}
log.info("Idempotence will be disabled because {} is set to {}, not set to 'all'.", ACKS_CONFIG, acks);
shouldDisableIdempotence = true;
}
final int inFlightConnection = this.getInt(MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION);
if (MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_FOR_IDEMPOTENCE < inFlightConnection) {
if (userConfiguredIdempotence) {
throw new ConfigException("Must set " + MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION + " to at most 5" +
" to use the idempotent producer.");
}
log.warn("Idempotence will be disabled because {} is set to {}, which is greater than 5. " +
"Please note that in v4.0.0 and onward, this will become an error.", MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, inFlightConnection);
shouldDisableIdempotence = true;
}
}
if (shouldDisableIdempotence) {
configs.put(ENABLE_IDEMPOTENCE_CONFIG, false);
idempotenceEnabled = false;
}
// validate `transactional.id` after validating the idempotence-dependent configs because the `enable.idempotence` config might be overridden
boolean userConfiguredTransactions = originalConfigs.containsKey(TRANSACTIONAL_ID_CONFIG);
if (!idempotenceEnabled && userConfiguredTransactions) {
throw new ConfigException("Cannot set a " + ProducerConfig.TRANSACTIONAL_ID_CONFIG + " without also enabling idempotence.");
}
}
private static String parseAcks(String acksString) {
try {
return acksString.trim().equalsIgnoreCase("all") ? "-1" : Short.parseShort(acksString.trim()) + "";
} catch (NumberFormatException e) {
throw new ConfigException("Invalid configuration value for 'acks': " + acksString);
}
}
static Map<String, Object> appendSerializerToConfig(Map<String, Object> configs,
Serializer<?> keySerializer,
Serializer<?> valueSerializer) {
Map<String, Object> newConfigs = new HashMap<>(configs);
if (keySerializer != null)
newConfigs.put(KEY_SERIALIZER_CLASS_CONFIG, keySerializer.getClass());
if (valueSerializer != null)
newConfigs.put(VALUE_SERIALIZER_CLASS_CONFIG, valueSerializer.getClass());
return newConfigs;
}
public ProducerConfig(Properties props) {
super(CONFIG, props);
}
public ProducerConfig(Map<String, Object> props) {
super(CONFIG, props);
}
ProducerConfig(Map<?, ?> props, boolean doLog) {
super(CONFIG, props, doLog);
}
public static Set<String> configNames() {
return CONFIG.names();
}
public static ConfigDef configDef() {
return new ConfigDef(CONFIG);
}
public static void main(String[] args) {
System.out.println(CONFIG.toHtml(4, config -> "producerconfigs_" + config));
}
}
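To make the batching, durability, and idempotence settings documented above concrete, here is a minimal, hedged sketch of how these constants are typically used when building a producer. The broker address and topic name are placeholders, not values taken from this file.
// Illustrative only: constructs a producer using the ProducerConfig constants defined above.
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class ProducerConfigUsageSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder broker
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // Batching and durability settings documented above.
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        props.put(ProducerConfig.LINGER_MS_CONFIG, 5);
        props.put(ProducerConfig.ACKS_CONFIG, "all");
        props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true);

        // The producer is Closeable, so try-with-resources flushes and closes it.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            producer.send(new ProducerRecord<>("example-topic", "key", "value")); // placeholder topic
        }
    }
}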
|
clients/src/main/java/org/apache/kafka/clients/producer/ProducerConfig.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.clients.producer;
import org.apache.kafka.clients.ClientDnsLookup;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.internals.DefaultPartitioner;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.common.config.SecurityConfig;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.record.CompressionType;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import static org.apache.kafka.common.config.ConfigDef.Range.atLeast;
import static org.apache.kafka.common.config.ConfigDef.Range.between;
import static org.apache.kafka.common.config.ConfigDef.ValidString.in;
/**
* Configuration for the Kafka Producer. Documentation for these configurations can be found in the <a
* href="http://kafka.apache.org/documentation.html#producerconfigs">Kafka documentation</a>
*/
public class ProducerConfig extends AbstractConfig {
private static final Logger log = LoggerFactory.getLogger(ProducerConfig.class);
/*
* NOTE: DO NOT CHANGE EITHER CONFIG STRINGS OR THEIR JAVA VARIABLE NAMES AS THESE ARE PART OF THE PUBLIC API AND
* CHANGE WILL BREAK USER CODE.
*/
private static final ConfigDef CONFIG;
/** <code>bootstrap.servers</code> */
public static final String BOOTSTRAP_SERVERS_CONFIG = CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG;
/** <code>client.dns.lookup</code> */
public static final String CLIENT_DNS_LOOKUP_CONFIG = CommonClientConfigs.CLIENT_DNS_LOOKUP_CONFIG;
/** <code>metadata.max.age.ms</code> */
public static final String METADATA_MAX_AGE_CONFIG = CommonClientConfigs.METADATA_MAX_AGE_CONFIG;
private static final String METADATA_MAX_AGE_DOC = CommonClientConfigs.METADATA_MAX_AGE_DOC;
/** <code>metadata.max.idle.ms</code> */
public static final String METADATA_MAX_IDLE_CONFIG = "metadata.max.idle.ms";
private static final String METADATA_MAX_IDLE_DOC =
"Controls how long the producer will cache metadata for a topic that's idle. If the elapsed " +
"time since a topic was last produced to exceeds the metadata idle duration, then the topic's " +
"metadata is forgotten and the next access to it will force a metadata fetch request.";
/** <code>batch.size</code> */
public static final String BATCH_SIZE_CONFIG = "batch.size";
private static final String BATCH_SIZE_DOC = "The producer will attempt to batch records together into fewer requests whenever multiple records are being sent"
+ " to the same partition. This helps performance on both the client and the server. This configuration controls the "
+ "default batch size in bytes. "
+ "<p>"
+ "No attempt will be made to batch records larger than this size. "
+ "<p>"
+ "Requests sent to brokers will contain multiple batches, one for each partition with data available to be sent. "
+ "<p>"
+ "A small batch size will make batching less common and may reduce throughput (a batch size of zero will disable "
+ "batching entirely). A very large batch size may use memory a bit more wastefully as we will always allocate a "
+ "buffer of the specified batch size in anticipation of additional records."
+ "<p>"
+ "Note: This setting gives the upper bound of the batch size to be sent. If we have fewer than this many bytes accumulated "
+ "for this partition, we will 'linger' for the <code>linger.ms</code> time waiting for more records to show up. "
+ "This <code>linger.ms</code> setting defaults to 0, which means we'll immediately send out a record even the accumulated "
+ "batch size is under this <code>batch.size</code> setting.";
/** <code>acks</code> */
public static final String ACKS_CONFIG = "acks";
private static final String ACKS_DOC = "The number of acknowledgments the producer requires the leader to have received before considering a request complete. This controls the "
+ " durability of records that are sent. The following settings are allowed: "
+ " <ul>"
+ " <li><code>acks=0</code> If set to zero then the producer will not wait for any acknowledgment from the"
+ " server at all. The record will be immediately added to the socket buffer and considered sent. No guarantee can be"
+ " made that the server has received the record in this case, and the <code>retries</code> configuration will not"
+ " take effect (as the client won't generally know of any failures). The offset given back for each record will"
+ " always be set to <code>-1</code>."
+ " <li><code>acks=1</code> This will mean the leader will write the record to its local log but will respond"
+ " without awaiting full acknowledgement from all followers. In this case should the leader fail immediately after"
+ " acknowledging the record but before the followers have replicated it then the record will be lost."
+ " <li><code>acks=all</code> This means the leader will wait for the full set of in-sync replicas to"
+ " acknowledge the record. This guarantees that the record will not be lost as long as at least one in-sync replica"
+ " remains alive. This is the strongest available guarantee. This is equivalent to the acks=-1 setting."
+ "</ul>"
+ "<p>"
+ "Note that enabling idempotence requires this config value to be 'all'."
+ " If conflicting configurations are set and idempotence is not explicitly enabled, idempotence is disabled.";
/** <code>linger.ms</code> */
public static final String LINGER_MS_CONFIG = "linger.ms";
private static final String LINGER_MS_DOC = "The producer groups together any records that arrive in between request transmissions into a single batched request. "
+ "Normally this occurs only under load when records arrive faster than they can be sent out. However in some circumstances the client may want to "
+ "reduce the number of requests even under moderate load. This setting accomplishes this by adding a small amount "
+ "of artificial delay—that is, rather than immediately sending out a record, the producer will wait for up to "
+ "the given delay to allow other records to be sent so that the sends can be batched together. This can be thought "
+ "of as analogous to Nagle's algorithm in TCP. This setting gives the upper bound on the delay for batching: once "
+ "we get <code>" + BATCH_SIZE_CONFIG + "</code> worth of records for a partition it will be sent immediately regardless of this "
+ "setting, however if we have fewer than this many bytes accumulated for this partition we will 'linger' for the "
+ "specified time waiting for more records to show up. This setting defaults to 0 (i.e. no delay). Setting <code>" + LINGER_MS_CONFIG + "=5</code>, "
+ "for example, would have the effect of reducing the number of requests sent but would add up to 5ms of latency to records sent in the absence of load.";
/** <code>request.timeout.ms</code> */
public static final String REQUEST_TIMEOUT_MS_CONFIG = CommonClientConfigs.REQUEST_TIMEOUT_MS_CONFIG;
private static final String REQUEST_TIMEOUT_MS_DOC = CommonClientConfigs.REQUEST_TIMEOUT_MS_DOC
+ " This should be larger than <code>replica.lag.time.max.ms</code> (a broker configuration)"
+ " to reduce the possibility of message duplication due to unnecessary producer retries.";
/** <code>delivery.timeout.ms</code> */
public static final String DELIVERY_TIMEOUT_MS_CONFIG = "delivery.timeout.ms";
private static final String DELIVERY_TIMEOUT_MS_DOC = "An upper bound on the time to report success or failure "
+ "after a call to <code>send()</code> returns. This limits the total time that a record will be delayed "
+ "prior to sending, the time to await acknowledgement from the broker (if expected), and the time allowed "
+ "for retriable send failures. The producer may report failure to send a record earlier than this config if "
+ "either an unrecoverable error is encountered, the retries have been exhausted, "
+ "or the record is added to a batch which reached an earlier delivery expiration deadline. "
+ "The value of this config should be greater than or equal to the sum of <code>" + REQUEST_TIMEOUT_MS_CONFIG + "</code> "
+ "and <code>" + LINGER_MS_CONFIG + "</code>.";
/** <code>client.id</code> */
public static final String CLIENT_ID_CONFIG = CommonClientConfigs.CLIENT_ID_CONFIG;
/** <code>send.buffer.bytes</code> */
public static final String SEND_BUFFER_CONFIG = CommonClientConfigs.SEND_BUFFER_CONFIG;
/** <code>receive.buffer.bytes</code> */
public static final String RECEIVE_BUFFER_CONFIG = CommonClientConfigs.RECEIVE_BUFFER_CONFIG;
/** <code>max.request.size</code> */
public static final String MAX_REQUEST_SIZE_CONFIG = "max.request.size";
private static final String MAX_REQUEST_SIZE_DOC =
"The maximum size of a request in bytes. This setting will limit the number of record " +
"batches the producer will send in a single request to avoid sending huge requests. " +
"This is also effectively a cap on the maximum uncompressed record batch size. Note that the server " +
"has its own cap on the record batch size (after compression if compression is enabled) which may be different from this.";
/** <code>reconnect.backoff.ms</code> */
public static final String RECONNECT_BACKOFF_MS_CONFIG = CommonClientConfigs.RECONNECT_BACKOFF_MS_CONFIG;
/** <code>reconnect.backoff.max.ms</code> */
public static final String RECONNECT_BACKOFF_MAX_MS_CONFIG = CommonClientConfigs.RECONNECT_BACKOFF_MAX_MS_CONFIG;
/** <code>max.block.ms</code> */
public static final String MAX_BLOCK_MS_CONFIG = "max.block.ms";
private static final String MAX_BLOCK_MS_DOC = "The configuration controls how long the <code>KafkaProducer</code>'s <code>send()</code>, <code>partitionsFor()</code>, "
+ "<code>initTransactions()</code>, <code>sendOffsetsToTransaction()</code>, <code>commitTransaction()</code> "
+ "and <code>abortTransaction()</code> methods will block. "
+ "For <code>send()</code> this timeout bounds the total time waiting for both metadata fetch and buffer allocation "
+ "(blocking in the user-supplied serializers or partitioner is not counted against this timeout). "
+ "For <code>partitionsFor()</code> this timeout bounds the time spent waiting for metadata if it is unavailable. "
+ "The transaction-related methods always block, but may timeout if "
+ "the transaction coordinator could not be discovered or did not respond within the timeout.";
/** <code>buffer.memory</code> */
public static final String BUFFER_MEMORY_CONFIG = "buffer.memory";
private static final String BUFFER_MEMORY_DOC = "The total bytes of memory the producer can use to buffer records waiting to be sent to the server. If records are "
+ "sent faster than they can be delivered to the server the producer will block for <code>" + MAX_BLOCK_MS_CONFIG + "</code> after which it will throw an exception."
+ "<p>"
+ "This setting should correspond roughly to the total memory the producer will use, but is not a hard bound since "
+ "not all memory the producer uses is used for buffering. Some additional memory will be used for compression (if "
+ "compression is enabled) as well as for maintaining in-flight requests.";
/** <code>retry.backoff.ms</code> */
public static final String RETRY_BACKOFF_MS_CONFIG = CommonClientConfigs.RETRY_BACKOFF_MS_CONFIG;
/** <code>compression.type</code> */
public static final String COMPRESSION_TYPE_CONFIG = "compression.type";
private static final String COMPRESSION_TYPE_DOC = "The compression type for all data generated by the producer. The default is none (i.e. no compression). Valid "
+ " values are <code>none</code>, <code>gzip</code>, <code>snappy</code>, <code>lz4</code>, or <code>zstd</code>. "
+ "Compression is of full batches of data, so the efficacy of batching will also impact the compression ratio (more batching means better compression).";
/** <code>metrics.sample.window.ms</code> */
public static final String METRICS_SAMPLE_WINDOW_MS_CONFIG = CommonClientConfigs.METRICS_SAMPLE_WINDOW_MS_CONFIG;
/** <code>metrics.num.samples</code> */
public static final String METRICS_NUM_SAMPLES_CONFIG = CommonClientConfigs.METRICS_NUM_SAMPLES_CONFIG;
/**
* <code>metrics.recording.level</code>
*/
public static final String METRICS_RECORDING_LEVEL_CONFIG = CommonClientConfigs.METRICS_RECORDING_LEVEL_CONFIG;
/** <code>metric.reporters</code> */
public static final String METRIC_REPORTER_CLASSES_CONFIG = CommonClientConfigs.METRIC_REPORTER_CLASSES_CONFIG;
// max.in.flight.requests.per.connection should be less than or equal to 5 when the idempotent producer is enabled, to ensure message ordering
private static final int MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_FOR_IDEMPOTENCE = 5;
/** <code>max.in.flight.requests.per.connection</code> */
public static final String MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION = "max.in.flight.requests.per.connection";
private static final String MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_DOC = "The maximum number of unacknowledged requests the client will send on a single connection before blocking."
+ " Note that if this config is set to be greater than 1 and <code>enable.idempotence</code> is set to false, there is a risk of"
+ " message re-ordering after a failed send due to retries (i.e., if retries are enabled)."
+ " Additionally, enabling idempotence requires this config value to be less than or equal to " + MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_FOR_IDEMPOTENCE + "."
+ " If conflicting configurations are set and idempotence is not explicitly enabled, idempotence is disabled.";
/** <code>retries</code> */
public static final String RETRIES_CONFIG = CommonClientConfigs.RETRIES_CONFIG;
private static final String RETRIES_DOC = "Setting a value greater than zero will cause the client to resend any record whose send fails with a potentially transient error."
+ " Note that this retry is no different than if the client resent the record upon receiving the error."
+ " Produce requests will be failed before the number of retries has been exhausted if the timeout configured by"
+ " <code>" + DELIVERY_TIMEOUT_MS_CONFIG + "</code> expires first before successful acknowledgement. Users should generally"
+ " prefer to leave this config unset and instead use <code>" + DELIVERY_TIMEOUT_MS_CONFIG + "</code> to control"
+ " retry behavior."
+ "<p>"
+ "Enabling idempotence requires this config value to be greater than 0."
+ " If conflicting configurations are set and idempotence is not explicitly enabled, idempotence is disabled."
+ "<p>"
+ "Allowing retries while setting <code>enable.idempotence</code> to <code>false</code> and <code>" + MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION + "</code> to 1 will potentially change the"
+ " ordering of records because if two batches are sent to a single partition, and the first fails and is retried but the second"
+ " succeeds, then the records in the second batch may appear first.";
/** <code>key.serializer</code> */
public static final String KEY_SERIALIZER_CLASS_CONFIG = "key.serializer";
public static final String KEY_SERIALIZER_CLASS_DOC = "Serializer class for key that implements the <code>org.apache.kafka.common.serialization.Serializer</code> interface.";
/** <code>value.serializer</code> */
public static final String VALUE_SERIALIZER_CLASS_CONFIG = "value.serializer";
public static final String VALUE_SERIALIZER_CLASS_DOC = "Serializer class for value that implements the <code>org.apache.kafka.common.serialization.Serializer</code> interface.";
/** <code>socket.connection.setup.timeout.ms</code> */
public static final String SOCKET_CONNECTION_SETUP_TIMEOUT_MS_CONFIG = CommonClientConfigs.SOCKET_CONNECTION_SETUP_TIMEOUT_MS_CONFIG;
/** <code>socket.connection.setup.timeout.max.ms</code> */
public static final String SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_CONFIG = CommonClientConfigs.SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_CONFIG;
/** <code>connections.max.idle.ms</code> */
public static final String CONNECTIONS_MAX_IDLE_MS_CONFIG = CommonClientConfigs.CONNECTIONS_MAX_IDLE_MS_CONFIG;
/** <code>partitioner.class</code> */
public static final String PARTITIONER_CLASS_CONFIG = "partitioner.class";
private static final String PARTITIONER_CLASS_DOC = "A class to use to determine which partition to be send to when produce the records. Available options are:" +
"<ul>" +
"<li><code>org.apache.kafka.clients.producer.internals.DefaultPartitioner</code>: The default partitioner. " +
"This strategy will try sticking to a partition until the batch is full, or <code>linger.ms</code> is up. It works with the strategy:" +
"<ul>" +
"<li>If no partition is specified but a key is present, choose a partition based on a hash of the key</li>" +
"<li>If no partition or key is present, choose the sticky partition that changes when the batch is full, or <code>linger.ms</code> is up.</li>" +
"</ul>" +
"</li>" +
"<li><code>org.apache.kafka.clients.producer.RoundRobinPartitioner</code>: This partitioning strategy is that " +
"each record in a series of consecutive records will be sent to a different partition(no matter if the 'key' is provided or not), " +
"until we run out of partitions and start over again. Note: There's a known issue that will cause uneven distribution when new batch is created. " +
"Please check KAFKA-9965 for more detail." +
"</li>" +
"<li><code>org.apache.kafka.clients.producer.UniformStickyPartitioner</code>: This partitioning strategy will " +
"try sticking to a partition(no matter if the 'key' is provided or not) until the batch is full, or <code>linger.ms</code> is up." +
"</li>" +
"</ul>" +
"<p>Implementing the <code>org.apache.kafka.clients.producer.Partitioner</code> interface allows you to plug in a custom partitioner.";
/** <code>interceptor.classes</code> */
public static final String INTERCEPTOR_CLASSES_CONFIG = "interceptor.classes";
public static final String INTERCEPTOR_CLASSES_DOC = "A list of classes to use as interceptors. "
+ "Implementing the <code>org.apache.kafka.clients.producer.ProducerInterceptor</code> interface allows you to intercept (and possibly mutate) the records "
+ "received by the producer before they are published to the Kafka cluster. By default, there are no interceptors.";
/** <code>enable.idempotence</code> */
public static final String ENABLE_IDEMPOTENCE_CONFIG = "enable.idempotence";
public static final String ENABLE_IDEMPOTENCE_DOC = "When set to 'true', the producer will ensure that exactly one copy of each message is written in the stream. If 'false', producer "
+ "retries due to broker failures, etc., may write duplicates of the retried message in the stream. "
+ "Note that enabling idempotence requires <code>" + MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION + "</code> to be less than or equal to " + MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_FOR_IDEMPOTENCE
+ " (with message ordering preserved for any allowable value), <code>" + RETRIES_CONFIG + "</code> to be greater than 0, and <code>"
+ ACKS_CONFIG + "</code> must be 'all'. "
+ "<p>"
+ "Idempotence is enabled by default if no conflicting configurations are set. "
+ "If conflicting configurations are set and idempotence is not explicitly enabled, idempotence is disabled. "
+ "If idempotence is explicitly enabled and conflicting configurations are set, a <code>ConfigException</code> is thrown.";
/** <code> transaction.timeout.ms </code> */
public static final String TRANSACTION_TIMEOUT_CONFIG = "transaction.timeout.ms";
    public static final String TRANSACTION_TIMEOUT_DOC = "The maximum amount of time in milliseconds that the transaction coordinator will wait for a transaction status update from the producer before proactively aborting the ongoing transaction. " +
            "If this value is larger than the <code>transaction.max.timeout.ms</code> setting in the broker, the request will fail with an <code>InvalidTxnTimeoutException</code> error.";
/** <code> transactional.id </code> */
public static final String TRANSACTIONAL_ID_CONFIG = "transactional.id";
public static final String TRANSACTIONAL_ID_DOC = "The TransactionalId to use for transactional delivery. This enables reliability semantics which span multiple producer sessions since it allows the client to guarantee that transactions using the same TransactionalId have been completed prior to starting any new transactions. If no TransactionalId is provided, then the producer is limited to idempotent delivery. " +
"If a TransactionalId is configured, <code>enable.idempotence</code> is implied. " +
"By default the TransactionId is not configured, which means transactions cannot be used. " +
"Note that, by default, transactions require a cluster of at least three brokers which is the recommended setting for production; for development you can change this, by adjusting broker setting <code>transaction.state.log.replication.factor</code>.";
/**
* <code>security.providers</code>
*/
public static final String SECURITY_PROVIDERS_CONFIG = SecurityConfig.SECURITY_PROVIDERS_CONFIG;
private static final String SECURITY_PROVIDERS_DOC = SecurityConfig.SECURITY_PROVIDERS_DOC;
private static final AtomicInteger PRODUCER_CLIENT_ID_SEQUENCE = new AtomicInteger(1);
static {
CONFIG = new ConfigDef().define(BOOTSTRAP_SERVERS_CONFIG, Type.LIST, Collections.emptyList(), new ConfigDef.NonNullValidator(), Importance.HIGH, CommonClientConfigs.BOOTSTRAP_SERVERS_DOC)
.define(CLIENT_DNS_LOOKUP_CONFIG,
Type.STRING,
ClientDnsLookup.USE_ALL_DNS_IPS.toString(),
in(ClientDnsLookup.USE_ALL_DNS_IPS.toString(),
ClientDnsLookup.RESOLVE_CANONICAL_BOOTSTRAP_SERVERS_ONLY.toString()),
Importance.MEDIUM,
CommonClientConfigs.CLIENT_DNS_LOOKUP_DOC)
.define(BUFFER_MEMORY_CONFIG, Type.LONG, 32 * 1024 * 1024L, atLeast(0L), Importance.HIGH, BUFFER_MEMORY_DOC)
.define(RETRIES_CONFIG, Type.INT, Integer.MAX_VALUE, between(0, Integer.MAX_VALUE), Importance.HIGH, RETRIES_DOC)
.define(ACKS_CONFIG,
Type.STRING,
"all",
in("all", "-1", "0", "1"),
Importance.LOW,
ACKS_DOC)
.define(COMPRESSION_TYPE_CONFIG, Type.STRING, CompressionType.NONE.name, in(Utils.enumOptions(CompressionType.class)), Importance.HIGH, COMPRESSION_TYPE_DOC)
.define(BATCH_SIZE_CONFIG, Type.INT, 16384, atLeast(0), Importance.MEDIUM, BATCH_SIZE_DOC)
.define(LINGER_MS_CONFIG, Type.LONG, 0, atLeast(0), Importance.MEDIUM, LINGER_MS_DOC)
.define(DELIVERY_TIMEOUT_MS_CONFIG, Type.INT, 120 * 1000, atLeast(0), Importance.MEDIUM, DELIVERY_TIMEOUT_MS_DOC)
.define(CLIENT_ID_CONFIG, Type.STRING, "", Importance.MEDIUM, CommonClientConfigs.CLIENT_ID_DOC)
.define(SEND_BUFFER_CONFIG, Type.INT, 128 * 1024, atLeast(CommonClientConfigs.SEND_BUFFER_LOWER_BOUND), Importance.MEDIUM, CommonClientConfigs.SEND_BUFFER_DOC)
.define(RECEIVE_BUFFER_CONFIG, Type.INT, 32 * 1024, atLeast(CommonClientConfigs.RECEIVE_BUFFER_LOWER_BOUND), Importance.MEDIUM, CommonClientConfigs.RECEIVE_BUFFER_DOC)
.define(MAX_REQUEST_SIZE_CONFIG,
Type.INT,
1024 * 1024,
atLeast(0),
Importance.MEDIUM,
MAX_REQUEST_SIZE_DOC)
.define(RECONNECT_BACKOFF_MS_CONFIG, Type.LONG, 50L, atLeast(0L), Importance.LOW, CommonClientConfigs.RECONNECT_BACKOFF_MS_DOC)
.define(RECONNECT_BACKOFF_MAX_MS_CONFIG, Type.LONG, 1000L, atLeast(0L), Importance.LOW, CommonClientConfigs.RECONNECT_BACKOFF_MAX_MS_DOC)
.define(RETRY_BACKOFF_MS_CONFIG, Type.LONG, 100L, atLeast(0L), Importance.LOW, CommonClientConfigs.RETRY_BACKOFF_MS_DOC)
.define(MAX_BLOCK_MS_CONFIG,
Type.LONG,
60 * 1000,
atLeast(0),
Importance.MEDIUM,
MAX_BLOCK_MS_DOC)
.define(REQUEST_TIMEOUT_MS_CONFIG,
Type.INT,
30 * 1000,
atLeast(0),
Importance.MEDIUM,
REQUEST_TIMEOUT_MS_DOC)
.define(METADATA_MAX_AGE_CONFIG, Type.LONG, 5 * 60 * 1000, atLeast(0), Importance.LOW, METADATA_MAX_AGE_DOC)
.define(METADATA_MAX_IDLE_CONFIG,
Type.LONG,
5 * 60 * 1000,
atLeast(5000),
Importance.LOW,
METADATA_MAX_IDLE_DOC)
.define(METRICS_SAMPLE_WINDOW_MS_CONFIG,
Type.LONG,
30000,
atLeast(0),
Importance.LOW,
CommonClientConfigs.METRICS_SAMPLE_WINDOW_MS_DOC)
.define(METRICS_NUM_SAMPLES_CONFIG, Type.INT, 2, atLeast(1), Importance.LOW, CommonClientConfigs.METRICS_NUM_SAMPLES_DOC)
.define(METRICS_RECORDING_LEVEL_CONFIG,
Type.STRING,
Sensor.RecordingLevel.INFO.toString(),
in(Sensor.RecordingLevel.INFO.toString(), Sensor.RecordingLevel.DEBUG.toString(), Sensor.RecordingLevel.TRACE.toString()),
Importance.LOW,
CommonClientConfigs.METRICS_RECORDING_LEVEL_DOC)
.define(METRIC_REPORTER_CLASSES_CONFIG,
Type.LIST,
Collections.emptyList(),
new ConfigDef.NonNullValidator(),
Importance.LOW,
CommonClientConfigs.METRIC_REPORTER_CLASSES_DOC)
.define(MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION,
Type.INT,
5,
atLeast(1),
Importance.LOW,
MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_DOC)
.define(KEY_SERIALIZER_CLASS_CONFIG,
Type.CLASS,
Importance.HIGH,
KEY_SERIALIZER_CLASS_DOC)
.define(VALUE_SERIALIZER_CLASS_CONFIG,
Type.CLASS,
Importance.HIGH,
VALUE_SERIALIZER_CLASS_DOC)
.define(SOCKET_CONNECTION_SETUP_TIMEOUT_MS_CONFIG,
Type.LONG,
CommonClientConfigs.DEFAULT_SOCKET_CONNECTION_SETUP_TIMEOUT_MS,
Importance.MEDIUM,
CommonClientConfigs.SOCKET_CONNECTION_SETUP_TIMEOUT_MS_DOC)
.define(SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_CONFIG,
Type.LONG,
CommonClientConfigs.DEFAULT_SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS,
Importance.MEDIUM,
CommonClientConfigs.SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_DOC)
/* default is set to be a bit lower than the server default (10 min), to avoid both client and server closing connection at same time */
.define(CONNECTIONS_MAX_IDLE_MS_CONFIG,
Type.LONG,
9 * 60 * 1000,
Importance.MEDIUM,
CommonClientConfigs.CONNECTIONS_MAX_IDLE_MS_DOC)
.define(PARTITIONER_CLASS_CONFIG,
Type.CLASS,
DefaultPartitioner.class,
Importance.MEDIUM, PARTITIONER_CLASS_DOC)
.define(INTERCEPTOR_CLASSES_CONFIG,
Type.LIST,
Collections.emptyList(),
new ConfigDef.NonNullValidator(),
Importance.LOW,
INTERCEPTOR_CLASSES_DOC)
.define(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG,
Type.STRING,
CommonClientConfigs.DEFAULT_SECURITY_PROTOCOL,
Importance.MEDIUM,
CommonClientConfigs.SECURITY_PROTOCOL_DOC)
.define(SECURITY_PROVIDERS_CONFIG,
Type.STRING,
null,
Importance.LOW,
SECURITY_PROVIDERS_DOC)
.withClientSslSupport()
.withClientSaslSupport()
.define(ENABLE_IDEMPOTENCE_CONFIG,
Type.BOOLEAN,
true,
Importance.LOW,
ENABLE_IDEMPOTENCE_DOC)
.define(TRANSACTION_TIMEOUT_CONFIG,
Type.INT,
60000,
Importance.LOW,
TRANSACTION_TIMEOUT_DOC)
.define(TRANSACTIONAL_ID_CONFIG,
Type.STRING,
null,
new ConfigDef.NonEmptyString(),
Importance.LOW,
TRANSACTIONAL_ID_DOC);
}
@Override
protected Map<String, Object> postProcessParsedConfig(final Map<String, Object> parsedValues) {
Map<String, Object> refinedConfigs = CommonClientConfigs.postProcessReconnectBackoffConfigs(this, parsedValues);
postProcessAndValidateIdempotenceConfigs(refinedConfigs);
maybeOverrideClientId(refinedConfigs);
return refinedConfigs;
}
private void maybeOverrideClientId(final Map<String, Object> configs) {
String refinedClientId;
boolean userConfiguredClientId = this.originals().containsKey(CLIENT_ID_CONFIG);
if (userConfiguredClientId) {
refinedClientId = this.getString(CLIENT_ID_CONFIG);
} else {
String transactionalId = this.getString(TRANSACTIONAL_ID_CONFIG);
refinedClientId = "producer-" + (transactionalId != null ? transactionalId : PRODUCER_CLIENT_ID_SEQUENCE.getAndIncrement());
}
configs.put(CLIENT_ID_CONFIG, refinedClientId);
}
private void postProcessAndValidateIdempotenceConfigs(final Map<String, Object> configs) {
final Map<String, Object> originalConfigs = this.originals();
final String acksStr = parseAcks(this.getString(ACKS_CONFIG));
configs.put(ACKS_CONFIG, acksStr);
final boolean userConfiguredIdempotence = this.originals().containsKey(ENABLE_IDEMPOTENCE_CONFIG);
boolean idempotenceEnabled = this.getBoolean(ENABLE_IDEMPOTENCE_CONFIG);
boolean shouldDisableIdempotence = false;
        // For idempotent producers, the values of `retries`, `acks`, and `max.in.flight.requests.per.connection` need validation
if (idempotenceEnabled) {
final int retries = this.getInt(RETRIES_CONFIG);
if (retries == 0) {
if (userConfiguredIdempotence) {
throw new ConfigException("Must set " + RETRIES_CONFIG + " to non-zero when using the idempotent producer.");
}
log.info("Idempotence will be disabled because {} is set to 0.", RETRIES_CONFIG, retries);
shouldDisableIdempotence = true;
}
final short acks = Short.valueOf(acksStr);
if (acks != (short) -1) {
if (userConfiguredIdempotence) {
throw new ConfigException("Must set " + ACKS_CONFIG + " to all in order to use the idempotent " +
"producer. Otherwise we cannot guarantee idempotence.");
}
log.info("Idempotence will be disabled because {} is set to {}, not set to 'all'.", ACKS_CONFIG, acks);
shouldDisableIdempotence = true;
}
final int inFlightConnection = this.getInt(MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION);
if (MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_FOR_IDEMPOTENCE < inFlightConnection) {
if (userConfiguredIdempotence) {
throw new ConfigException("Must set " + MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION + " to at most 5" +
" to use the idempotent producer.");
}
log.warn("Idempotence will be disabled because {} is set to {}, which is greater than 5. " +
"Please note that in v4.0.0 and onward, this will become an error.", MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, inFlightConnection);
shouldDisableIdempotence = true;
}
}
if (shouldDisableIdempotence) {
configs.put(ENABLE_IDEMPOTENCE_CONFIG, false);
idempotenceEnabled = false;
}
        // validate `transactional.id` after validating the idempotence-dependent configs because `enable.idempotence` might have been overridden
boolean userConfiguredTransactions = originalConfigs.containsKey(TRANSACTIONAL_ID_CONFIG);
if (!idempotenceEnabled && userConfiguredTransactions) {
throw new ConfigException("Cannot set a " + ProducerConfig.TRANSACTIONAL_ID_CONFIG + " without also enabling idempotence.");
}
}
private static String parseAcks(String acksString) {
try {
return acksString.trim().equalsIgnoreCase("all") ? "-1" : Short.parseShort(acksString.trim()) + "";
} catch (NumberFormatException e) {
throw new ConfigException("Invalid configuration value for 'acks': " + acksString);
}
}
static Map<String, Object> appendSerializerToConfig(Map<String, Object> configs,
Serializer<?> keySerializer,
Serializer<?> valueSerializer) {
Map<String, Object> newConfigs = new HashMap<>(configs);
if (keySerializer != null)
newConfigs.put(KEY_SERIALIZER_CLASS_CONFIG, keySerializer.getClass());
if (valueSerializer != null)
newConfigs.put(VALUE_SERIALIZER_CLASS_CONFIG, valueSerializer.getClass());
return newConfigs;
}
public ProducerConfig(Properties props) {
super(CONFIG, props);
}
public ProducerConfig(Map<String, Object> props) {
super(CONFIG, props);
}
ProducerConfig(Map<?, ?> props, boolean doLog) {
super(CONFIG, props, doLog);
}
public static Set<String> configNames() {
return CONFIG.names();
}
public static ConfigDef configDef() {
return new ConfigDef(CONFIG);
}
public static void main(String[] args) {
System.out.println(CONFIG.toHtml(4, config -> "producerconfigs_" + config));
}
}
|
MINOR: cleanup for postProcessAndValidateIdempotenceConfigs method (#12069)
Reviewers: Luke Chen <a747209bc9e65a9d7dd7589245c91b7bed721b13@gmail.com>
|
clients/src/main/java/org/apache/kafka/clients/producer/ProducerConfig.java
|
MINOR: cleanup for postProcessAndValidateIdempotenceConfigs method (#12069)
|
|
Java
|
apache-2.0
|
99783cad2aa2886cda971b0ba7852d5c6c69174b
| 0
|
rsudev/c-geo-opensource,auricgoldfinger/cgeo,brok85/cgeo,vishwakulkarni/cgeo,Bananeweizen/cgeo,ThibaultR/cgeo,superspindel/cgeo,kumy/cgeo,pstorch/cgeo,pstorch/cgeo,cgeo/cgeo,cgeo/cgeo,SammysHP/cgeo,S-Bartfast/cgeo,samueltardieu/cgeo,matej116/cgeo,xiaoyanit/cgeo,ThibaultR/cgeo,matej116/cgeo,vishwakulkarni/cgeo,kumy/cgeo,superspindel/cgeo,tobiasge/cgeo,xiaoyanit/cgeo,SammysHP/cgeo,tobiasge/cgeo,brok85/cgeo,yummy222/cgeo,auricgoldfinger/cgeo,rsudev/c-geo-opensource,matej116/cgeo,KublaikhanGeek/cgeo,ThibaultR/cgeo,yummy222/cgeo,mucek4/cgeo,Bananeweizen/cgeo,cgeo/cgeo,S-Bartfast/cgeo,samueltardieu/cgeo,S-Bartfast/cgeo,brok85/cgeo,mucek4/cgeo,auricgoldfinger/cgeo,Bananeweizen/cgeo,rsudev/c-geo-opensource,KublaikhanGeek/cgeo,cgeo/cgeo,KublaikhanGeek/cgeo,pstorch/cgeo,vishwakulkarni/cgeo,samueltardieu/cgeo,xiaoyanit/cgeo,mucek4/cgeo,kumy/cgeo,superspindel/cgeo,tobiasge/cgeo,SammysHP/cgeo,yummy222/cgeo
|
package cgeo.geocaching.connector.trackable;
import cgeo.geocaching.Trackable;
import cgeo.geocaching.connector.UserAction;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import java.util.Collections;
import java.util.List;
public class UnknownTrackableConnector extends AbstractTrackableConnector {
@Override
public boolean canHandleTrackable(final String geocode) {
return false;
}
@NonNull
@Override
public String getServiceTitle() {
throw new IllegalStateException("this connector does not have a corresponding name.");
}
@Override
public boolean hasTrackableUrls() {
return false;
}
@Override
@Nullable
public Trackable searchTrackable(final String geocode, final String guid, final String id) {
return null;
}
@Override
@NonNull
public TrackableBrand getBrand() {
return TrackableBrand.UNKNOWN;
}
@Override
@NonNull
public List<UserAction> getUserActions() {
return Collections.emptyList();
}
}
|
main/src/cgeo/geocaching/connector/trackable/UnknownTrackableConnector.java
|
package cgeo.geocaching.connector.trackable;
import cgeo.geocaching.Trackable;
import cgeo.geocaching.connector.UserAction;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import java.util.Collections;
import java.util.List;
public class UnknownTrackableConnector extends AbstractTrackableConnector {
@Override
public boolean canHandleTrackable(final String geocode) {
return false;
}
@NonNull
@Override
public String getServiceTitle() {
throw new IllegalStateException("this connector does not have a corresponding name.");
}
@Override
public boolean hasTrackableUrls() {
return false;
}
@Override
@Nullable
public Trackable searchTrackable(final String geocode, final String guid, final String id) {
return null;
}
@Override
@NonNull
public TrackableBrand getBrand() {
return TrackableBrand.UNKNOWN;
}
@Override
@NonNull
public List<UserAction> getUserActions() {
return Collections.EMPTY_LIST;
}
}
|
Use a checked entity instead of an unchecked one
|
main/src/cgeo/geocaching/connector/trackable/UnknownTrackableConnector.java
|
Use a checked entity instead of an unchecked one
|
|
Java
|
apache-2.0
|
f4990a56f18643ac7de66af723d668c1e79359b6
| 0
|
apache/cordova-amazon-fireos,apache/cordova-amazon-fireos,apache/cordova-amazon-fireos,apache/cordova-amazon-fireos
|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.cordova;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Locale;
import org.apache.cordova.Config;
import org.apache.cordova.CordovaInterface;
import org.apache.cordova.LOG;
import org.apache.cordova.PluginManager;
import org.apache.cordova.PluginResult;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
import com.amazon.android.webkit.AmazonWebBackForwardList;
import com.amazon.android.webkit.AmazonWebHistoryItem;
import com.amazon.android.webkit.AmazonWebChromeClient;
import com.amazon.android.webkit.AmazonWebKitFactories;
import com.amazon.android.webkit.AmazonWebSettings;
import com.amazon.android.webkit.AmazonWebView;
import com.amazon.android.webkit.AmazonWebKitFactory;
import android.widget.FrameLayout;
public class CordovaWebView extends AmazonWebView {
/*
* This class is our web view.
*
* @see <a href="http://developer.android.com/guide/webapps/webview.html">WebView guide</a>
* @see <a href="http://developer.android.com/reference/android/webkit/WebView.html">WebView</a>
*/
public static final String TAG = "CordovaWebView";
public static final String CORDOVA_VERSION = "3.4.0-dev";
private ArrayList<Integer> keyDownCodes = new ArrayList<Integer>();
private ArrayList<Integer> keyUpCodes = new ArrayList<Integer>();
public PluginManager pluginManager;
private boolean paused;
private BroadcastReceiver receiver;
/** Activities and other important classes **/
private CordovaInterface cordova;
CordovaWebViewClient viewClient;
@SuppressWarnings("unused")
private CordovaChromeClient chromeClient;
private String url;
// Flag to track that a loadUrl timeout occurred
int loadUrlTimeout = 0;
private boolean bound;
private boolean handleButton = false;
private long lastMenuEventTime = 0;
NativeToJsMessageQueue jsMessageQueue;
ExposedJsApi exposedJsApi;
/** custom view created by the browser (a video player for example) */
private View mCustomView;
private AmazonWebChromeClient.CustomViewCallback mCustomViewCallback;
private ActivityResult mResult = null;
private CordovaResourceApi resourceApi;
private static final String APPCACHE_DIR = "database";
private static final String APPCACHE_DIR_EMPTY = "NONEXISTENT_PATH";
private static final String SAFARI_UA = "Safari";
private static final String MOBILE_SAFARI_UA = "Mobile Safari";
private static final String CORDOVA_AMAZON_FIREOS_UA = "cordova-amazon-fireos/" + CORDOVA_VERSION;
private static final String LOCAL_STORAGE_DIR = "database";
/**
* Arbitrary size limit for app cache resources
*/
public static final long APP_CACHE_LIMIT = (1024 * 1024 * 50);
/**
* An enumeration to specify the desired back-end to use when constructing
* the WebView.
*/
public enum WebViewBackend {
/** The stock Android WebView back-end */
ANDROID,
        /** The Chromium AmazonWebView back-end */
CHROMIUM,
/**
* Automatically select the back-end depending on the device
* configuration
*/
AUTOMATIC
};
class ActivityResult {
int request;
int result;
Intent incoming;
public ActivityResult(int req, int res, Intent intent) {
request = req;
result = res;
incoming = intent;
}
}
static final FrameLayout.LayoutParams COVER_SCREEN_GRAVITY_CENTER =
new FrameLayout.LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.MATCH_PARENT,
Gravity.CENTER);
/**
* Constructor.
*
* @param context
*/
public CordovaWebView(Context context) {
super(context);
if (CordovaInterface.class.isInstance(context))
{
this.cordova = (CordovaInterface) context;
this.cordova.getFactory().initializeWebView(this, 0xFFFFFF, false, null);
}
else
{
Log.d(TAG, "Your activity must implement CordovaInterface to work");
}
this.loadConfiguration();
this.setup();
}
////fireos_change ////
/**
* Constructor
* The extraData bundle is needed for AmazonWebChromeClient.onCreateWindow callback.
* It's just an opaque data that needs to be passed from one call to the other.
*
* @param context
* @param extraData
*/
public CordovaWebView(Context context, Bundle extraData) {
super(context);
if (CordovaInterface.class.isInstance(context))
{
this.cordova = (CordovaInterface) context;
this.cordova.getFactory().initializeWebView(this, 0xFFFFFF, false, extraData);
}
else
{
Log.d(TAG, "Your activity must implement CordovaInterface to work");
}
this.loadConfiguration();
this.setup();
}
////fireos_change ////
/**
* Constructor.
*
* @param context
* @param attrs
*/
public CordovaWebView(Context context, AttributeSet attrs) {
super(context, attrs);
if (CordovaInterface.class.isInstance(context))
{
this.cordova = (CordovaInterface) context;
this.cordova.getFactory().initializeWebView(this, 0xFFFFFF, false, null);
}
else
{
Log.d(TAG, "Your activity must implement CordovaInterface to work");
}
this.setWebChromeClient(new CordovaChromeClient(this.cordova, this));
this.initWebViewClient(this.cordova);
this.loadConfiguration();
this.setup();
}
/**
* Constructor.
*
* @param context
* @param attrs
* @param defStyle
*
*/
public CordovaWebView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
if (CordovaInterface.class.isInstance(context))
{
this.cordova = (CordovaInterface) context;
this.cordova.getFactory().initializeWebView(this, 0xFFFFFF, false, null);
}
else
{
Log.d(TAG, "Your activity must implement CordovaInterface to work");
}
this.setWebChromeClient(new CordovaChromeClient(this.cordova, this));
this.loadConfiguration();
this.setup();
}
/**
* Constructor.
*
* @param context
* @param attrs
* @param defStyle
* @param privateBrowsing
*/
@TargetApi(11)
public CordovaWebView(Context context, AttributeSet attrs, int defStyle, boolean privateBrowsing) {
// super(context, attrs, defStyle, privateBrowsing); // DEPRECATED
super(context, attrs, defStyle);
if (CordovaInterface.class.isInstance(context))
{
this.cordova = (CordovaInterface) context;
this.cordova.getFactory().initializeWebView(this, 0xFFFFFF, privateBrowsing, null);
}
else
{
Log.d(TAG, "Your activity must implement CordovaInterface to work");
}
this.setWebChromeClient(new CordovaChromeClient(this.cordova));
this.initWebViewClient(this.cordova);
this.loadConfiguration();
this.setup();
}
/**
* set the WebViewClient, but provide special case handling for IceCreamSandwich.
*/
private void initWebViewClient(CordovaInterface cordova) {
if(android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.HONEYCOMB ||
android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.JELLY_BEAN_MR1)
{
this.setWebViewClient(new CordovaWebViewClient(this.cordova, this));
}
else
{
this.setWebViewClient(new IceCreamCordovaWebViewClient(this.cordova, this));
}
}
/**
* Initialize webview.
*/
@SuppressWarnings("deprecation")
@SuppressLint("NewApi")
private void setup() {
this.setInitialScale(0);
this.setVerticalScrollBarEnabled(false);
if (shouldRequestFocusOnInit()) {
this.requestFocusFromTouch();
}
// Enable JavaScript
AmazonWebSettings settings = this.getSettings();
settings.setJavaScriptEnabled(true);
settings.setMediaPlaybackRequiresUserGesture(false);
// Set the nav dump for HTC 2.x devices (disabling for ICS, deprecated entirely for Jellybean 4.2)
try {
Method gingerbread_getMethod = AmazonWebSettings.class.getMethod("setNavDump", new Class[] { boolean.class });
String manufacturer = android.os.Build.MANUFACTURER;
Log.d(TAG, "CordovaWebView is running on device made by: " + manufacturer);
if(android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.HONEYCOMB &&
android.os.Build.MANUFACTURER.contains("HTC"))
{
gingerbread_getMethod.invoke(settings, true);
}
} catch (NoSuchMethodException e) {
Log.d(TAG, "We are on a modern version of Android, we will deprecate HTC 2.3 devices in 2.8");
} catch (IllegalArgumentException e) {
Log.d(TAG, "Doing the NavDump failed with bad arguments");
} catch (IllegalAccessException e) {
Log.d(TAG, "This should never happen: IllegalAccessException means this isn't Android anymore");
} catch (InvocationTargetException e) {
Log.d(TAG, "This should never happen: InvocationTargetException means this isn't Android anymore.");
}
//We don't save any form data in the application
settings.setSaveFormData(false);
settings.setSavePassword(false);
// Jellybean rightfully tried to lock this down. Too bad they didn't give us a whitelist
// while we do this
if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1
|| (getWebViewBackend(this.cordova.getFactory()) == WebViewBackend.CHROMIUM))
Level16Apis.enableUniversalAccess(settings);
if (getWebViewBackend(this.cordova.getFactory()) == WebViewBackend.ANDROID) {
File appCacheDir = this.cordova.getActivity().getDir(APPCACHE_DIR, Context.MODE_PRIVATE);
if (appCacheDir.exists()) {
settings.setAppCachePath(appCacheDir.getPath());
settings.setAppCacheMaxSize(APP_CACHE_LIMIT);
settings.setAppCacheEnabled(true);
} else {
// shouldn't get here...
Log.e(TAG, "Unable to construct application cache directory, feature disabled");
}
File storageDir = this.cordova.getActivity().getDir(LOCAL_STORAGE_DIR, Context.MODE_PRIVATE);
if (storageDir.exists()) {
settings.setDatabasePath(storageDir.getPath());
settings.setDatabaseEnabled(true);
settings.setGeolocationDatabasePath(storageDir.getPath());
} else {
// shouldn't get here...
Log.e(TAG, "Unable to construct local storage directory, feature disabled");
}
} else {
// setting a custom path (as well as the max cache size) is not supported by Chromium,
// however setting the path to a non-null non-empty string is required for it to function
settings.setAppCachePath(APPCACHE_DIR_EMPTY);
settings.setAppCacheEnabled(true);
// enable the local storage database normally with the Chromium back-end
settings.setDatabaseEnabled(true);
}
// Enable DOM storage
settings.setDomStorageEnabled(true);
// Enable built-in geolocation
settings.setGeolocationEnabled(true);
// Fix UserAgent string
String userAgent = settings.getUserAgentString();
if ((userAgent.indexOf(MOBILE_SAFARI_UA) == -1) && (userAgent.indexOf(SAFARI_UA) != -1)) {
// Replace Safari with Mobile Safari
userAgent = userAgent.replace(SAFARI_UA, MOBILE_SAFARI_UA);
}
userAgent = userAgent.concat(" " + CORDOVA_AMAZON_FIREOS_UA);
settings.setUserAgentString(userAgent);
// Fix for CB-1405
// Google issue 4641
this.updateUserAgentString();
IntentFilter intentFilter = new IntentFilter();
intentFilter.addAction(Intent.ACTION_CONFIGURATION_CHANGED);
if (this.receiver == null) {
this.receiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
updateUserAgentString();
}
};
this.cordova.getActivity().registerReceiver(this.receiver, intentFilter);
}
// end CB-1405
settings.setUseWideViewPort(true);
pluginManager = new PluginManager(this, this.cordova);
jsMessageQueue = new NativeToJsMessageQueue(this, cordova);
exposedJsApi = new ExposedJsApi(pluginManager, jsMessageQueue);
resourceApi = new CordovaResourceApi(this.getContext(), pluginManager);
exposeJsInterface();
}
/**
* The actual back-end used when constructing the WebView. Note that this
* may differ from the requested back-end depending on the device
* configuration.
*
     * @return either {@link WebViewBackend#CHROMIUM} or
     *         {@link WebViewBackend#ANDROID}
*/
static WebViewBackend getWebViewBackend(AmazonWebKitFactory factory) {
        // This is to figure out whether the WebView is using the Chromium-based web app runtime or the stock Android WebView.
        // On Kindle devices the default is Chromium-based. There is no public API to figure out the difference.
        // EmbeddedWebKitFactory is not a public class, so the only way to check is to compare against the AmazonWebKitFactories.EMBEDDED_FACTORY class name.
if (factory.getClass().getName().equals(AmazonWebKitFactories.EMBEDDED_FACTORY) ) {
return WebViewBackend.CHROMIUM;
}
return WebViewBackend.ANDROID;
}
/**
* Override this method to decide whether or not you need to request the
     * focus when your application starts
     *
     * @return true unless this method is overridden to return a different value
*/
protected boolean shouldRequestFocusOnInit() {
return true;
}
private void updateUserAgentString() {
this.getSettings().getUserAgentString();
}
private void exposeJsInterface() {
int SDK_INT = Build.VERSION.SDK_INT;
if ((SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1)) {
Log.i(TAG, "Disabled addJavascriptInterface() bridge since Android version is old.");
// Bug being that Java Strings do not get converted to JS strings automatically.
// This isn't hard to work-around on the JS side, but it's easier to just
// use the prompt bridge instead.
return;
}
this.addJavascriptInterface(exposedJsApi, "_cordovaNative");
}
/**
* Set the WebViewClient.
*
* @param client
*/
public void setWebViewClient(CordovaWebViewClient client) {
this.viewClient = client;
super.setWebViewClient(client);
}
/**
* Set the AmazonWebChromeClient.
*
* @param client
*/
public void setWebChromeClient(CordovaChromeClient client) {
this.chromeClient = client;
super.setWebChromeClient(client);
}
public CordovaChromeClient getWebChromeClient() {
return this.chromeClient;
}
/**
* Load the url into the webview.
*
* @param url
*/
@Override
public void loadUrl(String url) {
if (url.equals("about:blank") || url.startsWith("javascript:")) {
this.loadUrlNow(url);
}
else {
String initUrl = this.getProperty("url", null);
// If first page of app, then set URL to load to be the one passed in
if (initUrl == null) {
this.loadUrlIntoView(url);
}
// Otherwise use the URL specified in the activity's extras bundle
else {
this.loadUrlIntoView(initUrl);
}
}
}
/**
     * Load the url into the webview after waiting for a period of time.
     * This is used to display the splashscreen for a certain amount of time.
*
* @param url
* @param time The number of ms to wait before loading webview
*/
public void loadUrl(final String url, int time) {
String initUrl = this.getProperty("url", null);
// If first page of app, then set URL to load to be the one passed in
if (initUrl == null) {
this.loadUrlIntoView(url, time);
}
// Otherwise use the URL specified in the activity's extras bundle
else {
this.loadUrlIntoView(initUrl);
}
}
public void loadUrlIntoView(final String url) {
loadUrlIntoView(url, true);
}
/**
* Load the url into the webview.
*
* @param url
*/
public void loadUrlIntoView(final String url, boolean recreatePlugins) {
LOG.d(TAG, ">>> loadUrl(" + url + ")");
if (recreatePlugins) {
this.url = url;
this.pluginManager.init();
}
// Got rid of the timers logic to check for errors/non-responding webpages.
// Timers were creating threading issues and NPE in some cases where app needed to load more urls or navigate back and forth a lot.
// PS. this change exists only on amazon-fireos platform.
// Load url
this.cordova.getActivity().runOnUiThread(new Runnable() {
public void run() {
CordovaWebView.this.loadUrlNow(url);
}
});
}
/**
* Load URL in webview.
*
* @param url
*/
void loadUrlNow(String url) {
if (LOG.isLoggable(LOG.DEBUG) && !url.startsWith("javascript:")) {
LOG.d(TAG, ">>> loadUrlNow()");
}
if (url.startsWith("file://") || url.startsWith("javascript:") || Config.isUrlWhiteListed(url)) {
super.loadUrl(url);
}
}
/**
     * Load the url into the webview after waiting for a period of time.
     * This is used to display the splashscreen for a certain amount of time.
*
* @param url
* @param time The number of ms to wait before loading webview
*/
public void loadUrlIntoView(final String url, final int time) {
// If not first page of app, then load immediately
// Add support for browser history if we use it.
if ((url.startsWith("javascript:")) || this.canGoBack()) {
}
// If first page, then show splashscreen
else {
LOG.d(TAG, "loadUrlIntoView(%s, %d)", url, time);
// Send message to show splashscreen now if desired
this.postMessage("splashscreen", "show");
}
// Load url
this.loadUrlIntoView(url);
}
@Override
public void stopLoading() {
viewClient.isCurrentlyLoading = false;
super.stopLoading();
}
public void onScrollChanged(int l, int t, int oldl, int oldt)
{
super.onScrollChanged(l, t, oldl, oldt);
//We should post a message that the scroll changed
LOG.d(TAG, "Scroll changed: oldl = %d, l = %d", oldl, l);
LOG.d(TAG, "Scroll changed: oldt = %d, t = %d", oldt, t);
ScrollEvent myEvent = new ScrollEvent(l, t, oldl, oldt, this);
this.postMessage("onScrollChanged", myEvent);
}
/**
* Send JavaScript statement back to JavaScript.
* (This is a convenience method)
*
* @param statement
*/
public void sendJavascript(String statement) {
this.jsMessageQueue.addJavaScript(statement);
}
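    /*
     * Illustrative sketch (not part of the original Cordova source): a hedged example of how a
     * plugin-side helper might use the convenience method above to push a custom document event
     * to the page. The method name and event name are hypothetical.
     */
    private void fireCustomEventSketch() {
        // Queued through the native-to-JS message queue rather than loading a javascript: URL directly.
        this.sendJavascript("cordova.fireDocumentEvent('customnativeevent');");
    }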
/**
* Send a plugin result back to JavaScript.
* (This is a convenience method)
*
* @param result
* @param callbackId
*/
public void sendPluginResult(PluginResult result, String callbackId) {
this.jsMessageQueue.addPluginResult(result, callbackId);
}
/**
* Send a message to all plugins.
*
* @param id The message id
* @param data The message data
*/
public void postMessage(String id, Object data) {
if (this.pluginManager != null) {
this.pluginManager.postMessage(id, data);
}
}
/**
* Go to previous page in history. (We manage our own history)
*
* @return true if we went back, false if we are already at top
*/
public boolean backHistory() {
// Check webview first to see if there is a history
// This is needed to support curPage#diffLink, since they are added to appView's history, but not our history url array (JQMobile behavior)
if (super.canGoBack()) {
printBackForwardList();
super.goBack();
return true;
}
return false;
}
/**
* Load the specified URL in the Cordova webview or a new browser instance.
*
* NOTE: If openExternal is false, only URLs listed in whitelist can be loaded.
*
* @param url The url to load.
* @param openExternal Load url in browser instead of Cordova webview.
* @param clearHistory Clear the history stack, so new page becomes top of history
* @param params Parameters for new app
*/
public void showWebPage(String url, boolean openExternal, boolean clearHistory, HashMap<String, Object> params) {
LOG.d(TAG, "showWebPage(%s, %b, %b, HashMap", url, openExternal, clearHistory);
// If clearing history
if (clearHistory) {
this.clearHistory();
}
// If loading into our webview
if (!openExternal) {
// Make sure url is in whitelist
if (url.startsWith("file://") || Config.isUrlWhiteListed(url)) {
// TODO: What about params?
// Load new URL
this.loadUrl(url);
return;
}
// Load in default viewer if not
LOG.w(TAG, "showWebPage: Cannot load URL into webview since it is not in white list. Loading into browser instead. (URL=" + url + ")");
}
try {
// Omitting the MIME type for file: URLs causes "No Activity found to handle Intent".
// Adding the MIME type to http: URLs causes them to not be handled by the downloader.
Intent intent = new Intent(Intent.ACTION_VIEW);
Uri uri = Uri.parse(url);
if ("file".equals(uri.getScheme())) {
intent.setDataAndType(uri, resourceApi.getMimeType(uri));
} else {
intent.setData(uri);
}
cordova.getActivity().startActivity(intent);
} catch (android.content.ActivityNotFoundException e) {
LOG.e(TAG, "Error loading url " + url, e);
}
}
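    /*
     * Illustrative sketch (not part of the original Cordova source): a hedged example of the two
     * paths showWebPage() supports. The method name and URLs are placeholders; only whitelisted
     * or file:// URLs stay inside the Cordova webview.
     */
    private void showWebPageUsageSketch() {
        // Whitelisted asset page: replace the current page and drop history.
        this.showWebPage("file:///android_asset/www/help.html", false, true, new HashMap<String, Object>());
        // External page: hand off to the system browser, keep our history intact.
        this.showWebPage("http://example.com/docs", true, false, null);
    }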
/**
* Check configuration parameters from Config.
* Approved list of URLs that can be loaded into Cordova
* <access origin="http://server regexp" subdomains="true" />
* Log level: ERROR, WARN, INFO, DEBUG, VERBOSE (default=ERROR)
* <log level="DEBUG" />
*/
private void loadConfiguration() {
if ("true".equals(this.getProperty("Fullscreen", "false"))) {
this.cordova.getActivity().getWindow().clearFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
this.cordova.getActivity().getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
}
}
/**
* Get string property for activity.
*
* @param name
* @param defaultValue
* @return the String value for the named property
*/
public String getProperty(String name, String defaultValue) {
Bundle bundle = this.cordova.getActivity().getIntent().getExtras();
if (bundle == null) {
return defaultValue;
}
name = name.toLowerCase(Locale.getDefault());
Object p = bundle.get(name);
if (p == null) {
return defaultValue;
}
return p.toString();
}
/*
* onKeyDown
*/
@Override
public boolean onKeyDown(int keyCode, KeyEvent event)
{
if(keyDownCodes.contains(keyCode))
{
if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
                // only override the default behavior if the event is bound
LOG.d(TAG, "Down Key Hit");
this.loadUrl("javascript:cordova.fireDocumentEvent('volumedownbutton');");
return true;
}
// If volumeup key
else if (keyCode == KeyEvent.KEYCODE_VOLUME_UP) {
LOG.d(TAG, "Up Key Hit");
this.loadUrl("javascript:cordova.fireDocumentEvent('volumeupbutton');");
return true;
}
else
{
return super.onKeyDown(keyCode, event);
}
}
else if(keyCode == KeyEvent.KEYCODE_BACK)
{
return !(this.startOfHistory()) || this.bound;
}
else if(keyCode == KeyEvent.KEYCODE_MENU)
{
//How did we get here? Is there a childView?
View childView = this.getFocusedChild();
if(childView != null)
{
//Make sure we close the keyboard if it's present
InputMethodManager imm = (InputMethodManager) cordova.getActivity().getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(childView.getWindowToken(), 0);
cordova.getActivity().openOptionsMenu();
return true;
} else {
return super.onKeyDown(keyCode, event);
}
}
return super.onKeyDown(keyCode, event);
}
@Override
public boolean onKeyUp(int keyCode, KeyEvent event)
{
// If back key
if (keyCode == KeyEvent.KEYCODE_BACK) {
// A custom view is currently displayed (e.g. playing a video)
if(mCustomView != null) {
this.hideCustomView();
} else {
// The webview is currently displayed
// If back key is bound, then send event to JavaScript
if (this.bound) {
this.loadUrl("javascript:cordova.fireDocumentEvent('backbutton');");
return true;
} else {
// If not bound
// Give plugins a chance to override behavior
if (this.pluginManager != null) {
Object returnVal = this.pluginManager.postMessage("onBackPressed", null);
if (returnVal != null && returnVal instanceof Boolean && (Boolean) returnVal) {
// The return value was a true boolean, callback was consumed
return true;
}
}
// Go to previous page in webview if it is possible to go back
if (this.backHistory()) {
return true;
}
// If not, then invoke default behavior
else {
//this.activityState = ACTIVITY_EXITING;
//return false;
                        // If they hit back button when app is initializing, app should exit instead of hang until initialization (CB-2458)
this.cordova.getActivity().finish();
}
}
}
}
// Legacy
else if (keyCode == KeyEvent.KEYCODE_MENU) {
if (this.lastMenuEventTime < event.getEventTime()) {
this.loadUrl("javascript:cordova.fireDocumentEvent('menubutton');");
}
this.lastMenuEventTime = event.getEventTime();
return super.onKeyUp(keyCode, event);
}
// If search key
else if (keyCode == KeyEvent.KEYCODE_SEARCH) {
this.loadUrl("javascript:cordova.fireDocumentEvent('searchbutton');");
return true;
}
else if(keyUpCodes.contains(keyCode))
{
//What the hell should this do?
return super.onKeyUp(keyCode, event);
}
//Does webkit change this behavior?
return super.onKeyUp(keyCode, event);
}
public void bindButton(boolean override)
{
this.bound = override;
}
public void bindButton(String button, boolean override) {
// TODO Auto-generated method stub
if (button.compareTo("volumeup")==0) {
keyDownCodes.add(KeyEvent.KEYCODE_VOLUME_UP);
}
else if (button.compareTo("volumedown")==0) {
keyDownCodes.add(KeyEvent.KEYCODE_VOLUME_DOWN);
}
}
public void bindButton(int keyCode, boolean keyDown, boolean override) {
if(keyDown)
{
keyDownCodes.add(keyCode);
}
else
{
keyUpCodes.add(keyCode);
}
}
public boolean isBackButtonBound()
{
return this.bound;
}
public void handlePause(boolean keepRunning)
{
LOG.d(TAG, "Handle the pause");
// Send pause event to JavaScript
this.loadUrl("javascript:try{cordova.fireDocumentEvent('pause');}catch(e){console.log('exception firing pause event from native');};");
// Forward to plugins
if (this.pluginManager != null) {
this.pluginManager.onPause(keepRunning);
}
// If app doesn't want to run in background
if (!keepRunning) {
// Pause JavaScript timers (including setInterval)
this.pauseTimers();
this.onPause();
}
paused = true;
}
public void handleResume(boolean keepRunning, boolean activityResultKeepRunning)
{
this.loadUrl("javascript:try{cordova.fireDocumentEvent('resume');}catch(e){console.log('exception firing resume event from native');};");
// Forward to plugins
if (this.pluginManager != null) {
this.pluginManager.onResume(keepRunning);
}
//resume first and then resumeTimers
this.onResume();
// Resume JavaScript timers (including setInterval)
this.resumeTimers();
paused = false;
}
public void handleDestroy()
{
// Send destroy event to JavaScript
// Since baseUrl is set in loadUrlIntoView, if user hit Back button before loadUrl was called, we'll get an NPE on baseUrl (CB-2458)
this.loadUrl("javascript:try{cordova.require('cordova/channel').onDestroy.fire();}catch(e){};");
// Load blank page so that JavaScript onunload is called
this.loadUrl("about:blank");
// Forward to plugins
if (this.pluginManager != null) {
this.pluginManager.onDestroy();
}
// unregister the receiver
if (this.receiver != null) {
try {
this.cordova.getActivity().unregisterReceiver(this.receiver);
} catch (Exception e) {
Log.e(TAG, "Error unregistering configuration receiver: " + e.getMessage(), e);
}
}
}
public void onNewIntent(Intent intent)
{
//Forward to plugins
if (this.pluginManager != null) {
this.pluginManager.onNewIntent(intent);
}
}
public boolean isPaused()
{
return paused;
}
public boolean hadKeyEvent() {
return handleButton;
}
// Wrapping these functions in their own class prevents warnings in adb like:
// VFY: unable to resolve virtual method 285: Landroid/webkit/AmazonWebSettings;.setAllowUniversalAccessFromFileURLs
@TargetApi(16)
private static class Level16Apis {
static void enableUniversalAccess(AmazonWebSettings settings) {
settings.setAllowUniversalAccessFromFileURLs(true);
}
}
public void printBackForwardList() {
AmazonWebBackForwardList currentList = this.copyBackForwardList();
int currentSize = currentList.getSize();
for(int i = 0; i < currentSize; ++i)
{
AmazonWebHistoryItem item = currentList.getItemAtIndex(i);
String url = item.getUrl();
LOG.d(TAG, "The URL at index: " + Integer.toString(i) + "is " + url );
}
}
//Can Go Back is BROKEN!
public boolean startOfHistory()
{
AmazonWebBackForwardList currentList = this.copyBackForwardList();
AmazonWebHistoryItem item = currentList.getItemAtIndex(0);
if( item!=null){ // Null-fence in case they haven't called loadUrl yet (CB-2458)
String url = item.getUrl();
String currentUrl = this.getUrl();
LOG.d(TAG, "The current URL is: " + currentUrl);
LOG.d(TAG, "The URL at item 0 is:" + url);
return currentUrl.equals(url);
}
return false;
}
public void showCustomView(View view, AmazonWebChromeClient.CustomViewCallback callback) {
// This code is adapted from the original Android Browser code, licensed under the Apache License, Version 2.0
Log.d(TAG, "showing Custom View");
// if a view already exists then immediately terminate the new one
if (mCustomView != null) {
callback.onCustomViewHidden();
return;
}
// Store the view and its callback for later (to kill it properly)
mCustomView = view;
mCustomViewCallback = callback;
// Add the custom view to its container.
ViewGroup parent = (ViewGroup) this.getParent();
parent.addView(view, COVER_SCREEN_GRAVITY_CENTER);
// Hide the content view.
this.setVisibility(View.GONE);
// Finally show the custom view container.
parent.setVisibility(View.VISIBLE);
parent.bringToFront();
}
public void hideCustomView() {
// This code is adapted from the original Android Browser code, licensed under the Apache License, Version 2.0
Log.d(TAG, "Hiding Custom View");
if (mCustomView == null) return;
// Hide the custom view.
mCustomView.setVisibility(View.GONE);
// Remove the custom view from its container.
ViewGroup parent = (ViewGroup) this.getParent();
parent.removeView(mCustomView);
mCustomView = null;
mCustomViewCallback.onCustomViewHidden();
// Show the content view.
this.setVisibility(View.VISIBLE);
}
/**
* if the video overlay is showing then we need to know
     * as it affects back button handling
*
* @return true if custom view is showing
*/
public boolean isCustomViewShowing() {
return mCustomView != null;
}
public AmazonWebBackForwardList restoreState(Bundle savedInstanceState)
{
AmazonWebBackForwardList myList = super.restoreState(savedInstanceState);
Log.d(TAG, "AmazonWebView restoration crew now restoring!");
//Initialize the plugin manager once more
this.pluginManager.init();
return myList;
}
public void storeResult(int requestCode, int resultCode, Intent intent) {
mResult = new ActivityResult(requestCode, resultCode, intent);
}
public CordovaResourceApi getResourceApi() {
return resourceApi;
}
}
|
framework/src/org/apache/cordova/CordovaWebView.java
|
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.cordova;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Locale;
import org.apache.cordova.Config;
import org.apache.cordova.CordovaInterface;
import org.apache.cordova.LOG;
import org.apache.cordova.PluginManager;
import org.apache.cordova.PluginResult;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
import com.amazon.android.webkit.AmazonWebBackForwardList;
import com.amazon.android.webkit.AmazonWebHistoryItem;
import com.amazon.android.webkit.AmazonWebChromeClient;
import com.amazon.android.webkit.AmazonWebKitFactories;
import com.amazon.android.webkit.AmazonWebSettings;
import com.amazon.android.webkit.AmazonWebView;
import com.amazon.android.webkit.AmazonWebKitFactory;
import android.widget.FrameLayout;
public class CordovaWebView extends AmazonWebView {
/*
* This class is our web view.
*
* @see <a href="http://developer.android.com/guide/webapps/webview.html">WebView guide</a>
* @see <a href="http://developer.android.com/reference/android/webkit/WebView.html">WebView</a>
*/
public static final String TAG = "CordovaWebView";
public static final String CORDOVA_VERSION = "3.4.0-dev";
private ArrayList<Integer> keyDownCodes = new ArrayList<Integer>();
private ArrayList<Integer> keyUpCodes = new ArrayList<Integer>();
public PluginManager pluginManager;
private boolean paused;
private BroadcastReceiver receiver;
/** Activities and other important classes **/
private CordovaInterface cordova;
CordovaWebViewClient viewClient;
@SuppressWarnings("unused")
private CordovaChromeClient chromeClient;
private String url;
// Flag to track that a loadUrl timeout occurred
int loadUrlTimeout = 0;
private boolean bound;
private boolean handleButton = false;
private long lastMenuEventTime = 0;
NativeToJsMessageQueue jsMessageQueue;
ExposedJsApi exposedJsApi;
/** custom view created by the browser (a video player for example) */
private View mCustomView;
private AmazonWebChromeClient.CustomViewCallback mCustomViewCallback;
private ActivityResult mResult = null;
private CordovaResourceApi resourceApi;
private static final String APPCACHE_DIR = "database";
private static final String APPCACHE_DIR_EMPTY = "NONEXISTENT_PATH";
private static final String SAFARI_UA = "Safari";
private static final String MOBILE_SAFARI_UA = "Mobile Safari";
private static final String CORDOVA_AMAZON_FIREOS_UA = "cordova-amazon-fireos/" + CORDOVA_VERSION;
private static final String LOCAL_STORAGE_DIR = "database";
/**
* Arbitrary size limit for app cache resources
*/
public static final long APP_CACHE_LIMIT = (1024 * 1024 * 50);
/**
* An enumeration to specify the desired back-end to use when constructing
* the WebView.
*/
public enum WebViewBackend {
/** The stock Android WebView back-end */
ANDROID,
        /** The Chromium AmazonWebView back-end */
CHROMIUM,
/**
* Automatically select the back-end depending on the device
* configuration
*/
AUTOMATIC
};
class ActivityResult {
int request;
int result;
Intent incoming;
public ActivityResult(int req, int res, Intent intent) {
request = req;
result = res;
incoming = intent;
}
}
static final FrameLayout.LayoutParams COVER_SCREEN_GRAVITY_CENTER =
new FrameLayout.LayoutParams(
ViewGroup.LayoutParams.MATCH_PARENT,
ViewGroup.LayoutParams.MATCH_PARENT,
Gravity.CENTER);
/**
* Constructor.
*
* @param context
*/
public CordovaWebView(Context context) {
super(context);
if (CordovaInterface.class.isInstance(context))
{
this.cordova = (CordovaInterface) context;
this.cordova.getFactory().initializeWebView(this, 0xFFFFFF, false, null);
}
else
{
Log.d(TAG, "Your activity must implement CordovaInterface to work");
}
this.loadConfiguration();
this.setup();
}
/**
* Constructor.
*
* @param context
* @param attrs
*/
public CordovaWebView(Context context, AttributeSet attrs) {
super(context, attrs);
if (CordovaInterface.class.isInstance(context))
{
this.cordova = (CordovaInterface) context;
this.cordova.getFactory().initializeWebView(this, 0xFFFFFF, false, null);
}
else
{
Log.d(TAG, "Your activity must implement CordovaInterface to work");
}
this.setWebChromeClient(new CordovaChromeClient(this.cordova, this));
this.initWebViewClient(this.cordova);
this.loadConfiguration();
this.setup();
}
/**
* Constructor.
*
* @param context
* @param attrs
* @param defStyle
*
*/
public CordovaWebView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
if (CordovaInterface.class.isInstance(context))
{
this.cordova = (CordovaInterface) context;
this.cordova.getFactory().initializeWebView(this, 0xFFFFFF, false, null);
}
else
{
Log.d(TAG, "Your activity must implement CordovaInterface to work");
}
this.setWebChromeClient(new CordovaChromeClient(this.cordova, this));
this.loadConfiguration();
this.setup();
}
/**
* Constructor.
*
* @param context
* @param attrs
* @param defStyle
* @param privateBrowsing
*/
@TargetApi(11)
public CordovaWebView(Context context, AttributeSet attrs, int defStyle, boolean privateBrowsing) {
// super(context, attrs, defStyle, privateBrowsing); // DEPRECATED
super(context, attrs, defStyle);
if (CordovaInterface.class.isInstance(context))
{
this.cordova = (CordovaInterface) context;
this.cordova.getFactory().initializeWebView(this, 0xFFFFFF, privateBrowsing, null);
}
else
{
Log.d(TAG, "Your activity must implement CordovaInterface to work");
}
this.setWebChromeClient(new CordovaChromeClient(this.cordova));
this.initWebViewClient(this.cordova);
this.loadConfiguration();
this.setup();
}
/**
* set the WebViewClient, but provide special case handling for IceCreamSandwich.
*/
private void initWebViewClient(CordovaInterface cordova) {
if(android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.HONEYCOMB ||
android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.JELLY_BEAN_MR1)
{
this.setWebViewClient(new CordovaWebViewClient(this.cordova, this));
}
else
{
this.setWebViewClient(new IceCreamCordovaWebViewClient(this.cordova, this));
}
}
/**
* Initialize webview.
*/
@SuppressWarnings("deprecation")
@SuppressLint("NewApi")
private void setup() {
this.setInitialScale(0);
this.setVerticalScrollBarEnabled(false);
if (shouldRequestFocusOnInit()) {
this.requestFocusFromTouch();
}
// Enable JavaScript
AmazonWebSettings settings = this.getSettings();
settings.setJavaScriptEnabled(true);
settings.setMediaPlaybackRequiresUserGesture(false);
// Set the nav dump for HTC 2.x devices (disabling for ICS, deprecated entirely for Jellybean 4.2)
try {
Method gingerbread_getMethod = AmazonWebSettings.class.getMethod("setNavDump", new Class[] { boolean.class });
String manufacturer = android.os.Build.MANUFACTURER;
Log.d(TAG, "CordovaWebView is running on device made by: " + manufacturer);
if(android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.HONEYCOMB &&
android.os.Build.MANUFACTURER.contains("HTC"))
{
gingerbread_getMethod.invoke(settings, true);
}
} catch (NoSuchMethodException e) {
Log.d(TAG, "We are on a modern version of Android, we will deprecate HTC 2.3 devices in 2.8");
} catch (IllegalArgumentException e) {
Log.d(TAG, "Doing the NavDump failed with bad arguments");
} catch (IllegalAccessException e) {
Log.d(TAG, "This should never happen: IllegalAccessException means this isn't Android anymore");
} catch (InvocationTargetException e) {
Log.d(TAG, "This should never happen: InvocationTargetException means this isn't Android anymore.");
}
//We don't save any form data in the application
settings.setSaveFormData(false);
settings.setSavePassword(false);
// Jellybean rightfully tried to lock this down. Too bad they didn't give us a whitelist
// while we do this
if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1
|| (getWebViewBackend(this.cordova.getFactory()) == WebViewBackend.CHROMIUM))
Level16Apis.enableUniversalAccess(settings);
if (getWebViewBackend(this.cordova.getFactory()) == WebViewBackend.ANDROID) {
File appCacheDir = this.cordova.getActivity().getDir(APPCACHE_DIR, Context.MODE_PRIVATE);
if (appCacheDir.exists()) {
settings.setAppCachePath(appCacheDir.getPath());
settings.setAppCacheMaxSize(APP_CACHE_LIMIT);
settings.setAppCacheEnabled(true);
} else {
// shouldn't get here...
Log.e(TAG, "Unable to construct application cache directory, feature disabled");
}
File storageDir = this.cordova.getActivity().getDir(LOCAL_STORAGE_DIR, Context.MODE_PRIVATE);
if (storageDir.exists()) {
settings.setDatabasePath(storageDir.getPath());
settings.setDatabaseEnabled(true);
settings.setGeolocationDatabasePath(storageDir.getPath());
} else {
// shouldn't get here...
Log.e(TAG, "Unable to construct local storage directory, feature disabled");
}
} else {
// setting a custom path (as well as the max cache size) is not supported by Chromium,
// however setting the path to a non-null non-empty string is required for it to function
settings.setAppCachePath(APPCACHE_DIR_EMPTY);
settings.setAppCacheEnabled(true);
// enable the local storage database normally with the Chromium back-end
settings.setDatabaseEnabled(true);
}
// Enable DOM storage
settings.setDomStorageEnabled(true);
// Enable built-in geolocation
settings.setGeolocationEnabled(true);
// Fix UserAgent string
String userAgent = settings.getUserAgentString();
if ((userAgent.indexOf(MOBILE_SAFARI_UA) == -1) && (userAgent.indexOf(SAFARI_UA) != -1)) {
// Replace Safari with Mobile Safari
userAgent = userAgent.replace(SAFARI_UA, MOBILE_SAFARI_UA);
}
userAgent = userAgent.concat(" " + CORDOVA_AMAZON_FIREOS_UA);
settings.setUserAgentString(userAgent);
// Fix for CB-1405
// Google issue 4641
this.updateUserAgentString();
IntentFilter intentFilter = new IntentFilter();
intentFilter.addAction(Intent.ACTION_CONFIGURATION_CHANGED);
if (this.receiver == null) {
this.receiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
updateUserAgentString();
}
};
this.cordova.getActivity().registerReceiver(this.receiver, intentFilter);
}
// end CB-1405
settings.setUseWideViewPort(true);
pluginManager = new PluginManager(this, this.cordova);
jsMessageQueue = new NativeToJsMessageQueue(this, cordova);
exposedJsApi = new ExposedJsApi(pluginManager, jsMessageQueue);
resourceApi = new CordovaResourceApi(this.getContext(), pluginManager);
exposeJsInterface();
}
/**
* The actual back-end used when constructing the WebView. Note that this
* may differ from the requested back-end depending on the device
* configuration.
*
* @return either {@link WebViewBackend#CHROMIUM} or
* {@link WebViewBackend#ANDROID}
*/
static WebViewBackend getWebViewBackend(AmazonWebKitFactory factory) {
// This is to figure out if the WebView is using the Chromium-based web app runtime or the stock Android WebView.
// On Kindle devices the default is Chromium-based. There is no public API to detect the difference.
// EmbeddedWebKitFactory is not a public class, so the only way to check is to compare against the AmazonWebKitFactories.EMBEDDED_FACTORY class name.
if (factory.getClass().getName().equals(AmazonWebKitFactories.EMBEDDED_FACTORY) ) {
return WebViewBackend.CHROMIUM;
}
return WebViewBackend.ANDROID;
}
/**
* Override this method to decide whether or not you need to request the
* focus when your application starts
*
* @return true unless this method is overridden to return a different value
*/
protected boolean shouldRequestFocusOnInit() {
return true;
}
private void updateUserAgentString() {
// Touching the user agent string forces AmazonWebSettings to refresh it after a locale/configuration change (see CB-1405 / Google issue 4641 above).
this.getSettings().getUserAgentString();
}
private void exposeJsInterface() {
int SDK_INT = Build.VERSION.SDK_INT;
if ((SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1)) {
Log.i(TAG, "Disabled addJavascriptInterface() bridge since Android version is old.");
// Bug being that Java Strings do not get converted to JS strings automatically.
// This isn't hard to work-around on the JS side, but it's easier to just
// use the prompt bridge instead.
return;
}
this.addJavascriptInterface(exposedJsApi, "_cordovaNative");
}
/**
* Set the WebViewClient.
*
* @param client
*/
public void setWebViewClient(CordovaWebViewClient client) {
this.viewClient = client;
super.setWebViewClient(client);
}
/**
* Set the AmazonWebChromeClient.
*
* @param client
*/
public void setWebChromeClient(CordovaChromeClient client) {
this.chromeClient = client;
super.setWebChromeClient(client);
}
public CordovaChromeClient getWebChromeClient() {
return this.chromeClient;
}
/**
* Load the url into the webview.
*
* @param url
*/
@Override
public void loadUrl(String url) {
if (url.equals("about:blank") || url.startsWith("javascript:")) {
this.loadUrlNow(url);
}
else {
String initUrl = this.getProperty("url", null);
// If first page of app, then set URL to load to be the one passed in
if (initUrl == null) {
this.loadUrlIntoView(url);
}
// Otherwise use the URL specified in the activity's extras bundle
else {
this.loadUrlIntoView(initUrl);
}
}
}
/**
* Load the url into the webview after waiting for a period of time.
* This is used to display the splashscreen for a certain amount of time.
*
* @param url
* @param time The number of ms to wait before loading the webview
*/
public void loadUrl(final String url, int time) {
String initUrl = this.getProperty("url", null);
// If first page of app, then set URL to load to be the one passed in
if (initUrl == null) {
this.loadUrlIntoView(url, time);
}
// Otherwise use the URL specified in the activity's extras bundle
else {
this.loadUrlIntoView(initUrl);
}
}
public void loadUrlIntoView(final String url) {
loadUrlIntoView(url, true);
}
/**
* Load the url into the webview.
*
* @param url
*/
public void loadUrlIntoView(final String url, boolean recreatePlugins) {
LOG.d(TAG, ">>> loadUrl(" + url + ")");
if (recreatePlugins) {
this.url = url;
this.pluginManager.init();
}
// Got rid of the timers logic to check for errors/non-responding webpages.
// Timers were creating threading issues and NPE in some cases where app needed to load more urls or navigate back and forth a lot.
// PS. this change exists only on amazon-fireos platform.
// Load url
this.cordova.getActivity().runOnUiThread(new Runnable() {
public void run() {
CordovaWebView.this.loadUrlNow(url);
}
});
}
/**
* Load URL in webview.
*
* @param url
*/
void loadUrlNow(String url) {
if (LOG.isLoggable(LOG.DEBUG) && !url.startsWith("javascript:")) {
LOG.d(TAG, ">>> loadUrlNow()");
}
if (url.startsWith("file://") || url.startsWith("javascript:") || Config.isUrlWhiteListed(url)) {
super.loadUrl(url);
}
}
/**
* Load the url into the webview after waiting for a period of time.
* This is used to display the splashscreen for a certain amount of time.
*
* @param url
* @param time The number of ms to wait before loading the webview
*/
public void loadUrlIntoView(final String url, final int time) {
// If not first page of app, then load immediately
// Add support for browser history if we use it.
if ((url.startsWith("javascript:")) || this.canGoBack()) {
}
// If first page, then show splashscreen
else {
LOG.d(TAG, "loadUrlIntoView(%s, %d)", url, time);
// Send message to show splashscreen now if desired
this.postMessage("splashscreen", "show");
}
// Load url
this.loadUrlIntoView(url);
}
@Override
public void stopLoading() {
viewClient.isCurrentlyLoading = false;
super.stopLoading();
}
public void onScrollChanged(int l, int t, int oldl, int oldt)
{
super.onScrollChanged(l, t, oldl, oldt);
//We should post a message that the scroll changed
LOG.d(TAG, "Scroll changed: oldl = %d, l = %d", oldl, l);
LOG.d(TAG, "Scroll changed: oldt = %d, t = %d", oldt, t);
ScrollEvent myEvent = new ScrollEvent(l, t, oldl, oldt, this);
this.postMessage("onScrollChanged", myEvent);
}
/**
* Send JavaScript statement back to JavaScript.
* (This is a convenience method)
*
* @param statement
*/
public void sendJavascript(String statement) {
this.jsMessageQueue.addJavaScript(statement);
}
/**
* Send a plugin result back to JavaScript.
* (This is a convenience method)
*
* @param result
* @param callbackId
*/
public void sendPluginResult(PluginResult result, String callbackId) {
this.jsMessageQueue.addPluginResult(result, callbackId);
}
/**
* Send a message to all plugins.
*
* @param id The message id
* @param data The message data
*/
public void postMessage(String id, Object data) {
if (this.pluginManager != null) {
this.pluginManager.postMessage(id, data);
}
}
/**
* Go to previous page in history. (We manage our own history)
*
* @return true if we went back, false if we are already at top
*/
public boolean backHistory() {
// Check webview first to see if there is a history
// This is needed to support curPage#diffLink, since they are added to appView's history, but not our history url array (JQMobile behavior)
if (super.canGoBack()) {
printBackForwardList();
super.goBack();
return true;
}
return false;
}
/**
* Load the specified URL in the Cordova webview or a new browser instance.
*
* NOTE: If openExternal is false, only URLs listed in whitelist can be loaded.
*
* @param url The url to load.
* @param openExternal Load url in browser instead of Cordova webview.
* @param clearHistory Clear the history stack, so new page becomes top of history
* @param params Parameters for new app
*/
public void showWebPage(String url, boolean openExternal, boolean clearHistory, HashMap<String, Object> params) {
LOG.d(TAG, "showWebPage(%s, %b, %b, HashMap", url, openExternal, clearHistory);
// If clearing history
if (clearHistory) {
this.clearHistory();
}
// If loading into our webview
if (!openExternal) {
// Make sure url is in whitelist
if (url.startsWith("file://") || Config.isUrlWhiteListed(url)) {
// TODO: What about params?
// Load new URL
this.loadUrl(url);
return;
}
// Load in default viewer if not
LOG.w(TAG, "showWebPage: Cannot load URL into webview since it is not in white list. Loading into browser instead. (URL=" + url + ")");
}
try {
// Omitting the MIME type for file: URLs causes "No Activity found to handle Intent".
// Adding the MIME type to http: URLs causes them to not be handled by the downloader.
Intent intent = new Intent(Intent.ACTION_VIEW);
Uri uri = Uri.parse(url);
if ("file".equals(uri.getScheme())) {
intent.setDataAndType(uri, resourceApi.getMimeType(uri));
} else {
intent.setData(uri);
}
cordova.getActivity().startActivity(intent);
} catch (android.content.ActivityNotFoundException e) {
LOG.e(TAG, "Error loading url " + url, e);
}
}
/**
* Check configuration parameters from Config.
* Approved list of URLs that can be loaded into Cordova
* <access origin="http://server regexp" subdomains="true" />
* Log level: ERROR, WARN, INFO, DEBUG, VERBOSE (default=ERROR)
* <log level="DEBUG" />
*/
private void loadConfiguration() {
if ("true".equals(this.getProperty("Fullscreen", "false"))) {
this.cordova.getActivity().getWindow().clearFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
this.cordova.getActivity().getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
}
}
/**
* Get string property for activity.
*
* @param name
* @param defaultValue
* @return the String value for the named property
*/
public String getProperty(String name, String defaultValue) {
Bundle bundle = this.cordova.getActivity().getIntent().getExtras();
if (bundle == null) {
return defaultValue;
}
name = name.toLowerCase(Locale.getDefault());
Object p = bundle.get(name);
if (p == null) {
return defaultValue;
}
return p.toString();
}
/*
* onKeyDown
*/
@Override
public boolean onKeyDown(int keyCode, KeyEvent event)
{
if(keyDownCodes.contains(keyCode))
{
if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
// only override the default behavior if the event is bound
LOG.d(TAG, "Down Key Hit");
this.loadUrl("javascript:cordova.fireDocumentEvent('volumedownbutton');");
return true;
}
// If volumeup key
else if (keyCode == KeyEvent.KEYCODE_VOLUME_UP) {
LOG.d(TAG, "Up Key Hit");
this.loadUrl("javascript:cordova.fireDocumentEvent('volumeupbutton');");
return true;
}
else
{
return super.onKeyDown(keyCode, event);
}
}
else if(keyCode == KeyEvent.KEYCODE_BACK)
{
return !(this.startOfHistory()) || this.bound;
}
else if(keyCode == KeyEvent.KEYCODE_MENU)
{
//How did we get here? Is there a childView?
View childView = this.getFocusedChild();
if(childView != null)
{
//Make sure we close the keyboard if it's present
InputMethodManager imm = (InputMethodManager) cordova.getActivity().getSystemService(Context.INPUT_METHOD_SERVICE);
imm.hideSoftInputFromWindow(childView.getWindowToken(), 0);
cordova.getActivity().openOptionsMenu();
return true;
} else {
return super.onKeyDown(keyCode, event);
}
}
return super.onKeyDown(keyCode, event);
}
@Override
public boolean onKeyUp(int keyCode, KeyEvent event)
{
// If back key
if (keyCode == KeyEvent.KEYCODE_BACK) {
// A custom view is currently displayed (e.g. playing a video)
if(mCustomView != null) {
this.hideCustomView();
} else {
// The webview is currently displayed
// If back key is bound, then send event to JavaScript
if (this.bound) {
this.loadUrl("javascript:cordova.fireDocumentEvent('backbutton');");
return true;
} else {
// If not bound
// Give plugins a chance to override behavior
if (this.pluginManager != null) {
Object returnVal = this.pluginManager.postMessage("onBackPressed", null);
if (returnVal != null && returnVal instanceof Boolean && (Boolean) returnVal) {
// The return value was a true boolean, callback was consumed
return true;
}
}
// Go to previous page in webview if it is possible to go back
if (this.backHistory()) {
return true;
}
// If not, then invoke default behavior
else {
//this.activityState = ACTIVITY_EXITING;
//return false;
// If the back button is hit while the app is initializing, the app should exit instead of hanging until initialization completes (CB-2458)
this.cordova.getActivity().finish();
}
}
}
}
// Legacy
else if (keyCode == KeyEvent.KEYCODE_MENU) {
if (this.lastMenuEventTime < event.getEventTime()) {
this.loadUrl("javascript:cordova.fireDocumentEvent('menubutton');");
}
this.lastMenuEventTime = event.getEventTime();
return super.onKeyUp(keyCode, event);
}
// If search key
else if (keyCode == KeyEvent.KEYCODE_SEARCH) {
this.loadUrl("javascript:cordova.fireDocumentEvent('searchbutton');");
return true;
}
else if(keyUpCodes.contains(keyCode))
{
// No custom handling for other bound key-up codes; defer to the default behavior.
return super.onKeyUp(keyCode, event);
}
//Does webkit change this behavior?
return super.onKeyUp(keyCode, event);
}
public void bindButton(boolean override)
{
this.bound = override;
}
public void bindButton(String button, boolean override) {
// Map the named button to the corresponding key code.
if (button.compareTo("volumeup")==0) {
keyDownCodes.add(KeyEvent.KEYCODE_VOLUME_UP);
}
else if (button.compareTo("volumedown")==0) {
keyDownCodes.add(KeyEvent.KEYCODE_VOLUME_DOWN);
}
}
public void bindButton(int keyCode, boolean keyDown, boolean override) {
if(keyDown)
{
keyDownCodes.add(keyCode);
}
else
{
keyUpCodes.add(keyCode);
}
}
public boolean isBackButtonBound()
{
return this.bound;
}
public void handlePause(boolean keepRunning)
{
LOG.d(TAG, "Handle the pause");
// Send pause event to JavaScript
this.loadUrl("javascript:try{cordova.fireDocumentEvent('pause');}catch(e){console.log('exception firing pause event from native');};");
// Forward to plugins
if (this.pluginManager != null) {
this.pluginManager.onPause(keepRunning);
}
// If app doesn't want to run in background
if (!keepRunning) {
// Pause JavaScript timers (including setInterval)
this.pauseTimers();
this.onPause();
}
paused = true;
}
public void handleResume(boolean keepRunning, boolean activityResultKeepRunning)
{
this.loadUrl("javascript:try{cordova.fireDocumentEvent('resume');}catch(e){console.log('exception firing resume event from native');};");
// Forward to plugins
if (this.pluginManager != null) {
this.pluginManager.onResume(keepRunning);
}
//resume first and then resumeTimers
this.onResume();
// Resume JavaScript timers (including setInterval)
this.resumeTimers();
paused = false;
}
public void handleDestroy()
{
// Send destroy event to JavaScript
// Since baseUrl is set in loadUrlIntoView, if the user hits the Back button before loadUrl was called, we'll get an NPE on baseUrl (CB-2458)
this.loadUrl("javascript:try{cordova.require('cordova/channel').onDestroy.fire();}catch(e){};");
// Load blank page so that JavaScript onunload is called
this.loadUrl("about:blank");
// Forward to plugins
if (this.pluginManager != null) {
this.pluginManager.onDestroy();
}
// unregister the receiver
if (this.receiver != null) {
try {
this.cordova.getActivity().unregisterReceiver(this.receiver);
} catch (Exception e) {
Log.e(TAG, "Error unregistering configuration receiver: " + e.getMessage(), e);
}
}
}
public void onNewIntent(Intent intent)
{
//Forward to plugins
if (this.pluginManager != null) {
this.pluginManager.onNewIntent(intent);
}
}
public boolean isPaused()
{
return paused;
}
public boolean hadKeyEvent() {
return handleButton;
}
// Wrapping these functions in their own class prevents warnings in adb like:
// VFY: unable to resolve virtual method 285: Landroid/webkit/AmazonWebSettings;.setAllowUniversalAccessFromFileURLs
@TargetApi(16)
private static class Level16Apis {
static void enableUniversalAccess(AmazonWebSettings settings) {
settings.setAllowUniversalAccessFromFileURLs(true);
}
}
public void printBackForwardList() {
AmazonWebBackForwardList currentList = this.copyBackForwardList();
int currentSize = currentList.getSize();
for(int i = 0; i < currentSize; ++i)
{
AmazonWebHistoryItem item = currentList.getItemAtIndex(i);
String url = item.getUrl();
LOG.d(TAG, "The URL at index: " + Integer.toString(i) + "is " + url );
}
}
// canGoBack() alone is unreliable here, so compare the current URL against the first history entry instead.
public boolean startOfHistory()
{
AmazonWebBackForwardList currentList = this.copyBackForwardList();
AmazonWebHistoryItem item = currentList.getItemAtIndex(0);
if( item!=null){ // Null-fence in case they haven't called loadUrl yet (CB-2458)
String url = item.getUrl();
String currentUrl = this.getUrl();
LOG.d(TAG, "The current URL is: " + currentUrl);
LOG.d(TAG, "The URL at item 0 is:" + url);
return currentUrl.equals(url);
}
return false;
}
public void showCustomView(View view, AmazonWebChromeClient.CustomViewCallback callback) {
// This code is adapted from the original Android Browser code, licensed under the Apache License, Version 2.0
Log.d(TAG, "showing Custom View");
// if a view already exists then immediately terminate the new one
if (mCustomView != null) {
callback.onCustomViewHidden();
return;
}
// Store the view and its callback for later (to kill it properly)
mCustomView = view;
mCustomViewCallback = callback;
// Add the custom view to its container.
ViewGroup parent = (ViewGroup) this.getParent();
parent.addView(view, COVER_SCREEN_GRAVITY_CENTER);
// Hide the content view.
this.setVisibility(View.GONE);
// Finally show the custom view container.
parent.setVisibility(View.VISIBLE);
parent.bringToFront();
}
public void hideCustomView() {
// This code is adapted from the original Android Browser code, licensed under the Apache License, Version 2.0
Log.d(TAG, "Hiding Custom View");
if (mCustomView == null) return;
// Hide the custom view.
mCustomView.setVisibility(View.GONE);
// Remove the custom view from its container.
ViewGroup parent = (ViewGroup) this.getParent();
parent.removeView(mCustomView);
mCustomView = null;
mCustomViewCallback.onCustomViewHidden();
// Show the content view.
this.setVisibility(View.VISIBLE);
}
/**
* If the video overlay is showing then we need to know,
* as it affects back button handling.
*
* @return true if custom view is showing
*/
public boolean isCustomViewShowing() {
return mCustomView != null;
}
public AmazonWebBackForwardList restoreState(Bundle savedInstanceState)
{
AmazonWebBackForwardList myList = super.restoreState(savedInstanceState);
Log.d(TAG, "AmazonWebView restoration crew now restoring!");
//Initialize the plugin manager once more
this.pluginManager.init();
return myList;
}
public void storeResult(int requestCode, int resultCode, Intent intent) {
mResult = new ActivityResult(requestCode, resultCode, intent);
}
public CordovaResourceApi getResourceApi() {
return resourceApi;
}
}
|
Added overloaded constructor for CordovaWebView with Bundle as parameter.
Need it for AmazonWebChromeClient.onCreateWindow callback.
|
framework/src/org/apache/cordova/CordovaWebView.java
|
Added overloaded constructor for CordovaWebView with Bundle as parameter. Need it for AmazonWebChromeClient.onCreateWindow callback.
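A minimal sketch of what such an overloaded constructor could look like; delegating to the existing single-argument constructor and restoring the saved state from the Bundle are illustrative assumptions, not the actual diff.

// Hypothetical sketch only -- the constructor actually added by this commit may differ.
public CordovaWebView(Context context, Bundle savedInstanceState) {
    this(context); // assumes the existing single-argument constructor performs the usual setup
    if (savedInstanceState != null) {
        // Restore the back/forward list handed over by AmazonWebChromeClient.onCreateWindow (assumption).
        this.restoreState(savedInstanceState);
    }
}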
|
|
Java
|
apache-2.0
|
ccc3f86e2d0b5a33b2b8ce0106c6a5f515f5b63e
| 0
|
apache/continuum,apache/continuum,apache/continuum
|
package org.apache.maven.continuum.execution.maven.m1;
/*
* Copyright 2004-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.maven.continuum.model.project.Project;
import org.apache.maven.continuum.model.project.ProjectDependency;
import org.apache.maven.continuum.model.project.ProjectDeveloper;
import org.apache.maven.continuum.model.project.ProjectNotifier;
import org.apache.maven.continuum.notification.ContinuumRecipientSource;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.util.StringUtils;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.codehaus.plexus.util.xml.Xpp3DomBuilder;
import java.io.File;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
/**
* @author <a href="mailto:trygvis@inamo.no">Trygve Laugstøl</a>
* @version $Id$
*/
public class DefaultMavenOneMetadataHelper
extends AbstractLogEnabled
implements MavenOneMetadataHelper
{
// ----------------------------------------------------------------------
// MavenOneMetadataHelper Implementation
// ----------------------------------------------------------------------
public void mapMetadata( File metadata, Project project )
throws MavenOneMetadataHelperException
{
Xpp3Dom mavenProject;
try
{
mavenProject = Xpp3DomBuilder.build( new FileReader( metadata ) );
}
catch ( Exception e )
{
throw new MavenOneMetadataHelperException( "Error while reading maven POM.", e );
}
// ----------------------------------------------------------------------
// We cannot deal with projects that use the <extend/> element because
// we don't have the whole source tree and we might be missing elements
// that are present in the parent.
// ----------------------------------------------------------------------
String extend = getValue( mavenProject, "extend", null );
if ( extend != null )
{
throw new MavenOneMetadataHelperException( "Cannot use a POM with an 'extend' element." );
}
// ----------------------------------------------------------------------
// Artifact and group id
// ----------------------------------------------------------------------
String groupId;
String artifactId;
String id = getValue( mavenProject, "id", null );
if ( !StringUtils.isEmpty( id ) )
{
groupId = id;
artifactId = id;
}
else
{
groupId = getValue( mavenProject, "groupId", project.getGroupId() );
if ( StringUtils.isEmpty( groupId ) )
{
throw new MavenOneMetadataHelperException( "Missing 'groupId' element in the POM." );
}
artifactId = getValue( mavenProject, "artifactId", project.getArtifactId() );
if ( StringUtils.isEmpty( artifactId ) )
{
throw new MavenOneMetadataHelperException( "Missing 'artifactId' element in the POM." );
}
}
// ----------------------------------------------------------------------
// version
// ----------------------------------------------------------------------
String version = getValue( mavenProject, "currentVersion", project.getVersion() );
if ( StringUtils.isEmpty( project.getVersion() ) && StringUtils.isEmpty( version ) )
{
throw new MavenOneMetadataHelperException( "Missing 'version' element in the POM." );
}
// ----------------------------------------------------------------------
// name
// ----------------------------------------------------------------------
String name = getValue( mavenProject, "name", project.getName() );
if ( StringUtils.isEmpty( project.getName() ) && StringUtils.isEmpty( name ) )
{
throw new MavenOneMetadataHelperException( "Missing 'name' element in POM." );
}
// ----------------------------------------------------------------------
// scm
// ----------------------------------------------------------------------
Xpp3Dom repository = mavenProject.getChild( "repository" );
String scmConnection;
if ( repository == null )
{
if ( !StringUtils.isEmpty( project.getScmUrl() ) )
{
scmConnection = project.getScmUrl();
}
else
{
throw new MavenOneMetadataHelperException( "Missing 'repository' element in the POM." );
}
}
else
{
scmConnection = getValue( repository, "developerConnection", project.getScmUrl() );
scmConnection = getValue( repository, "connection", scmConnection );
if ( StringUtils.isEmpty( scmConnection ) )
{
throw new MavenOneMetadataHelperException(
"Missing both anonymous and developer SCM connection URLs." );
}
}
// ----------------------------------------------------------------------
// Developers
// ----------------------------------------------------------------------
Xpp3Dom developers = mavenProject.getChild( "developers" );
if ( developers != null )
{
Xpp3Dom[] developersList = developers.getChildren();
List cds = new ArrayList();
for ( int i = 0; i < developersList.length; i++ )
{
Xpp3Dom developer = developersList[i];
ProjectDeveloper cd = new ProjectDeveloper();
cd.setScmId( getValue( developer, "id", null ) );
cd.setName( getValue( developer, "name", null ) );
cd.setEmail( getValue( developer, "email", null ) );
cds.add( cd );
}
project.setDevelopers( cds );
}
// ----------------------------------------------------------------------
// Dependencies
// ----------------------------------------------------------------------
Xpp3Dom dependencies = mavenProject.getChild( "dependencies" );
if ( dependencies != null )
{
Xpp3Dom[] dependenciesList = dependencies.getChildren();
List deps = new ArrayList();
for ( int i = 0; i < dependenciesList.length; i++ )
{
Xpp3Dom dependency = dependenciesList[i];
ProjectDependency cd = new ProjectDependency();
if ( getValue( dependency, "groupId", null ) != null )
{
cd.setGroupId( getValue( dependency, "groupId", null ) );
cd.setArtifactId( getValue( dependency, "artifactId", null ) );
}
else
{
cd.setGroupId( getValue( dependency, "id", null ) );
cd.setArtifactId( getValue( dependency, "id", null ) );
}
cd.setVersion( getValue( dependency, "version", null ) );
deps.add( cd );
}
project.setDependencies( deps );
}
// ----------------------------------------------------------------------
// notifiers
// ----------------------------------------------------------------------
Xpp3Dom build = mavenProject.getChild( "build" );
List notifiers = null;
ProjectNotifier notifier = new ProjectNotifier();
if ( build == null )
{
if ( project.getNotifiers() != null && !project.getNotifiers().isEmpty() )
{
notifiers = project.getNotifiers();
}
else
{
throw new MavenOneMetadataHelperException( "Missing 'build' element in the POM." );
}
}
else
{
String currentNagEmailAddress = null;
if ( project.getNotifiers() != null && !project.getNotifiers().isEmpty() )
{
for ( Iterator i = project.getNotifiers().iterator(); i.hasNext(); )
{
ProjectNotifier notif = (ProjectNotifier) i.next();
// Can we have another type for a maven 1 project?
if ( "mail".equals( notif.getType() ) )
{
currentNagEmailAddress = (String) notif.getConfiguration().get(
ContinuumRecipientSource.ADDRESS_FIELD );
}
}
}
String nagEmailAddress = getValue( build, "nagEmailAddress", currentNagEmailAddress );
if ( nagEmailAddress != null )
{
Properties props = new Properties();
props.put( ContinuumRecipientSource.ADDRESS_FIELD, nagEmailAddress );
notifier.setConfiguration( props );
notifier.setFrom( ProjectNotifier.FROM_PROJECT );
}
}
if ( notifiers == null && notifier.getConfiguration().isEmpty() )
{
throw new MavenOneMetadataHelperException(
"Missing 'nagEmailAddress' element in the 'build' element in the POM." );
}
else
{
if ( notifiers == null )
{
notifiers = new ArrayList();
}
notifiers.add( notifier );
// Add notifier defined by user
for ( Iterator i = project.getNotifiers().iterator(); i.hasNext(); )
{
ProjectNotifier notif = (ProjectNotifier) i.next();
if ( notif.isFromUser() )
{
ProjectNotifier userNotifier = new ProjectNotifier();
userNotifier.setType( notif.getType() );
userNotifier.setConfiguration( notif.getConfiguration() );
userNotifier.setFrom( notif.getFrom() );
notifiers.add( userNotifier );
}
}
}
// ----------------------------------------------------------------------
// Make the project
// ----------------------------------------------------------------------
project.setGroupId( groupId );
project.setArtifactId( artifactId );
project.setVersion( version );
project.setName( name );
project.setScmUrl( scmConnection );
project.setNotifiers( notifiers );
}
// ----------------------------------------------------------------------
//
// ----------------------------------------------------------------------
private String getValue( Xpp3Dom dom, String key, String defaultValue )
{
Xpp3Dom child = dom.getChild( key );
if ( child == null )
{
return defaultValue;
}
return child.getValue();
}
}
|
continuum-core/src/main/java/org/apache/maven/continuum/execution/maven/m1/DefaultMavenOneMetadataHelper.java
|
package org.apache.maven.continuum.execution.maven.m1;
/*
* Copyright 2004-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.maven.continuum.model.project.Project;
import org.apache.maven.continuum.model.project.ProjectNotifier;
import org.apache.maven.continuum.notification.ContinuumRecipientSource;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.util.StringUtils;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.codehaus.plexus.util.xml.Xpp3DomBuilder;
import java.io.File;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
/**
* @author <a href="mailto:trygvis@inamo.no">Trygve Laugstøl</a>
* @version $Id$
*/
public class DefaultMavenOneMetadataHelper
extends AbstractLogEnabled
implements MavenOneMetadataHelper
{
// ----------------------------------------------------------------------
// MavenOneMetadataHelper Implementation
// ----------------------------------------------------------------------
public void mapMetadata( File metadata, Project project )
throws MavenOneMetadataHelperException
{
Xpp3Dom mavenProject;
try
{
mavenProject = Xpp3DomBuilder.build( new FileReader( metadata ) );
}
catch ( Exception e )
{
throw new MavenOneMetadataHelperException( "Error while reading maven POM.", e );
}
// ----------------------------------------------------------------------
// We cannot deal with projects that use the <extend/> element because
// we don't have the whole source tree and we might be missing elements
// that are present in the parent.
// ----------------------------------------------------------------------
String extend = getValue( mavenProject, "extend", null );
if ( extend != null )
{
throw new MavenOneMetadataHelperException( "Cannot use a POM with an 'extend' element." );
}
// ----------------------------------------------------------------------
// Artifact and group id
// ----------------------------------------------------------------------
String groupId;
String artifactId;
String id = getValue( mavenProject, "id", null );
if ( !StringUtils.isEmpty( id ) )
{
groupId = id;
artifactId = id;
}
else
{
groupId = getValue( mavenProject, "groupId", project.getGroupId() );
if ( StringUtils.isEmpty( groupId ) )
{
throw new MavenOneMetadataHelperException( "Missing 'groupId' element in the POM." );
}
artifactId = getValue( mavenProject, "artifactId", project.getArtifactId() );
if ( StringUtils.isEmpty( artifactId ) )
{
throw new MavenOneMetadataHelperException( "Missing 'artifactId' element in the POM." );
}
}
// ----------------------------------------------------------------------
// version
// ----------------------------------------------------------------------
String version = getValue( mavenProject, "currentVersion", project.getVersion() );
if ( StringUtils.isEmpty( project.getVersion() ) && StringUtils.isEmpty( version ) )
{
throw new MavenOneMetadataHelperException( "Missing 'version' element in the POM." );
}
// ----------------------------------------------------------------------
// name
// ----------------------------------------------------------------------
String name = getValue( mavenProject, "name", project.getName() );
if ( StringUtils.isEmpty( project.getName() ) && StringUtils.isEmpty( name ) )
{
throw new MavenOneMetadataHelperException( "Missing 'name' element in POM." );
}
// ----------------------------------------------------------------------
// scm
// ----------------------------------------------------------------------
Xpp3Dom repository = mavenProject.getChild( "repository" );
String scmConnection;
if ( repository == null )
{
if ( !StringUtils.isEmpty( project.getScmUrl() ) )
{
scmConnection = project.getScmUrl();
}
else
{
throw new MavenOneMetadataHelperException( "Missing 'repository' element in the POM." );
}
}
else
{
scmConnection = getValue( repository, "developerConnection", project.getScmUrl() );
scmConnection = getValue( repository, "connection", scmConnection );
if ( StringUtils.isEmpty( scmConnection ) )
{
throw new MavenOneMetadataHelperException(
"Missing both anonymous and developer SCM connection URLs." );
}
}
// ----------------------------------------------------------------------
// notifiers
// ----------------------------------------------------------------------
Xpp3Dom build = mavenProject.getChild( "build" );
List notifiers = null;
ProjectNotifier notifier = new ProjectNotifier();
if ( build == null )
{
if ( project.getNotifiers() != null && !project.getNotifiers().isEmpty() )
{
notifiers = project.getNotifiers();
}
else
{
throw new MavenOneMetadataHelperException( "Missing 'build' element in the POM." );
}
}
else
{
String currentNagEmailAddress = null;
if ( project.getNotifiers() != null && !project.getNotifiers().isEmpty() )
{
for ( Iterator i = project.getNotifiers().iterator(); i.hasNext(); )
{
ProjectNotifier notif = (ProjectNotifier) i.next();
// Can we have an other type for maven 1 project?
if ( "mail".equals( notif.getType() ) )
{
currentNagEmailAddress = (String) notif.getConfiguration().get(
ContinuumRecipientSource.ADDRESS_FIELD );
}
}
}
String nagEmailAddress = getValue( build, "nagEmailAddress", currentNagEmailAddress );
if ( nagEmailAddress != null )
{
Properties props = new Properties();
props.put( ContinuumRecipientSource.ADDRESS_FIELD, nagEmailAddress );
notifier.setConfiguration( props );
}
}
if ( notifiers == null && notifier.getConfiguration().isEmpty() )
{
throw new MavenOneMetadataHelperException(
"Missing 'nagEmailAddress' element in the 'build' element in the POM." );
}
else
{
if ( notifiers == null )
{
notifiers = new ArrayList();
}
notifiers.add( notifier );
}
// ----------------------------------------------------------------------
// Make the project
// ----------------------------------------------------------------------
project.setGroupId( groupId );
project.setArtifactId( artifactId );
project.setVersion( version );
project.setName( name );
project.setScmUrl( scmConnection );
project.setNotifiers( notifiers );
}
// ----------------------------------------------------------------------
//
// ----------------------------------------------------------------------
private String getValue( Xpp3Dom dom, String key, String defaultValue )
{
Xpp3Dom child = dom.getChild( key );
if ( child == null )
{
return defaultValue;
}
return child.getValue();
}
}
|
o Add developers
o Add dependencies
o Fix notifiers initialization
git-svn-id: 1d22bf2b43db35b985fe5d7437c243537c14eeaa@291976 13f79535-47bb-0310-9956-ffa450edef68
|
continuum-core/src/main/java/org/apache/maven/continuum/execution/maven/m1/DefaultMavenOneMetadataHelper.java
|
o Add developers o Add dependencies o Fix notifiers initialization
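A rough usage sketch of the updated helper (not part of the commit): it feeds a minimal maven-1 style POM through mapMetadata to exercise the new developers and dependencies mapping. The class name, temp-file handling, element values, and the getters used at the end are assumptions for illustration.

import org.apache.maven.continuum.execution.maven.m1.DefaultMavenOneMetadataHelper;
import org.apache.maven.continuum.model.project.Project;

import java.io.File;
import java.io.FileWriter;

public class MapMetadataExample
{
    public static void main( String[] args )
        throws Exception
    {
        // Minimal maven-1 POM covering id, version, name, scm, developers, dependencies and nagEmailAddress.
        String pom = "<project>"
            + "<id>example-project</id>"
            + "<currentVersion>1.0</currentVersion>"
            + "<name>Example</name>"
            + "<repository><connection>scm:svn:http://example.org/repo/trunk</connection></repository>"
            + "<developers><developer><id>jdoe</id><name>Jane Doe</name><email>jdoe@example.org</email></developer></developers>"
            + "<dependencies><dependency><groupId>junit</groupId><artifactId>junit</artifactId><version>3.8.1</version></dependency></dependencies>"
            + "<build><nagEmailAddress>ci@example.org</nagEmailAddress></build>"
            + "</project>";

        // Write the POM to a temporary file, since mapMetadata reads from a File.
        File metadata = File.createTempFile( "project", ".xml" );
        FileWriter writer = new FileWriter( metadata );
        writer.write( pom );
        writer.close();

        Project project = new Project();
        new DefaultMavenOneMetadataHelper().mapMetadata( metadata, project );

        // Getters assumed from the Modello-generated model.
        System.out.println( project.getDevelopers().size() + " developer(s), "
            + project.getDependencies().size() + " dependency(ies)" );
    }
}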
|
|
Java
|
bsd-2-clause
|
13171aa1f3e0829a20946013ef40ecac5ae5e453
| 0
|
JFormDesigner/markdown-writer-fx,JFormDesigner/markdown-writer-fx,JFormDesigner/markdown-writer-fx
|
/*
* Copyright (c) 2015 Karl Tauber <karl at jformdesigner dot com>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* o Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.markdownwriterfx.editor;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import javafx.application.Platform;
import org.fxmisc.richtext.StyleClassedTextArea;
import org.fxmisc.richtext.StyleSpans;
import org.fxmisc.richtext.StyleSpansBuilder;
import org.pegdown.ast.*;
/**
* Markdown syntax highlighter.
*
* Uses pegdown AST.
*
* @author Karl Tauber
*/
class MarkdownSyntaxHighlighter
implements Visitor
{
private enum StyleClass {
strong,
em,
// headers
h1,
h2,
h3,
h4,
h5,
h6,
};
/**
* style bits (1 << StyleClass.ordinal()) for each character
* simplifies implementation of overlapping styles
*/
private int[] styleClassBits;
static void highlight(StyleClassedTextArea textArea, RootNode astRoot) {
assert StyleClass.values().length <= 32;
assert Platform.isFxApplicationThread();
textArea.setStyleSpans(0, new MarkdownSyntaxHighlighter()
.computeHighlighting(astRoot, textArea.getLength()));
}
private MarkdownSyntaxHighlighter() {
}
private StyleSpans<Collection<String>> computeHighlighting(RootNode astRoot, int textLength) {
styleClassBits = new int[textLength];
// visit all nodes
astRoot.accept(this);
// build style spans
StyleSpansBuilder<Collection<String>> spansBuilder = new StyleSpansBuilder<>();
if (styleClassBits.length > 0) {
int spanStart = 0;
int previousBits = styleClassBits[0];
for (int i = 1; i < styleClassBits.length; i++) {
int bits = styleClassBits[i];
if (bits == previousBits)
continue;
spansBuilder.add(toStyleClasses(previousBits), i - spanStart);
spanStart = i;
previousBits = bits;
}
spansBuilder.add(toStyleClasses(previousBits), styleClassBits.length - spanStart);
} else
spansBuilder.add(Collections.emptyList(), 0);
return spansBuilder.create();
}
private Collection<String> toStyleClasses(int bits) {
if (bits == 0)
return Collections.emptyList();
Collection<String> styleClasses = new ArrayList<>(1);
for (StyleClass styleClass : StyleClass.values()) {
if ((bits & (1 << styleClass.ordinal())) != 0)
styleClasses.add(styleClass.name());
}
return styleClasses;
}
@Override
public void visit(AbbreviationNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(AnchorLinkNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(AutoLinkNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(BlockQuoteNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(BulletListNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(CodeNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(DefinitionListNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(DefinitionNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(DefinitionTermNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(ExpImageNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(ExpLinkNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(HeaderNode node) {
StyleClass styleClass;
switch (node.getLevel()) {
case 1: styleClass = StyleClass.h1; break;
case 2: styleClass = StyleClass.h2; break;
case 3: styleClass = StyleClass.h3; break;
case 4: styleClass = StyleClass.h4; break;
case 5: styleClass = StyleClass.h5; break;
case 6: styleClass = StyleClass.h6; break;
default: return;
}
setStyleClass(node, styleClass);
visitChildren(node);
}
@Override
public void visit(HtmlBlockNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(InlineHtmlNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(ListItemNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(MailLinkNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(OrderedListNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(ParaNode node) {
// TODO Auto-generated method stub
visitChildren(node);
}
@Override
public void visit(QuotedNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(ReferenceNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(RefImageNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(RefLinkNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(RootNode node) {
// TODO Auto-generated method stub
visitChildren(node);
}
@Override
public void visit(SimpleNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(SpecialTextNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(StrikeNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(StrongEmphSuperNode node) {
setStyleClass(node, node.isStrong() ? StyleClass.strong : StyleClass.em);
}
@Override
public void visit(TableBodyNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(TableCaptionNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(TableCellNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(TableColumnNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(TableHeaderNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(TableNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(TableRowNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(VerbatimNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(WikiLinkNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(TextNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(SuperNode node) {
// TODO Auto-generated method stub
visitChildren(node);
}
@Override
public void visit(Node node) {
// TODO Auto-generated method stub
}
private void visitChildren(SuperNode node) {
for (Node child : node.getChildren())
child.accept(this);
}
private void setStyleClass(Node node, StyleClass styleClass) {
// because PegDownProcessor.prepareSource() adds two trailing newlines
// to the text before parsing, we need to limit the end index
int start = node.getStartIndex();
int end = Math.min(node.getEndIndex(), styleClassBits.length);
int styleBit = 1 << styleClass.ordinal();
for (int i = start; i < end; i++)
styleClassBits[i] |= styleBit;
}
}
|
src/main/java/org/markdownwriterfx/editor/MarkdownSyntaxHighlighter.java
|
/*
* Copyright (c) 2015 Karl Tauber <karl at jformdesigner dot com>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* o Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.markdownwriterfx.editor;
import java.util.Collection;
import java.util.Collections;
import javafx.application.Platform;
import org.fxmisc.richtext.StyleClassedTextArea;
import org.fxmisc.richtext.StyleSpans;
import org.fxmisc.richtext.StyleSpansBuilder;
import org.pegdown.ast.*;
/**
* Markdown syntax highlighter.
*
* Uses pegdown AST.
*
* @author Karl Tauber
*/
class MarkdownSyntaxHighlighter
implements Visitor
{
private int textLength;
private StyleSpansBuilder<Collection<String>> spansBuilder;
private int nextIndex;
static void highlight(StyleClassedTextArea textArea, RootNode astRoot) {
assert Platform.isFxApplicationThread();
textArea.setStyleSpans(0, new MarkdownSyntaxHighlighter()
.computeHighlighting(astRoot, textArea.getLength()));
}
private MarkdownSyntaxHighlighter() {
}
private StyleSpans<Collection<String>> computeHighlighting(RootNode astRoot, int textLength) {
this.textLength = textLength;
spansBuilder = new StyleSpansBuilder<>();
nextIndex = 0;
astRoot.accept(this);
spansBuilder.add(Collections.emptyList(), textLength - nextIndex);
return spansBuilder.create();
}
@Override
public void visit(AbbreviationNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(AnchorLinkNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(AutoLinkNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(BlockQuoteNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(BulletListNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(CodeNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(DefinitionListNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(DefinitionNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(DefinitionTermNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(ExpImageNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(ExpLinkNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(HeaderNode node) {
setStyleClass(node, "h" + node.getLevel());
}
@Override
public void visit(HtmlBlockNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(InlineHtmlNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(ListItemNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(MailLinkNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(OrderedListNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(ParaNode node) {
// TODO Auto-generated method stub
visitChildren(node);
}
@Override
public void visit(QuotedNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(ReferenceNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(RefImageNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(RefLinkNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(RootNode node) {
// TODO Auto-generated method stub
visitChildren(node);
}
@Override
public void visit(SimpleNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(SpecialTextNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(StrikeNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(StrongEmphSuperNode node) {
setStyleClass(node, node.isStrong() ? "strong" : "em");
}
@Override
public void visit(TableBodyNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(TableCaptionNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(TableCellNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(TableColumnNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(TableHeaderNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(TableNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(TableRowNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(VerbatimNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(WikiLinkNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(TextNode node) {
// TODO Auto-generated method stub
}
@Override
public void visit(SuperNode node) {
// TODO Auto-generated method stub
visitChildren(node);
}
@Override
public void visit(Node node) {
// TODO Auto-generated method stub
}
private void visitChildren(SuperNode node) {
for (Node child : node.getChildren())
child.accept(this);
}
private void setStyleClass(Node node, String styleClass) {
// because PegDownProcessor.prepareSource() adds two trailing newlines
// to the text before parsing, we need to limit the end index
int startIndex = node.getStartIndex();
int endIndex = Math.min(node.getEndIndex(), textLength);
spansBuilder.add(Collections.emptyList(), startIndex - nextIndex);
spansBuilder.add(Collections.singleton(styleClass), endIndex - startIndex);
nextIndex = endIndex;
}
}
|
MarkdownSyntaxHighlighter: support overlapping styles
|
src/main/java/org/markdownwriterfx/editor/MarkdownSyntaxHighlighter.java
|
MarkdownSyntaxHighlighter: support overlapping styles
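A small standalone illustration (an assumption, not taken from the project) of the bit-mask encoding the rewritten highlighter uses for overlapping styles: each StyleClass contributes one bit per character, so overlapping spans simply OR their bits together, mirroring the toStyleClasses logic above.

import java.util.ArrayList;
import java.util.Collection;

public class OverlappingStylesDemo {
    // Same ordering as the highlighter's StyleClass enum: strong, em, h1..h6.
    enum StyleClass { strong, em, h1, h2, h3, h4, h5, h6 }

    public static void main(String[] args) {
        int bits = 0;
        bits |= 1 << StyleClass.strong.ordinal(); // a "**bold**" span
        bits |= 1 << StyleClass.h1.ordinal();     // a "# heading" span covering the same characters

        // Decode the bit mask back into style class names, as toStyleClasses does.
        Collection<String> styleClasses = new ArrayList<>();
        for (StyleClass styleClass : StyleClass.values()) {
            if ((bits & (1 << styleClass.ordinal())) != 0) {
                styleClasses.add(styleClass.name());
            }
        }
        System.out.println(styleClasses); // prints [strong, h1]
    }
}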
|
|
Java
|
bsd-3-clause
|
1ffafa657716c29ac459deea557904de14289be6
| 0
|
oci-pronghorn/GreenLightning,oci-pronghorn/GreenLightning,oci-pronghorn/GreenLightning
|
package com.ociweb.gl.api;
import com.ociweb.gl.impl.*;
import com.ociweb.gl.impl.schema.MessageSubscription;
import com.ociweb.gl.impl.schema.TrafficOrderSchema;
import com.ociweb.gl.impl.stage.EgressConverter;
import com.ociweb.gl.impl.stage.IngressConverter;
import com.ociweb.gl.impl.stage.ReactiveListenerStage;
import com.ociweb.gl.impl.stage.ReactiveManagerPipeConsumer;
import com.ociweb.pronghorn.network.NetGraphBuilder;
import com.ociweb.pronghorn.network.ServerCoordinator;
import com.ociweb.pronghorn.network.ServerPipesConfig;
import com.ociweb.pronghorn.network.http.HTTP1xRouterStageConfig;
import com.ociweb.pronghorn.network.module.FileReadModuleStage;
import com.ociweb.pronghorn.network.schema.HTTPRequestSchema;
import com.ociweb.pronghorn.network.schema.NetPayloadSchema;
import com.ociweb.pronghorn.network.schema.NetResponseSchema;
import com.ociweb.pronghorn.network.schema.ServerResponseSchema;
import com.ociweb.pronghorn.pipe.DataInputBlobReader;
import com.ociweb.pronghorn.pipe.Pipe;
import com.ociweb.pronghorn.pipe.PipeConfig;
import com.ociweb.pronghorn.pipe.PipeConfigManager;
import com.ociweb.pronghorn.pipe.util.hash.IntHashTable;
import com.ociweb.pronghorn.stage.PronghornStage;
import com.ociweb.pronghorn.stage.route.ReplicatorStage;
import com.ociweb.pronghorn.stage.scheduling.GraphManager;
import com.ociweb.pronghorn.stage.scheduling.NonThreadScheduler;
import com.ociweb.pronghorn.stage.scheduling.ScriptedFixedThreadsScheduler;
import com.ociweb.pronghorn.stage.scheduling.StageScheduler;
import com.ociweb.pronghorn.stage.test.PipeCleanerStage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.concurrent.TimeUnit;
public class MsgRuntime<B extends BuilderImpl, L extends ListenerFilter> {
public static final Logger logger = LoggerFactory.getLogger(MsgRuntime.class);
protected static final int nsPerMS = 1_000_000;
public B builder;
protected final GraphManager gm;
protected final String[] args;
private StageScheduler scheduler;
protected String telemetryHost;
protected void setScheduler(StageScheduler scheduler) {
this.scheduler = scheduler;
}
//NOTE: keep short since the MessagePubSubStage will STOP consuming messages until the one put on here
//      is actually taken off and consumed. There is little benefit to making this longer.
protected static final int defaultCommandChannelSubscriberLength = 8;
protected static final int defaultCommandChannelLength = 16;
protected static final int defaultCommandChannelMaxPayload = 256; //largest i2c request or pub sub payload
protected static final int defaultCommandChannelHTTPMaxPayload = 1<<14; //must be at least 32K for TLS support
protected boolean transducerAutowiring = true;
private PipeConfig<HTTPRequestSchema> fileRequestConfig;// = builder.restPipeConfig.grow2x();
private int netResponsePipeIdxCounter = 0;//this implementation is dependent upon graphManager returning the pipes in the order created!
protected int netResponsePipeIdx = -1;
protected int subscriptionPipeIdx = 0; //this implementation is dependent upon graphManager returning the pipes in the order created!
protected final IntHashTable subscriptionPipeLookup = new IntHashTable(10);//NOTE: this is a maximum of 1024 listeners
private BridgeConfig[] bridges = new BridgeConfig[0];
protected int parallelInstanceUnderActiveConstruction = -1;
protected Pipe<?>[] outputPipes = null;
protected ChildClassScannerVisitor gatherPipesVisitor = new ChildClassScannerVisitor<MsgCommandChannel>() {
@Override
public boolean visit(MsgCommandChannel cmdChnl, Object topParent) {
IntHashTable usageChecker = getUsageChecker();
if (null!=usageChecker) {
if (!ChildClassScanner.notPreviouslyHeld(cmdChnl, topParent, usageChecker)) {
logger.error("Command channel found in "+
topParent.getClass().getSimpleName()+
" can not be used in more than one Behavior");
assert(false) : "A CommandChannel instance can only be used exclusivly by one object or lambda. Double check where CommandChannels are passed in.";
}
}
MsgCommandChannel.setListener(cmdChnl, (Behavior)topParent);
//add this to the count of publishers
//CharSequence[] supportedTopics = cmdChnl.supportedTopics();
//get count of subscribers per topic as well.
//get the pipe ID of the singular PubSub...
outputPipes = PronghornStage.join(outputPipes, cmdChnl.getOutputPipes());
return true;//keep looking
}
};
public void disableTransducerAutowiring() {
transducerAutowiring = false;
}
private void keepBridge(BridgeConfig bridge) {
boolean isFound = false;
int i = bridges.length;
while (--i>=0) {
isFound |= bridge == bridges[i];
}
if (!isFound) {
i = bridges.length;
BridgeConfig[] newArray = new BridgeConfig[i+1];
System.arraycopy(bridges, 0, newArray, 0, i);
newArray[i] = bridge;
bridges = newArray;
}
}
public MsgRuntime(String[] args, String name) {
this.gm = new GraphManager(name);
this.args = args != null ? args : new String[0];
}
public String[] args() {
return args;
}
public final <T,S> S bridgeSubscription(CharSequence topic, BridgeConfig<T,S> config) {
long id = config.addSubscription(topic);
keepBridge(config);
return config.subscriptionConfigurator(id);
}
public final <T,S> S bridgeSubscription(CharSequence internalTopic, CharSequence extrnalTopic, BridgeConfig<T,S> config) {
long id = config.addSubscription(internalTopic,extrnalTopic);
keepBridge(config);
return config.subscriptionConfigurator(id);
}
public final <T,S> S bridgeSubscription(CharSequence internalTopic, CharSequence extrnalTopic, BridgeConfig<T,S> config, IngressConverter converter) {
long id = config.addSubscription(internalTopic,extrnalTopic,converter);
keepBridge(config);
return config.subscriptionConfigurator(id);
}
public final <T,S> T bridgeTransmission(CharSequence topic, BridgeConfig<T,S> config) {
long id = config.addTransmission(this, topic);
keepBridge(config);
return config.transmissionConfigurator(id);
}
public final <T,S> T bridgeTransmission(CharSequence internalTopic, CharSequence extrnalTopic, BridgeConfig<T,S> bridge) {
long id = bridge.addTransmission(this, internalTopic,extrnalTopic);
keepBridge(bridge);
return bridge.transmissionConfigurator(id);
}
public final <T,S> T bridgeTransmission(CharSequence internalTopic, CharSequence extrnalTopic, BridgeConfig<T,S> config, EgressConverter converter) {
long id = config.addTransmission(this, internalTopic,extrnalTopic, converter);
keepBridge(config);
return config.transmissionConfigurator(id);
}
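//Illustrative usage of the bridge methods above (names are hypothetical): given some BridgeConfig
//implementation 'bridgeConfig', runtime.bridgeSubscription("in/topic", "ext/topic", bridgeConfig)
//and runtime.bridgeTransmission("in/topic", "ext/topic", bridgeConfig) each record the mapping on
//the config, keep the bridge so declareBehavior can call finalizeDeclareConnections on it later,
//and return the config's subscription/transmission configurator for further setup.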
public final L addRestListener(RestListener listener) {
return (L) registerListenerImpl(listener);
}
public final L addResponseListener(HTTPResponseListener listener) {
return (L) registerListenerImpl(listener);
}
public final L addStartupListener(StartupListener listener) {
return (L) registerListenerImpl(listener);
}
public final L addShutdownListener(ShutdownListener listener) {
return (L) registerListenerImpl(listener);
}
public final L addTimePulseListener(TimeListener listener) {
return (L) registerListenerImpl(listener);
}
public final L addPubSubListener(PubSubListener listener) {
return (L) registerListenerImpl(listener);
}
public final <E extends Enum<E>> L addStateChangeListener(StateChangeListener<E> listener) {
return (L) registerListenerImpl(listener);
}
public L registerListener(Behavior listener) {
return (L) registerListenerImpl(listener);
}
/////
public final L addRestListener(String id, RestListener listener) {
return (L) registerListenerImpl(id, listener);
}
public final L addResponseListener(String id, HTTPResponseListener listener) {
return (L) registerListenerImpl(id, listener);
}
public final L addStartupListener(String id, StartupListener listener) {
return (L) registerListenerImpl(id, listener);
}
public final L addShutdownListener(String id, ShutdownListener listener) {
return (L) registerListenerImpl(id, listener);
}
public final L addTimePulseListener(String id, TimeListener listener) {
return (L) registerListenerImpl(id, listener);
}
public final L addPubSubListener(String id, PubSubListener listener) {
return (L) registerListenerImpl(id, listener);
}
public final <E extends Enum<E>> L addStateChangeListener(String id, StateChangeListener<E> listener) {
return (L) registerListenerImpl(id, listener);
}
public L registerListener(String id, Behavior listener) {
return (L) registerListenerImpl(id, listener);
}
public long fieldId(int routeId, byte[] fieldName) {
return builder.fieldId(routeId, fieldName);
}
protected void logStageScheduleRates() {
int totalStages = GraphManager.countStages(gm);
for(int i=1;i<=totalStages;i++) {
PronghornStage s = GraphManager.getStage(gm, i);
if (null != s) {
Object rate = GraphManager.getNota(gm, i, GraphManager.SCHEDULE_RATE, null);
if (null == rate) {
logger.debug("{} is running without breaks",s);
} else {
logger.debug("{} is running at rate of {}",s,rate);
}
}
}
}
public String getArgumentValue(String longName, String shortName, String defaultValue) {
return getOptArg(longName,shortName, args, defaultValue);
}
public boolean hasArgument(String longName, String shortName) {
return hasArg(longName, shortName, this.args);
}
public static String getOptArg(String longName, String shortName, String[] args, String defaultValue) {
String prev = null;
for (String token : args) {
if (longName.equals(prev) || shortName.equals(prev)) {
if (token == null || token.trim().length() == 0 || token.startsWith("-")) {
return defaultValue;
}
return reportChoice(longName, shortName, token.trim());
}
prev = token;
}
return reportChoice(longName, shortName, defaultValue);
}
public static boolean hasArg(String longName, String shortName, String[] args) {
for(String token : args) {
if(longName.equals(token) || shortName.equals(token)) {
reportChoice(longName, shortName, "");
return true;
}
}
return false;
}
static String reportChoice(final String longName, final String shortName, final String value) {
System.out.append(longName).append(" ").append(shortName).append(" ").append(value).append("\n");
return value;
}
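//Illustrative example of the argument helpers above: with args = {"--port", "8080", "-v"},
//getOptArg("--port", "-p", args, "80") returns "8080" (the token following the matched name) and
//hasArg("--verbose", "-v", args) returns true; a missing value, or one beginning with '-', falls
//back to the supplied default.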
protected void configureStageRate(Object listener, ReactiveListenerStage stage) {
//if we have a time event turn it on.
long rate = builder.getTriggerRate();
if (rate>0 && listener instanceof TimeListener) {
stage.setTimeEventSchedule(rate, builder.getTriggerStart());
//Since we are using the time schedule we must set the stage to be faster
long customRate = (rate*nsPerMS)/NonThreadScheduler.granularityMultiplier;//in ns, granularityMultiplier times faster than the clock trigger
long appliedRate = Math.min(customRate,builder.getDefaultSleepRateNS());
GraphManager.addNota(gm, GraphManager.SCHEDULE_RATE, appliedRate, stage);
}
}
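//Worked example with illustrative numbers: if builder.getTriggerRate() is 20 ms and
//NonThreadScheduler.granularityMultiplier is 4, customRate = 20 * 1_000_000 / 4 = 5_000_000 ns and
//the stage is scheduled at min(customRate, builder.getDefaultSleepRateNS()), so it always polls
//faster than the time trigger it must service.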
public StageScheduler getScheduler() {
return scheduler;
}
public void shutdownRuntime() {
shutdownRuntime(3);
}
public void shutdownRuntime(final int secondsTimeout) {
//only do if not already done.
if (!isShutdownRequested()) {
if (null == scheduler || null == builder) {
System.exit(0);
return;
}
final Runnable lastCall = new Runnable() {
@Override
public void run() {
//all the software has now stopped so shutdown the hardware now.
builder.shutdown();
}
};
//notify all the reactors to begin shutdown.
ReactiveListenerStage.requestSystemShutdown(builder, new Runnable() {
@Override
public void run() {
scheduler.shutdown();
scheduler.awaitTermination(secondsTimeout, TimeUnit.SECONDS, lastCall, lastCall);
}
});
}
}
public boolean isShutdownRequested() {
return ReactiveListenerStage.isShutdownRequested(builder);
}
//////////
//only build this when assertions are on
//////////
public static IntHashTable cmdChannelUsageChecker;
static {
assert(setupForChannelAssertCheck());
}
private static boolean setupForChannelAssertCheck() {
cmdChannelUsageChecker = new IntHashTable(9);
return true;
}
private static IntHashTable getUsageChecker() {
return cmdChannelUsageChecker;
}
protected int addGreenPipesCount(Behavior listener, int pipesCount) {
if (this.builder.isListeningToHTTPResponse(listener)) {
pipesCount++; //these are calls to URL responses
}
if ( (!this.builder.isAllPrivateTopics())
&& this.builder.isListeningToSubscription(listener)) {
pipesCount++;
}
if (this.builder.isListeningHTTPRequest(listener)) {
pipesCount += ListenerConfig.computeParallel(builder, parallelInstanceUnderActiveConstruction);
}
return pipesCount;
}
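//NOTE: populateGreenPipes below must fill exactly the number of slots counted here; it performs the
//same listener checks in the same order and fills inputPipes from the end of the array backwards.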
protected void populateGreenPipes(Behavior listener, int pipesCount, Pipe<?>[] inputPipes) {
//if this listener is an HTTP listener then add its behavior id for this pipe
if (this.builder.isListeningToHTTPResponse(listener)) {
inputPipes[--pipesCount] = buildNetResponsePipe();
netResponsePipeIdx = netResponsePipeIdxCounter++;
builder.registerHTTPClientId(builder.behaviorId(listener), netResponsePipeIdx);
}
if ((!this.builder.isAllPrivateTopics())
&& this.builder.isListeningToSubscription(listener)) {
inputPipes[--pipesCount] = buildPublishPipe(listener);
}
//if we push to this 1 pipe all the requests...
//JoinStage to take N inputs and produce 1 output.
//we use splitter for single pipe to 2 databases
//we use different group for parallel processing
//for multiple we must send them all to the reactor.
if (this.builder.isListeningHTTPRequest(listener) ) {
Pipe<HTTPRequestSchema>[] httpRequestPipes;
httpRequestPipes = ListenerConfig.newHTTPRequestPipes(builder, ListenerConfig.computeParallel(builder, parallelInstanceUnderActiveConstruction));
int i = httpRequestPipes.length;
assert(i>0) : "This listens to Rest requests but none have been routed here";
while (--i >= 0) {
inputPipes[--pipesCount] = httpRequestPipes[i];
}
}
}
private Pipe<NetResponseSchema> buildNetResponsePipe() {
Pipe<NetResponseSchema> netResponsePipe = new Pipe<NetResponseSchema>(builder.pcm.getConfig(NetResponseSchema.class)) {
@SuppressWarnings("unchecked")
@Override
protected DataInputBlobReader<NetResponseSchema> createNewBlobReader() {
return new HTTPResponseReader(this);//, gm.recordTypeData);
}
};
return netResponsePipe;
}
/**
 * Builds the pipe which delivers all the data this object has requested via subscriptions elsewhere.
 * @param listener the behavior which will consume the subscription data
 * @return the MessageSubscription pipe registered for this listener
 */
public Pipe<MessageSubscription> buildPublishPipe(Object listener) {
assert(!builder.isAllPrivateTopics()) : "must not call when private topics are exclusively in use";
if (builder.isAllPrivateTopics()) {
throw new RuntimeException("Pub/sub subscription pipes must not be built when all topics are private");
}
Pipe<MessageSubscription> subscriptionPipe = buildMessageSubscriptionPipe();
//store this value for lookup later
//logger.info("adding hash listener {} to pipe ",System.identityHashCode(listener));
if (!IntHashTable.setItem(subscriptionPipeLookup, System.identityHashCode(listener), subscriptionPipeIdx++)) {
throw new RuntimeException("Could not find unique identityHashCode for "+listener.getClass().getCanonicalName());
}
assert(!IntHashTable.isEmpty(subscriptionPipeLookup));
return subscriptionPipe;
}
public Pipe<MessageSubscription> buildPublishPipe(int listenerHash) {
assert(!builder.isAllPrivateTopics()) : "must not call when private topics are exclusively in use";
if (builder.isAllPrivateTopics()) {
throw new RuntimeException("Pub/sub subscription pipes must not be built when all topics are private");
}
Pipe<MessageSubscription> subscriptionPipe = buildMessageSubscriptionPipe();
if (!IntHashTable.setItem(subscriptionPipeLookup, listenerHash, subscriptionPipeIdx++)) {
throw new RuntimeException("HashCode must be unique");
}
assert(!IntHashTable.isEmpty(subscriptionPipeLookup));
return subscriptionPipe;
}
private Pipe<MessageSubscription> buildMessageSubscriptionPipe() {
Pipe<MessageSubscription> subscriptionPipe = new Pipe<MessageSubscription>(builder.pcm.getConfig(MessageSubscription.class)) {
@SuppressWarnings("unchecked")
@Override
protected DataInputBlobReader<MessageSubscription> createNewBlobReader() {
return new MessageReader(this);//, gm.recordTypeData);
}
};
return subscriptionPipe;
}
protected void constructingParallelInstance(int i) {
parallelInstanceUnderActiveConstruction = i;
}
protected void constructingParallelInstancesEnding() {
parallelInstanceUnderActiveConstruction = -1;
}
//////////////////
//server and other behavior
//////////////////
@SuppressWarnings("unchecked")
public void declareBehavior(MsgApp app) {
//The server and telemetry http hosts/ports MUST be defined before we begin
//the declaration of behaviors because we must do binding to the host names.
//as a result this finalize must happen early.
builder.finalizeDeclareConnections();
////////////////////////////////////////////
if (builder.getHTTPServerConfig() != null) {
buildGraphForServer(app);
} else {
app.declareBehavior(this);
if (app instanceof MsgAppParallel) {
int parallelism = builder.parallelTracks();
//since server was not started and did not create each parallel instance this will need to be done here
for(int i = 0;i<parallelism;i++) { //when a server is configured this loop is not used; the server setup loops over the tracks instead
constructingParallelInstance(i);
((MsgAppParallel)app).declareParallelBehavior(this);
}
}
}
constructingParallelInstancesEnding();
//Init bridges
int b = bridges.length;
while (--b>=0) {
((BridgeConfigImpl)bridges[b]).finalizeDeclareConnections(this);
}
}
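//Server graph overview: buildGraphForServer wires the socket readers, the optional TLS unwrap
//stages and the routers in front of the declared behaviors, then buildLastHalfOfGraphForServer
//attaches the module request pipes, error response pipes and ordering supervisors ahead of the
//remaining output stages.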
private void buildGraphForServer(MsgApp app) {
HTTPServerConfig config = builder.getHTTPServerConfig();
ServerPipesConfig serverConfig = config.buildServerConfig(builder.parallelTracks());
ServerCoordinator serverCoord = new ServerCoordinator(
config.getCertificates(),
config.bindHost(),
config.bindPort(),
serverConfig.maxConnectionBitsOnServer,
serverConfig.maxConcurrentInputs,
serverConfig.maxConcurrentOutputs,
builder.parallelTracks(), false,
"Server",
config.defaultHostPath());
final int routerCount = builder.parallelTracks();
final Pipe<NetPayloadSchema>[] encryptedIncomingGroup = Pipe.buildPipes(serverConfig.maxConcurrentInputs, serverConfig.incomingDataConfig);
Pipe[] acks = NetGraphBuilder.buildSocketReaderStage(gm, serverCoord, routerCount, serverConfig, encryptedIncomingGroup);
Pipe[] handshakeIncomingGroup=null;
Pipe[] planIncomingGroup;
if (config.isTLS()) {
planIncomingGroup = Pipe.buildPipes(serverConfig.maxConcurrentInputs, serverConfig.incomingDataConfig);
handshakeIncomingGroup = NetGraphBuilder.populateGraphWithUnWrapStages(gm, serverCoord,
serverConfig.serverRequestUnwrapUnits, serverConfig.handshakeDataConfig,
encryptedIncomingGroup, planIncomingGroup, acks);
} else {
planIncomingGroup = encryptedIncomingGroup;
}
//Must call here so the beginning stages of the graph are drawn first when exporting graph.
app.declareBehavior(this);
buildLastHalfOfGraphForServer(app, serverConfig, serverCoord, routerCount,
acks, handshakeIncomingGroup, planIncomingGroup);
}
private void buildLastHalfOfGraphForServer(MsgApp app, ServerPipesConfig serverConfig,
ServerCoordinator serverCoord, final int routerCount, Pipe[] acks,
Pipe[] handshakeIncomingGroup,
Pipe[] planIncomingGroup) {
////////////////////////
//create the working modules
//////////////////////////
if (app instanceof MsgAppParallel) {
int p = builder.parallelTracks();
for (int i = 0; i < p; i++) {
constructingParallelInstance(i);
((MsgAppParallel)app).declareParallelBehavior(this); //this creates all the modules for this parallel instance
}
} else {
if (builder.parallelTracks()>1) {
throw new UnsupportedOperationException(
"Remove call to parallelism("+builder.parallelTracks()+") OR make the application implement GreenAppParallel or something extending it.");
}
}
//////////////////
//////////////////
HTTP1xRouterStageConfig routerConfig = builder.routerConfig();
ArrayList<Pipe> forPipeCleaner = new ArrayList<Pipe>();
Pipe<HTTPRequestSchema>[][] fromRouterToModules = new Pipe[routerCount][];
int t = routerCount;
int totalRequestPipes = 0;
while (--t>=0) {
//[router/parallel] then [parser/routes]
int path = routerConfig.totalPathsCount();
/////////////////
///for catch all
///////////////
if (path==0) {
path=1;
}
/////////////
fromRouterToModules[t] = new Pipe[path];
while (--path >= 0) {
ArrayList<Pipe<HTTPRequestSchema>> requestPipes = builder.buildFromRequestArray(t, path);
//with a single pipe just pass it one, otherwise use the replicator to fan out from a new single pipe.
int size = requestPipes.size();
totalRequestPipes += size;
if (1==size) {
fromRouterToModules[t][path] =
requestPipes.get(0);
} else {
//we only create a pipe when we are about to use the replicator
fromRouterToModules[t][path] =
builder.newHTTPRequestPipe(builder.pcm.getConfig(HTTPRequestSchema.class));
if (0==size) {
logger.info("warning there are routes without any consumers");
//we have no consumer so tie it to pipe cleaner
forPipeCleaner.add(fromRouterToModules[t][path]);
} else {
ReplicatorStage.newInstance(gm, fromRouterToModules[t][path], requestPipes.toArray(new Pipe[requestPipes.size()]));
}
}
}
if (0==totalRequestPipes) {
logger.warn("ERROR: includeRoutes or includeAllRoutes must be called on REST listener.");
}
}
if (!forPipeCleaner.isEmpty()) {
PipeCleanerStage.newInstance(gm, forPipeCleaner.toArray(new Pipe[forPipeCleaner.size()]));
}
//NOTE: building arrays of pipes grouped by parallel/routers heading out to order supervisor
Pipe<ServerResponseSchema>[][] fromModulesToOrderSuper = new Pipe[routerCount][];
Pipe<ServerResponseSchema>[] errorResponsePipes = new Pipe[routerCount];
PipeConfig<ServerResponseSchema> errConfig = ServerResponseSchema.instance.newPipeConfig(4, 512);
int r = routerCount;
while (--r>=0) {
errorResponsePipes[r] = new Pipe<ServerResponseSchema>(errConfig);
Pipe<ServerResponseSchema>[] temp =
fromModulesToOrderSuper[r] = PronghornStage.join(errorResponsePipes[r], builder.buildToOrderArray(r));
//this block is required to make sure the ordering stage has room
int c = temp.length;
while (--c>=0) {
//ensure that the ordering stage can consume messages of this size
serverConfig.ensureServerCanWrite(temp[c].config().maxVarLenSize());
}
}
boolean catchAll = builder.routerConfig().totalPathsCount()==0;
NetGraphBuilder.buildRouters(gm, planIncomingGroup, acks, fromRouterToModules,
errorResponsePipes, routerConfig, serverCoord,
catchAll);
//NOTE: this array populated here must be equal or larger than the fromModules..
Pipe<NetPayloadSchema>[] fromOrderedContent = NetGraphBuilder.buildRemainderOFServerStages(gm, serverCoord, serverConfig, handshakeIncomingGroup);
//NOTE: the fromOrderedContent must hold var len data which is greater than fromModulesToOrderSuper
NetGraphBuilder.buildOrderingSupers(gm, serverCoord, routerCount,
fromModulesToOrderSuper, fromOrderedContent);
}
//////////////////
//end of server and other behavior
//////////////////
public void setExclusiveTopics(MsgCommandChannel cc, String ... exclusiveTopics) {
// TODO Auto-generated method stub
throw new UnsupportedOperationException("Not yet implemented");
}
//Not for general consumption, only used when we need the low level Pipes to connect directly to the pub/sub or other dynamic subsystem.
public static GraphManager getGraphManager(MsgRuntime runtime) {
return runtime.gm;
}
public RouteFilter addFileServer(String path) { //adds server to all routes
final int parallelIndex = (-1 == parallelInstanceUnderActiveConstruction) ? 0 : parallelInstanceUnderActiveConstruction;
//due to internal implementation we must keep the same number of outputs as inputs.
Pipe<HTTPRequestSchema>[] inputs = new Pipe[1];
Pipe<ServerResponseSchema>[] outputs = new Pipe[1];
populateHTTPInOut(inputs, outputs, 0, parallelIndex);
File rootPath = buildFilePath(path);
FileReadModuleStage.newInstance(gm, inputs, outputs, builder.httpSpec, rootPath);
return new StageRouteFilter(inputs[0], builder, parallelIndex);
}
public RouteFilter addFileServer(String resourceRoot, String resourceDefault) {
final int parallelIndex = (-1 == parallelInstanceUnderActiveConstruction) ? 0 : parallelInstanceUnderActiveConstruction;
//due to internal implementation we must keep the same number of outputs as inputs.
Pipe<HTTPRequestSchema>[] inputs = new Pipe[1];
Pipe<ServerResponseSchema>[] outputs = new Pipe[1];
populateHTTPInOut(inputs, outputs, 0, parallelIndex);
FileReadModuleStage.newInstance(gm, inputs, outputs, builder.httpSpec, resourceRoot, resourceDefault);
return new StageRouteFilter(inputs[0], builder, parallelIndex);
}
private void populateHTTPInOut(Pipe<HTTPRequestSchema>[] inputs,
Pipe<ServerResponseSchema>[] outputs,
int idx, int parallelIndex) {
if (null == fileRequestConfig) {
fileRequestConfig = builder.pcm.getConfig(HTTPRequestSchema.class).grow2x();
}
inputs[idx] = builder.newHTTPRequestPipe(fileRequestConfig);
outputs[idx] = builder.newNetResponsePipe(builder.pcm.getConfig(ServerResponseSchema.class), parallelIndex);
}
//TODO: needs a lot of re-work.
private File buildFilePath(String path) {
// Strip URL space tokens from incoming path strings to avoid issues.
// TODO: This should be made garbage free.
// TODO: Is this expected behavior?
path = path.replaceAll("\\Q%20\\E", " ");
//TODO: MUST FIND PATH...
Enumeration<URL> resource;
try {
resource = ClassLoader.getSystemResources(path);
while (resource.hasMoreElements()) {
System.err.println("looking for resoruce: "+path+" and found "+String.valueOf(resource.nextElement()));
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// ClassLoader.getSystemResource(path);
File rootPath = new File(path);
if (!rootPath.exists()) {
//test if this is under development
File devPath = new File("./src/main/resources/"+path);
if (devPath.exists()) {
rootPath = devPath;
}
}
if (!rootPath.exists()) {
throw new UnsupportedOperationException("Path not found: "+rootPath);
}
return rootPath;
}
///////////////////////////
//end of file server
///////////////////////////
public Builder getBuilder(){
if(this.builder==null){
this.builder = (B) new BuilderImpl(gm,args);
}
return this.builder;
}
private ListenerFilter registerListenerImpl(final Behavior listener) {
return registerListenerImpl(null, listener);
}
private ListenerFilter registerListenerImpl(final String id, final Behavior listener) {
////////////
//OUTPUT
///////////
outputPipes = new Pipe<?>[0];
//extract pipes used by listener and use cmdChannelUsageChecker to confirm it is not re-used
ChildClassScanner.visitUsedByClass(listener, gatherPipesVisitor, MsgCommandChannel.class);//populates outputPipes
/////////////
//INPUT
//add green features, count first then create the pipes
//NOTE: each Behavior is inspected and will find Transducers which need inputs as well
/////////
int pipesCount = addGreenPipesCount(listener, 0);
Pipe<?>[] inputPipes = new Pipe<?>[pipesCount];
populateGreenPipes(listener, pipesCount, inputPipes);
//////////////////////
//////////////////////
//this is empty when transducerAutowiring is off
final ArrayList<ReactiveManagerPipeConsumer> consumers = new ArrayList<ReactiveManagerPipeConsumer>();
//extract this into common method to be called in GL and FL
if (transducerAutowiring) {
inputPipes = autoWireTransducers(listener, inputPipes, consumers);
}
if (null!=id) {
List<PrivateTopic> sourceTopics = builder.getPrivateTopicsFromSource(id);
int i = sourceTopics.size();
while (--i>=0) {
PrivateTopic privateTopic = sourceTopics.get(i);
outputPipes = PronghornStage.join(outputPipes, privateTopic.getPipe(parallelInstanceUnderActiveConstruction));
}
List<PrivateTopic> targetTopics = builder.getPrivateTopicsFromTarget(id);
int j = targetTopics.size();
while (--j>=0) {
PrivateTopic privateTopic = targetTopics.get(j);
inputPipes = PronghornStage.join(inputPipes, privateTopic.getPipe(parallelInstanceUnderActiveConstruction));
}
}
ReactiveListenerStage<?> reactiveListener = builder.createReactiveListener(gm, listener,
inputPipes, outputPipes, consumers,
parallelInstanceUnderActiveConstruction,id);
//finds all the command channels which make use of private topics.
reactiveListener.regPrivateTopics();
if (listener instanceof RestListenerBase) {
GraphManager.addNota(gm, GraphManager.DOT_RANK_NAME, "ModuleStage", reactiveListener);
}
/////////////////////
//StartupListener is not driven by any response data and is called when the stage is started up. no pipe needed.
/////////////////////
//TimeListener: time rate signals are sent from the stage itself and therefore do not need a pipe to consume.
/////////////////////
configureStageRate(listener,reactiveListener);
int testId = -1;
int i = inputPipes.length;
while (--i>=0) {
if (inputPipes[i]!=null && Pipe.isForSchema((Pipe<MessageSubscription>)inputPipes[i], MessageSubscription.class)) {
testId = inputPipes[i].id;
}
}
assert(-1==testId || GraphManager.allPipesOfType(gm, MessageSubscription.instance)[subscriptionPipeIdx-1].id==testId) : "GraphManager has returned the pipes out of the expected order";
return reactiveListener;
}
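//autoWireTransducers (below) scans the listener for ListenerTransducer instances; when any are
//found, each transducer gets its own copies of the relevant input pipes (fed via replicators) and a
//ReactiveManagerPipeConsumer is recorded for it, so transducers see the same events as the owning
//behavior.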
protected Pipe<?>[] autoWireTransducers(final Behavior listener, Pipe<?>[] inputPipes,
final ArrayList<ReactiveManagerPipeConsumer> consumers) {
if (inputPipes.length==0) {
return inputPipes;//no work since no inputs are used.
}
final Grouper g = new Grouper(inputPipes);
ChildClassScannerVisitor tVisitor = new ChildClassScannerVisitor() {
@Override
public boolean visit(Object child, Object topParent) {
if (g.additions()==0) {
//add first value
Pipe[] pipes = builder.operators.createPipes(builder, listener, g);
consumers.add(new ReactiveManagerPipeConsumer(listener, builder.operators, pipes));
g.add(pipes);
}
int c = consumers.size();
while (--c>=0) {
if (consumers.get(c).obj == child) {
//do not add this one it is already recorded
return true;
}
}
Pipe[] pipes = builder.operators.createPipes(builder, child, g);
consumers.add(new ReactiveManagerPipeConsumer(child, builder.operators, pipes));
g.add(pipes);
return true;
}
};
ChildClassScanner.visitUsedByClass(listener, tVisitor, ListenerTransducer.class);
if (g.additions()>0) {
inputPipes = g.firstArray();
g.buildReplicators(gm, consumers);
}
return inputPipes;
}
protected PipeConfigManager buildPipeManager() {
PipeConfigManager pcm = new PipeConfigManager();
pcm.addConfig(defaultCommandChannelLength,0,TrafficOrderSchema.class );
return pcm;
}
public static IntHashTable getSubPipeLookup(MsgRuntime runtime) {
return runtime.subscriptionPipeLookup;
}
}
|
src/main/java/com/ociweb/gl/api/MsgRuntime.java
|
package com.ociweb.gl.api;
import com.ociweb.gl.impl.*;
import com.ociweb.gl.impl.schema.MessageSubscription;
import com.ociweb.gl.impl.schema.TrafficOrderSchema;
import com.ociweb.gl.impl.stage.EgressConverter;
import com.ociweb.gl.impl.stage.IngressConverter;
import com.ociweb.gl.impl.stage.ReactiveListenerStage;
import com.ociweb.gl.impl.stage.ReactiveManagerPipeConsumer;
import com.ociweb.pronghorn.network.NetGraphBuilder;
import com.ociweb.pronghorn.network.ServerCoordinator;
import com.ociweb.pronghorn.network.ServerPipesConfig;
import com.ociweb.pronghorn.network.http.HTTP1xRouterStageConfig;
import com.ociweb.pronghorn.network.module.FileReadModuleStage;
import com.ociweb.pronghorn.network.schema.HTTPRequestSchema;
import com.ociweb.pronghorn.network.schema.NetPayloadSchema;
import com.ociweb.pronghorn.network.schema.NetResponseSchema;
import com.ociweb.pronghorn.network.schema.ServerResponseSchema;
import com.ociweb.pronghorn.pipe.DataInputBlobReader;
import com.ociweb.pronghorn.pipe.Pipe;
import com.ociweb.pronghorn.pipe.PipeConfig;
import com.ociweb.pronghorn.pipe.PipeConfigManager;
import com.ociweb.pronghorn.pipe.util.hash.IntHashTable;
import com.ociweb.pronghorn.stage.PronghornStage;
import com.ociweb.pronghorn.stage.route.ReplicatorStage;
import com.ociweb.pronghorn.stage.scheduling.GraphManager;
import com.ociweb.pronghorn.stage.scheduling.NonThreadScheduler;
import com.ociweb.pronghorn.stage.scheduling.ScriptedFixedThreadsScheduler;
import com.ociweb.pronghorn.stage.scheduling.StageScheduler;
import com.ociweb.pronghorn.stage.test.PipeCleanerStage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.concurrent.TimeUnit;
public class MsgRuntime<B extends BuilderImpl, L extends ListenerFilter> {
public static final Logger logger = LoggerFactory.getLogger(MsgRuntime.class);
protected static final int nsPerMS = 1_000_000;
public B builder;
protected final GraphManager gm;
protected final String[] args;
private StageScheduler scheduler;
private boolean hasPendingHighVolume;
protected String telemetryHost;
protected void setScheduler(StageScheduler scheduler) {
this.scheduler = scheduler;
if (hasPendingHighVolume) {
if (scheduler instanceof ScriptedFixedThreadsScheduler) {
((ScriptedFixedThreadsScheduler)scheduler).setEnsureLowLatency(false);
}
}
}
//NOTE: keep short since the MessagePubSubStage will STOP consuming messages until the one put on here
// is actually taken off and consumed. There is little benefit to making this longer.
protected static final int defaultCommandChannelSubscriberLength = 8;
protected static final int defaultCommandChannelLength = 16;
protected static final int defaultCommandChannelMaxPayload = 256; //largest i2c request or pub sub payload
protected static final int defaultCommandChannelHTTPMaxPayload = 1<<14; //must be at least 32K for TLS support
protected boolean transducerAutowiring = true;
private PipeConfig<HTTPRequestSchema> fileRequestConfig;// = builder.restPipeConfig.grow2x();
private int netResponsePipeIdxCounter = 0;//this implementation is dependent upon graphManager returning the pipes in the order created!
protected int netResponsePipeIdx = -1;
protected int subscriptionPipeIdx = 0; //this implementation is dependent upon graphManager returning the pipes in the order created!
protected final IntHashTable subscriptionPipeLookup = new IntHashTable(10);//NOTE: this is a maximum of 1024 listeners
private BridgeConfig[] bridges = new BridgeConfig[0];
protected int parallelInstanceUnderActiveConstruction = -1;
protected Pipe<?>[] outputPipes = null;
protected ChildClassScannerVisitor gatherPipesVisitor = new ChildClassScannerVisitor<MsgCommandChannel>() {
@Override
public boolean visit(MsgCommandChannel cmdChnl, Object topParent) {
IntHashTable usageChecker = getUsageChecker();
if (null!=usageChecker) {
if (!ChildClassScanner.notPreviouslyHeld(cmdChnl, topParent, usageChecker)) {
logger.error("Command channel found in "+
topParent.getClass().getSimpleName()+
" can not be used in more than one Behavior");
assert(false) : "A CommandChannel instance can only be used exclusivly by one object or lambda. Double check where CommandChannels are passed in.";
}
}
MsgCommandChannel.setListener(cmdChnl, (Behavior)topParent);
//add this to the count of publishers
//CharSequence[] supportedTopics = cmdChnl.supportedTopics();
//get count of subscribers per topic as well.
//get the pipe ID of the singular PubSub...
outputPipes = PronghornStage.join(outputPipes, cmdChnl.getOutputPipes());
return true;//keep looking
}
};
public void disableTransducerAutowiring() {
transducerAutowiring = false;
}
private void keepBridge(BridgeConfig bridge) {
boolean isFound = false;
int i = bridges.length;
while (--i>=0) {
isFound |= bridge == bridges[i];//compare against every stored bridge, not just the first
}
if (!isFound) {
i = bridges.length;
BridgeConfig[] newArray = new BridgeConfig[i+1];
System.arraycopy(bridges, 0, newArray, 0, i);
newArray[i] = bridge;
bridges = newArray;
}
}
public boolean setEnsureLowLatency(boolean value) {
if (scheduler instanceof ScriptedFixedThreadsScheduler) {
((ScriptedFixedThreadsScheduler)scheduler).setEnsureLowLatency(value);
return true;
} else {
if (null == scheduler) {
hasPendingHighVolume = !value;
return true;
} else {
logger.info("low latency switching is not supported for this scheduler");
return false;
}
}
}
public MsgRuntime(String[] args, String name) {
this.gm = new GraphManager(name);
this.args = args != null ? args : new String[0];
}
public String[] args() {
return args;
}
public final <T,S> S bridgeSubscription(CharSequence topic, BridgeConfig<T,S> config) {
long id = config.addSubscription(topic);
keepBridge(config);
return config.subscriptionConfigurator(id);
}
public final <T,S> S bridgeSubscription(CharSequence internalTopic, CharSequence externalTopic, BridgeConfig<T,S> config) {
long id = config.addSubscription(internalTopic,externalTopic);
keepBridge(config);
return config.subscriptionConfigurator(id);
}
public final <T,S> S bridgeSubscription(CharSequence internalTopic, CharSequence externalTopic, BridgeConfig<T,S> config, IngressConverter converter) {
long id = config.addSubscription(internalTopic,externalTopic,converter);
keepBridge(config);
return config.subscriptionConfigurator(id);
}
public final <T,S> T bridgeTransmission(CharSequence topic, BridgeConfig<T,S> config) {
long id = config.addTransmission(this, topic);
keepBridge(config);
return config.transmissionConfigurator(id);
}
public final <T,S> T bridgeTransmission(CharSequence internalTopic, CharSequence externalTopic, BridgeConfig<T,S> bridge) {
long id = bridge.addTransmission(this, internalTopic,externalTopic);
keepBridge(bridge);
return bridge.transmissionConfigurator(id);
}
public final <T,S> T bridgeTransmission(CharSequence internalTopic, CharSequence externalTopic, BridgeConfig<T,S> config, EgressConverter converter) {
long id = config.addTransmission(this, internalTopic,externalTopic, converter);
keepBridge(config);
return config.transmissionConfigurator(id);
}
public final L addRestListener(RestListener listener) {
return (L) registerListenerImpl(listener);
}
public final L addResponseListener(HTTPResponseListener listener) {
return (L) registerListenerImpl(listener);
}
public final L addStartupListener(StartupListener listener) {
return (L) registerListenerImpl(listener);
}
public final L addShutdownListener(ShutdownListener listener) {
return (L) registerListenerImpl(listener);
}
public final L addTimePulseListener(TimeListener listener) {
return (L) registerListenerImpl(listener);
}
public final L addPubSubListener(PubSubListener listener) {
return (L) registerListenerImpl(listener);
}
public final <E extends Enum<E>> L addStateChangeListener(StateChangeListener<E> listener) {
return (L) registerListenerImpl(listener);
}
public L registerListener(Behavior listener) {
return (L) registerListenerImpl(listener);
}
/////
public final L addRestListener(String id, RestListener listener) {
return (L) registerListenerImpl(id, listener);
}
public final L addResponseListener(String id, HTTPResponseListener listener) {
return (L) registerListenerImpl(id, listener);
}
public final L addStartupListener(String id, StartupListener listener) {
return (L) registerListenerImpl(id, listener);
}
public final L addShutdownListener(String id, ShutdownListener listener) {
return (L) registerListenerImpl(id, listener);
}
public final L addTimePulseListener(String id, TimeListener listener) {
return (L) registerListenerImpl(id, listener);
}
public final L addPubSubListener(String id, PubSubListener listener) {
return (L) registerListenerImpl(id, listener);
}
public final <E extends Enum<E>> L addStateChangeListener(String id, StateChangeListener<E> listener) {
return (L) registerListenerImpl(id, listener);
}
public L registerListener(String id, Behavior listener) {
return (L) registerListenerImpl(id, listener);
}
public long fieldId(int routeId, byte[] fieldName) {
return builder.fieldId(routeId, fieldName);
}
protected void logStageScheduleRates() {
int totalStages = GraphManager.countStages(gm);
for(int i=1;i<=totalStages;i++) {
PronghornStage s = GraphManager.getStage(gm, i);
if (null != s) {
Object rate = GraphManager.getNota(gm, i, GraphManager.SCHEDULE_RATE, null);
if (null == rate) {
logger.debug("{} is running without breaks",s);
} else {
logger.debug("{} is running at rate of {}",s,rate);
}
}
}
}
public String getArgumentValue(String longName, String shortName, String defaultValue) {
return getOptArg(longName,shortName, args, defaultValue);
}
public boolean hasArgument(String longName, String shortName) {
return hasArg(longName, shortName, this.args);
}
public static String getOptArg(String longName, String shortName, String[] args, String defaultValue) {
String prev = null;
for (String token : args) {
if (longName.equals(prev) || shortName.equals(prev)) {
if (token == null || token.trim().length() == 0 || token.startsWith("-")) {
return defaultValue;
}
return reportChoice(longName, shortName, token.trim());
}
prev = token;
}
return reportChoice(longName, shortName, defaultValue);
}
public static boolean hasArg(String longName, String shortName, String[] args) {
for(String token : args) {
if(longName.equals(token) || shortName.equals(token)) {
reportChoice(longName, shortName, "");
return true;
}
}
return false;
}
static String reportChoice(final String longName, final String shortName, final String value) {
System.out.append(longName).append(" ").append(shortName).append(" ").append(value).append("\n");
return value;
}
protected void configureStageRate(Object listener, ReactiveListenerStage stage) {
//if we have a time event turn it on.
long rate = builder.getTriggerRate();
if (rate>0 && listener instanceof TimeListener) {
stage.setTimeEventSchedule(rate, builder.getTriggerStart());
//Since we are using the time schedule we must set the stage to be faster
long customRate = (rate*nsPerMS)/NonThreadScheduler.granularityMultiplier;//in ns, granularityMultiplier times faster than the clock trigger
long appliedRate = Math.min(customRate,builder.getDefaultSleepRateNS());
GraphManager.addNota(gm, GraphManager.SCHEDULE_RATE, appliedRate, stage);
}
}
public StageScheduler getScheduler() {
return scheduler;
}
public void shutdownRuntime() {
shutdownRuntime(3);
}
public void shutdownRuntime(final int secondsTimeout) {
//only do if not already done.
if (!isShutdownRequested()) {
if (null == scheduler || null == builder) {
System.exit(0);
return;
}
final Runnable lastCall = new Runnable() {
@Override
public void run() {
//all the software has now stopped so shutdown the hardware now.
builder.shutdown();
}
};
//notify all the reactors to begin shutdown.
ReactiveListenerStage.requestSystemShutdown(builder, new Runnable() {
@Override
public void run() {
scheduler.shutdown();
scheduler.awaitTermination(secondsTimeout, TimeUnit.SECONDS, lastCall, lastCall);
}
});
}
}
public boolean isShutdownRequested() {
return ReactiveListenerStage.isShutdownRequested(builder);
}
//////////
//only build this when assertions are on
//////////
public static IntHashTable cmdChannelUsageChecker;
static {
assert(setupForChannelAssertCheck());
}
private static boolean setupForChannelAssertCheck() {
cmdChannelUsageChecker = new IntHashTable(9);
return true;
}
private static IntHashTable getUsageChecker() {
return cmdChannelUsageChecker;
}
protected int addGreenPipesCount(Behavior listener, int pipesCount) {
if (this.builder.isListeningToHTTPResponse(listener)) {
pipesCount++; //these are calls to URL responses
}
if ( (!this.builder.isAllPrivateTopics())
&& this.builder.isListeningToSubscription(listener)) {
pipesCount++;
}
if (this.builder.isListeningHTTPRequest(listener)) {
pipesCount += ListenerConfig.computeParallel(builder, parallelInstanceUnderActiveConstruction);
}
return pipesCount;
}
protected void populateGreenPipes(Behavior listener, int pipesCount, Pipe<?>[] inputPipes) {
//if this listener is an HTTP listener then add its behavior id for this pipe
if (this.builder.isListeningToHTTPResponse(listener)) {
inputPipes[--pipesCount] = buildNetResponsePipe();
netResponsePipeIdx = netResponsePipeIdxCounter++;
builder.registerHTTPClientId(builder.behaviorId(listener), netResponsePipeIdx);
}
if ((!this.builder.isAllPrivateTopics())
&& this.builder.isListeningToSubscription(listener)) {
inputPipes[--pipesCount] = buildPublishPipe(listener);
}
//if we push to this 1 pipe all the requests...
//JoinStage to take N inputs and produce 1 output.
//we use splitter for single pipe to 2 databases
//we use different group for parallel processing
//for multiple we must send them all to the reactor.
if (this.builder.isListeningHTTPRequest(listener) ) {
Pipe<HTTPRequestSchema>[] httpRequestPipes;
httpRequestPipes = ListenerConfig.newHTTPRequestPipes(builder, ListenerConfig.computeParallel(builder, parallelInstanceUnderActiveConstruction));
int i = httpRequestPipes.length;
assert(i>0) : "This listens to Rest requests but none have been routed here";
while (--i >= 0) {
inputPipes[--pipesCount] = httpRequestPipes[i];
}
}
}
private Pipe<NetResponseSchema> buildNetResponsePipe() {
Pipe<NetResponseSchema> netResponsePipe = new Pipe<NetResponseSchema>(builder.pcm.getConfig(NetResponseSchema.class)) {
@SuppressWarnings("unchecked")
@Override
protected DataInputBlobReader<NetResponseSchema> createNewBlobReader() {
return new HTTPResponseReader(this);//, gm.recordTypeData);
}
};
return netResponsePipe;
}
/**
 * Builds the pipe which delivers all the data this object has requested via subscriptions elsewhere.
 * @param listener the behavior which will consume the subscription data
 * @return the MessageSubscription pipe registered for this listener
 */
public Pipe<MessageSubscription> buildPublishPipe(Object listener) {
assert(!builder.isAllPrivateTopics()) : "must not call when private topics are exclusively in use";
if (builder.isAllPrivateTopics()) {
throw new RuntimeException("Pub/sub subscription pipes must not be built when all topics are private");
}
Pipe<MessageSubscription> subscriptionPipe = buildMessageSubscriptionPipe();
//store this value for lookup later
//logger.info("adding hash listener {} to pipe ",System.identityHashCode(listener));
if (!IntHashTable.setItem(subscriptionPipeLookup, System.identityHashCode(listener), subscriptionPipeIdx++)) {
throw new RuntimeException("Could not find unique identityHashCode for "+listener.getClass().getCanonicalName());
}
assert(!IntHashTable.isEmpty(subscriptionPipeLookup));
return subscriptionPipe;
}
public Pipe<MessageSubscription> buildPublishPipe(int listenerHash) {
assert(!builder.isAllPrivateTopics()) : "must not call when private topics are exclusively in use";
if (builder.isAllPrivateTopics()) {
throw new RuntimeException("Pub/sub subscription pipes must not be built when all topics are private");
}
Pipe<MessageSubscription> subscriptionPipe = buildMessageSubscriptionPipe();
if (!IntHashTable.setItem(subscriptionPipeLookup, listenerHash, subscriptionPipeIdx++)) {
throw new RuntimeException("HashCode must be unique");
}
assert(!IntHashTable.isEmpty(subscriptionPipeLookup));
return subscriptionPipe;
}
private Pipe<MessageSubscription> buildMessageSubscriptionPipe() {
Pipe<MessageSubscription> subscriptionPipe = new Pipe<MessageSubscription>(builder.pcm.getConfig(MessageSubscription.class)) {
@SuppressWarnings("unchecked")
@Override
protected DataInputBlobReader<MessageSubscription> createNewBlobReader() {
return new MessageReader(this);//, gm.recordTypeData);
}
};
return subscriptionPipe;
}
protected void constructingParallelInstance(int i) {
parallelInstanceUnderActiveConstruction = i;
}
protected void constructingParallelInstancesEnding() {
parallelInstanceUnderActiveConstruction = -1;
}
//////////////////
//server and other behavior
//////////////////
@SuppressWarnings("unchecked")
public void declareBehavior(MsgApp app) {
//The server and telemetry http hosts/ports MUST be defined before we begin
//the declaration of behaviors because we must do binding to the host names.
//as a result this finalize must happen early.
builder.finalizeDeclareConnections();
////////////////////////////////////////////
if (builder.getHTTPServerConfig() != null) {
buildGraphForServer(app);
} else {
app.declareBehavior(this);
if (app instanceof MsgAppParallel) {
int parallelism = builder.parallelTracks();
//since server was not started and did not create each parallel instance this will need to be done here
for(int i = 0;i<parallelism;i++) { //when a server is configured this loop is not used; the server setup loops over the tracks instead
constructingParallelInstance(i);
((MsgAppParallel)app).declareParallelBehavior(this);
}
}
}
constructingParallelInstancesEnding();
//Init bridges
int b = bridges.length;
while (--b>=0) {
((BridgeConfigImpl)bridges[b]).finalizeDeclareConnections(this);
}
}
private void buildGraphForServer(MsgApp app) {
HTTPServerConfig config = builder.getHTTPServerConfig();
ServerPipesConfig serverConfig = config.buildServerConfig(builder.parallelTracks());
ServerCoordinator serverCoord = new ServerCoordinator(
config.getCertificates(),
config.bindHost(),
config.bindPort(),
serverConfig.maxConnectionBitsOnServer,
serverConfig.maxConcurrentInputs,
serverConfig.maxConcurrentOutputs,
builder.parallelTracks(), false,
"Server",
config.defaultHostPath());
final int routerCount = builder.parallelTracks();
final Pipe<NetPayloadSchema>[] encryptedIncomingGroup = Pipe.buildPipes(serverConfig.maxConcurrentInputs, serverConfig.incomingDataConfig);
Pipe[] acks = NetGraphBuilder.buildSocketReaderStage(gm, serverCoord, routerCount, serverConfig, encryptedIncomingGroup);
Pipe[] handshakeIncomingGroup=null;
Pipe[] planIncomingGroup;
if (config.isTLS()) {
planIncomingGroup = Pipe.buildPipes(serverConfig.maxConcurrentInputs, serverConfig.incomingDataConfig);
handshakeIncomingGroup = NetGraphBuilder.populateGraphWithUnWrapStages(gm, serverCoord,
serverConfig.serverRequestUnwrapUnits, serverConfig.handshakeDataConfig,
encryptedIncomingGroup, planIncomingGroup, acks);
} else {
planIncomingGroup = encryptedIncomingGroup;
}
//Must call here so the beginning stages of the graph are drawn first when exporting graph.
app.declareBehavior(this);
buildLastHalfOfGraphForServer(app, serverConfig, serverCoord, routerCount,
acks, handshakeIncomingGroup, planIncomingGroup);
}
private void buildLastHalfOfGraphForServer(MsgApp app, ServerPipesConfig serverConfig,
ServerCoordinator serverCoord, final int routerCount, Pipe[] acks,
Pipe[] handshakeIncomingGroup,
Pipe[] planIncomingGroup) {
////////////////////////
//create the working modules
//////////////////////////
if (app instanceof MsgAppParallel) {
int p = builder.parallelTracks();
for (int i = 0; i < p; i++) {
constructingParallelInstance(i);
((MsgAppParallel)app).declareParallelBehavior(this); //this creates all the modules for this parallel instance
}
} else {
if (builder.parallelTracks()>1) {
throw new UnsupportedOperationException(
"Remove call to parallelism("+builder.parallelTracks()+") OR make the application implement GreenAppParallel or something extending it.");
}
}
//////////////////
//////////////////
HTTP1xRouterStageConfig routerConfig = builder.routerConfig();
ArrayList<Pipe> forPipeCleaner = new ArrayList<Pipe>();
Pipe<HTTPRequestSchema>[][] fromRouterToModules = new Pipe[routerCount][];
int t = routerCount;
int totalRequestPipes = 0;
while (--t>=0) {
//[router/parallel] then [parser/routes]
int path = routerConfig.totalPathsCount();
/////////////////
///for catch all
///////////////
if (path==0) {
path=1;
}
/////////////
fromRouterToModules[t] = new Pipe[path];
while (--path >= 0) {
ArrayList<Pipe<HTTPRequestSchema>> requestPipes = builder.buildFromRequestArray(t, path);
//with a single pipe just pass it one, otherwise use the replicator to fan out from a new single pipe.
int size = requestPipes.size();
totalRequestPipes += size;
if (1==size) {
fromRouterToModules[t][path] =
requestPipes.get(0);
} else {
//we only create a pipe when we are about to use the replicator
fromRouterToModules[t][path] =
builder.newHTTPRequestPipe(builder.pcm.getConfig(HTTPRequestSchema.class));
if (0==size) {
logger.info("warning there are routes without any consumers");
//we have no consumer so tie it to pipe cleaner
forPipeCleaner.add(fromRouterToModules[t][path]);
} else {
ReplicatorStage.newInstance(gm, fromRouterToModules[t][path], requestPipes.toArray(new Pipe[requestPipes.size()]));
}
}
}
if (0==totalRequestPipes) {
logger.warn("ERROR: includeRoutes or includeAllRoutes must be called on REST listener.");
}
}
if (!forPipeCleaner.isEmpty()) {
PipeCleanerStage.newInstance(gm, forPipeCleaner.toArray(new Pipe[forPipeCleaner.size()]));
}
//NOTE: building arrays of pipes grouped by parallel/routers heading out to order supervisor
Pipe<ServerResponseSchema>[][] fromModulesToOrderSuper = new Pipe[routerCount][];
Pipe<ServerResponseSchema>[] errorResponsePipes = new Pipe[routerCount];
PipeConfig<ServerResponseSchema> errConfig = ServerResponseSchema.instance.newPipeConfig(4, 512);
int r = routerCount;
while (--r>=0) {
errorResponsePipes[r] = new Pipe<ServerResponseSchema>(errConfig);
Pipe<ServerResponseSchema>[] temp =
fromModulesToOrderSuper[r] = PronghornStage.join(errorResponsePipes[r], builder.buildToOrderArray(r));
//this block is required to make sure the ordering stage has room
int c = temp.length;
while (--c>=0) {
//ensure that the ordering stage can consume messages of this size
serverConfig.ensureServerCanWrite(temp[c].config().maxVarLenSize());
}
}
boolean catchAll = builder.routerConfig().totalPathsCount()==0;
NetGraphBuilder.buildRouters(gm, planIncomingGroup, acks, fromRouterToModules,
errorResponsePipes, routerConfig, serverCoord,
catchAll);
//NOTE: this array populated here must be equal or larger than the fromModules..
Pipe<NetPayloadSchema>[] fromOrderedContent = NetGraphBuilder.buildRemainderOFServerStages(gm, serverCoord, serverConfig, handshakeIncomingGroup);
//NOTE: the fromOrderedContent must hold var len data which is greater than fromModulesToOrderSuper
NetGraphBuilder.buildOrderingSupers(gm, serverCoord, routerCount,
fromModulesToOrderSuper, fromOrderedContent);
}
//////////////////
//end of server and other behavior
//////////////////
public void setExclusiveTopics(MsgCommandChannel cc, String ... exclusiveTopics) {
// TODO Auto-generated method stub
throw new UnsupportedOperationException("Not yet implemented");
}
//Not for general consumption, only used when we need the low level Pipes to connect directly to the pub/sub or other dynamic subsystem.
public static GraphManager getGraphManager(MsgRuntime runtime) {
return runtime.gm;
}
public RouteFilter addFileServer(String path) { //adds server to all routes
final int parallelIndex = (-1 == parallelInstanceUnderActiveConstruction) ? 0 : parallelInstanceUnderActiveConstruction;
//due to internal implementation we must keep the same number of outputs as inputs.
Pipe<HTTPRequestSchema>[] inputs = new Pipe[1];
Pipe<ServerResponseSchema>[] outputs = new Pipe[1];
populateHTTPInOut(inputs, outputs, 0, parallelIndex);
File rootPath = buildFilePath(path);
FileReadModuleStage.newInstance(gm, inputs, outputs, builder.httpSpec, rootPath);
return new StageRouteFilter(inputs[0], builder, parallelIndex);
}
public RouteFilter addFileServer(String resourceRoot, String resourceDefault) {
final int parallelIndex = (-1 == parallelInstanceUnderActiveConstruction) ? 0 : parallelInstanceUnderActiveConstruction;
//due to internal implementation we must keep the same number of outputs as inputs.
Pipe<HTTPRequestSchema>[] inputs = new Pipe[1];
Pipe<ServerResponseSchema>[] outputs = new Pipe[1];
populateHTTPInOut(inputs, outputs, 0, parallelIndex);
FileReadModuleStage.newInstance(gm, inputs, outputs, builder.httpSpec, resourceRoot, resourceDefault);
return new StageRouteFilter(inputs[0], builder, parallelIndex);
}
private void populateHTTPInOut(Pipe<HTTPRequestSchema>[] inputs,
Pipe<ServerResponseSchema>[] outputs,
int idx, int parallelIndex) {
if (null == fileRequestConfig) {
fileRequestConfig = builder.pcm.getConfig(HTTPRequestSchema.class).grow2x();
}
inputs[idx] = builder.newHTTPRequestPipe(fileRequestConfig);
outputs[idx] = builder.newNetResponsePipe(builder.pcm.getConfig(ServerResponseSchema.class), parallelIndex);
}
//TODO: needs a lot of re-work.
private File buildFilePath(String path) {
// Strip URL space tokens from incoming path strings to avoid issues.
// TODO: This should be made garbage free.
// TODO: Is this expected behavior?
path = path.replaceAll("\\Q%20\\E", " ");
//TODO: MUST FIND PATH...
Enumeration<URL> resource;
try {
resource = ClassLoader.getSystemResources(path);
while (resource.hasMoreElements()) {
System.err.println("looking for resoruce: "+path+" and found "+String.valueOf(resource.nextElement()));
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// ClassLoader.getSystemResource(path);
File rootPath = new File(path);
if (!rootPath.exists()) {
//test if this is under development
File devPath = new File("./src/main/resources/"+path);
if (devPath.exists()) {
rootPath = devPath;
}
}
if (!rootPath.exists()) {
throw new UnsupportedOperationException("Path not found: "+rootPath);
}
return rootPath;
}
///////////////////////////
//end of file server
///////////////////////////
public Builder getBuilder(){
if(this.builder==null){
this.builder = (B) new BuilderImpl(gm,args);
}
return this.builder;
}
private ListenerFilter registerListenerImpl(final Behavior listener) {
return registerListenerImpl(null, listener);
}
private ListenerFilter registerListenerImpl(final String id, final Behavior listener) {
////////////
//OUTPUT
///////////
outputPipes = new Pipe<?>[0];
//extract pipes used by listener and use cmdChannelUsageChecker to confirm it is not re-used
ChildClassScanner.visitUsedByClass(listener, gatherPipesVisitor, MsgCommandChannel.class);//populates outputPipes
/////////////
//INPUT
//add green features, count first then create the pipes
//NOTE: each Behavior is inspected and will find Transducers which need inputs as well
/////////
int pipesCount = addGreenPipesCount(listener, 0);
Pipe<?>[] inputPipes = new Pipe<?>[pipesCount];
populateGreenPipes(listener, pipesCount, inputPipes);
//////////////////////
//////////////////////
//this is empty when transducerAutowiring is off
final ArrayList<ReactiveManagerPipeConsumer> consumers = new ArrayList<ReactiveManagerPipeConsumer>();
//extract this into common method to be called in GL and FL
if (transducerAutowiring) {
inputPipes = autoWireTransducers(listener, inputPipes, consumers);
}
if (null!=id) {
List<PrivateTopic> sourceTopics = builder.getPrivateTopicsFromSource(id);
int i = sourceTopics.size();
while (--i>=0) {
PrivateTopic privateTopic = sourceTopics.get(i);
outputPipes = PronghornStage.join(outputPipes, privateTopic.getPipe(parallelInstanceUnderActiveConstruction));
}
List<PrivateTopic> targetTopics = builder.getPrivateTopicsFromTarget(id);
int j = targetTopics.size();
while (--j>=0) {
PrivateTopic privateTopic = targetTopics.get(j);
inputPipes = PronghornStage.join(inputPipes, privateTopic.getPipe(parallelInstanceUnderActiveConstruction));
}
}
ReactiveListenerStage<?> reactiveListener = builder.createReactiveListener(gm, listener,
inputPipes, outputPipes, consumers,
parallelInstanceUnderActiveConstruction,id);
//finds all the command channels which make use of private topics.
reactiveListener.regPrivateTopics();
if (listener instanceof RestListenerBase) {
GraphManager.addNota(gm, GraphManager.DOT_RANK_NAME, "ModuleStage", reactiveListener);
}
/////////////////////
//StartupListener is not driven by any response data and is called when the stage is started up. no pipe needed.
/////////////////////
//TimeListener: time rate signals are sent from the stage itself and therefore do not need a pipe to consume.
/////////////////////
configureStageRate(listener,reactiveListener);
int testId = -1;
int i = inputPipes.length;
while (--i>=0) {
if (inputPipes[i]!=null && Pipe.isForSchema((Pipe<MessageSubscription>)inputPipes[i], MessageSubscription.class)) {
testId = inputPipes[i].id;
}
}
assert(-1==testId || GraphManager.allPipesOfType(gm, MessageSubscription.instance)[subscriptionPipeIdx-1].id==testId) : "GraphManager has returned the pipes out of the expected order";
return reactiveListener;
}
protected Pipe<?>[] autoWireTransducers(final Behavior listener, Pipe<?>[] inputPipes,
final ArrayList<ReactiveManagerPipeConsumer> consumers) {
if (inputPipes.length==0) {
return inputPipes;//no work since no inputs are used.
}
final Grouper g = new Grouper(inputPipes);
ChildClassScannerVisitor tVisitor = new ChildClassScannerVisitor() {
@Override
public boolean visit(Object child, Object topParent) {
if (g.additions()==0) {
//add first value
Pipe[] pipes = builder.operators.createPipes(builder, listener, g);
consumers.add(new ReactiveManagerPipeConsumer(listener, builder.operators, pipes));
g.add(pipes);
}
int c = consumers.size();
while (--c>=0) {
if (consumers.get(c).obj == child) {
//do not add this one it is already recorded
return true;
}
}
Pipe[] pipes = builder.operators.createPipes(builder, child, g);
consumers.add(new ReactiveManagerPipeConsumer(child, builder.operators, pipes));
g.add(pipes);
return true;
}
};
ChildClassScanner.visitUsedByClass(listener, tVisitor, ListenerTransducer.class);
if (g.additions()>0) {
inputPipes = g.firstArray();
g.buildReplicators(gm, consumers);
}
return inputPipes;
}
protected PipeConfigManager buildPipeManager() {
PipeConfigManager pcm = new PipeConfigManager();
pcm.addConfig(defaultCommandChannelLength,0,TrafficOrderSchema.class );
return pcm;
}
public static IntHashTable getSubPipeLookup(MsgRuntime runtime) {
return runtime.subscriptionPipeLookup;
}
}
|
removed old API
|
src/main/java/com/ociweb/gl/api/MsgRuntime.java
|
removed old API
|
|
Java
|
mit
|
90259f58ffe7cb99510bc04ab2c63707a537a329
| 0
|
saphraxaeris/bigdata-project,saphraxaeris/bigdata-project,saphraxaeris/bigdata-project
|
package drivers;
import java.io.File;
import java.io.PrintWriter;
import java.io.IOException;
import java.util.Scanner;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import drivers.Task1Driver;
import drivers.Task2Driver;
import drivers.Task3Driver;
import drivers.Task4Driver;
public class MainDriver {
public static void main(String[] args) throws Exception {
if (args.length != 2) {
System.err.println("Usage: <input path> <output directory>");
System.exit(-1);
}
//Run Tasks
runTasks(args);
System.out.println("\n\nRan all jobs...");
//Get Result Data
getData(args[1]);
}
private static void runTasks(String[] args) {
try {
Task1Driver.run(args[0], args[1] + "/task1");
Task2Driver.run(args[0], args[1] + "/task2");
Task3Driver.run(args[0], args[1] + "/task3");
Task4Driver.run(args[0], args[1] + "/task4");
Task5Driver.run(args[0], args[1] + "/task5");
Task6Driver.run(args[0], args[1] + "/task6");
}
catch(Exception ex) {
System.out.println("There was an error processing tasks.");
}
}
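//getData copies each task's part-r-00000 output from HDFS into the local downloads folder and then
//rewrites the whitespace-separated key/value results as JavaScript data files under results/
//(task1.js, task2.js, task3.js), presumably for a visualization page to load.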
private static void getData(String path) {
try {
System.out.println("Connecting to HDFS...");
Configuration conf = new Configuration();
FileSystem hdfsFileSystem = FileSystem.get(conf);
Path local = new Path("downloads/part-r-00000");
System.out.println("Getting Task 1 result...");
Path hdfs = new Path(path + "/task1/part-r-00000");
hdfsFileSystem.copyToLocalFile(false, hdfs, local, true);
System.out.println("Massaging Task 1 result...");
File task1Data = new File("downloads/part-r-00000");
Scanner task1File = new Scanner(task1Data);
String outputText = "var task1Data = [";
while(task1File.hasNext()) {
outputText += "{text: '" + task1File.next() + "', size: " + task1File.next() + "},";
}
outputText = outputText.substring(0, outputText.length()-1);
outputText += "];";
task1File.close();
File outputFile = new File("results/task1.js");
PrintWriter output = new PrintWriter(outputFile);
output.write(outputText);
output.close();
System.out.println("Massaging Task 2 result...");
//local = new Path("downloads/part-r-000002");
hdfs = new Path(path + "/task2/part-r-00000");
hdfsFileSystem.copyToLocalFile(false, hdfs, local, true);
File task2Data = new File("downloads/part-r-00000");
Scanner task2File = new Scanner(task2Data);
outputText = "var task2Data = [";
while(task2File.hasNext()) {
String text = task2File.next();
String size = task2File.next();
outputText += "{text: '" + text + "', size: " + size + "},";
System.out.println(text + " " + size);
}
outputText = outputText.substring(0, outputText.length()-1);
outputText += "];";
outputFile = new File("results/task2.js");
output = new PrintWriter(outputFile);
output.write(outputText);
output.close();
task2File.close();
System.out.println("Massaging Task 3 result...");
//local = new Path("downloads/part-r-000003");
hdfs = new Path(path + "/task3/part-r-00000");
hdfsFileSystem.copyToLocalFile(false, hdfs, local, true);
File task3Data = new File("downloads/part-r-00000");
Scanner task3File = new Scanner(task3Data);
outputText = "var task3Data = [";
while(task3File.hasNext()) {
outputText += "{username: '" + task3File.next() + "', count: " + task3File.next() + "},";
}
outputText = outputText.substring(0, outputText.length()-1);
outputText += "];";
outputFile = new File("results/task3.js");
output = new PrintWriter(outputFile);
output.write(outputText);
output.close();
task3File.close();
}
catch(IOException ex) {
System.out.println("There was an error accessing HDFS:\n" + ex.getMessage());
}
}
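	// Illustrative sketch (not part of the original code): the three "massaging"
	// blocks above repeat the same copy-parse-write pattern, so a helper along
	// these lines could replace them, assuming each result line is a token
	// followed by its numeric count. The method name and parameters are
	// hypothetical, not taken from the original project.
	private static void massageResult(File data, String varName, String keyLabel,
			String valueLabel, File outputFile) throws IOException {
		Scanner scanner = new Scanner(data);
		StringBuilder out = new StringBuilder("var " + varName + " = [");
		while (scanner.hasNext()) {
			// each record is a token (word or username) followed by its count
			out.append("{").append(keyLabel).append(": '").append(scanner.next())
			   .append("', ").append(valueLabel).append(": ").append(scanner.next()).append("},");
		}
		scanner.close();
		if (out.charAt(out.length() - 1) == ',') {
			out.setLength(out.length() - 1); // drop the trailing comma
		}
		out.append("];");
		PrintWriter output = new PrintWriter(outputFile);
		output.write(out.toString());
		output.close();
	}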
}
|
src/main/java/drivers/MainDriver.java
|
package drivers;
import java.io.File;
import java.io.PrintWriter;
import java.io.IOException;
import java.util.Scanner;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import drivers.Task1Driver;
import drivers.Task2Driver;
import drivers.Task3Driver;
import drivers.Task4Driver;
public class MainDriver {
public static void main(String[] args) throws Exception {
if (args.length != 2) {
System.err.println("Usage: <input path> <output directory>");
System.exit(-1);
}
//Run Tasks
runTasks(args);
System.out.println("\n\nRan all jobs...");
//Get Result Data
getData(args[1]);
}
private static void runTasks(String[] args) {
try {
Task1Driver.run(args[0], args[1] + "/task1");
Task2Driver.run(args[0], args[1] + "/task2");
Task3Driver.run(args[0], args[1] + "/task3");
Task4Driver.run(args[0], args[1] + "/task4");
Task5Driver.run(args[0], args[1] + "/task5");
Task6Driver.run(args[0], args[1] + "/task6");
}
catch(Exception ex) {
System.out.println("There was an error processing tasks.");
}
}
private static void getData(String path) {
try {
System.out.println("Connecting to HDFS...");
Configuration conf = new Configuration();
FileSystem hdfsFileSystem = FileSystem.get(conf);
Path local = new Path("downloads/part-r-00000");
System.out.println("Getting Task 1 result...");
Path hdfs = new Path(path + "/task1/part-r-00000");
hdfsFileSystem.copyToLocalFile(false, hdfs, local, true);
System.out.println("Massaging Task 1 result...");
File task1Data = new File("downloads/part-r-00000");
Scanner task1File = new Scanner(task1Data);
String outputText = "var task1Data = [";
while(task1File.hasNext()) {
outputText += "{text: '" + task1File.next() + "', size: " + task1File.next() + "},";
}
outputText = outputText.substring(0, outputText.length()-1);
outputText += "];";
task1File.close();
File outputFile = new File("results/task1.js");
PrintWriter output = new PrintWriter(outputFile);
output.write(outputText);
output.close();
System.out.println("Massaging Task 2 result...");
//local = new Path("downloads/part-r-000002");
hdfs = new Path(path + "/task2/part-r-00000");
hdfsFileSystem.copyToLocalFile(false, hdfs, local, true);
File task2Data = new File("downloads/part-r-00000");
Scanner task2File = new Scanner(task2Data);
outputText = "var task2Data = [";
while(task2File.hasNext()) {
outputText += "{text: '" + task2File.next() + "', size: " + task2File.next() + "},";
}
outputText = outputText.substring(0, outputText.length()-1);
outputText += "];";
outputFile = new File("results/task2.js");
output = new PrintWriter(outputFile);
output.write(outputText);
output.close();
task2File.close();
System.out.println("Massaging Task 3 result...");
//local = new Path("downloads/part-r-000003");
hdfs = new Path(path + "/task3/part-r-00000");
hdfsFileSystem.copyToLocalFile(false, hdfs, local, true);
File task3Data = new File("downloads/part-r-00000");
Scanner task3File = new Scanner(task3Data);
outputText = "var task3Data = [";
while(task3File.hasNext()) {
outputText += "{username: '" + task3File.next() + "', count: " + task3File.next() + "},";
}
outputText = outputText.substring(0, outputText.length()-1);
outputText += "];";
outputFile = new File("results/task3.js");
output = new PrintWriter(outputFile);
output.write(outputText);
output.close();
task3File.close();
}
catch(IOException ex) {
System.out.println("There was an error accessing HDFS:\n" + ex.getMessage());
}
}
}
|
added logging
|
src/main/java/drivers/MainDriver.java
|
added logging
|
|
Java
|
mit
|
bf7200b2ae8c651302bf0017b9b2846d52adc3a1
| 0
|
DDoS/OnlineGame
|
package ecse414.fall2015.group21.game.shared.data;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.HashMap;
import java.util.Map;
import io.netty.buffer.ByteBuf;
/**
* Represents a packet to be converted to a byte buffer and sent over the network. Exists in two flavours: UDP and TCP
*/
public interface Packet {
/**
* Returns the packet type.
*
* @return The type
*/
Type getType();
/**
* Stores the packet information in a new byte buffer for sending over the network.
*
* @return The packet as a buffer
*/
ByteBuf asRaw();
/**
* A UDP packet.
*/
interface UDP extends Packet {
/**
* A factory for converting from byte buffers to UDP packets.
*/
Factory<Packet.UDP> FACTORY = new Factory<>();
}
/**
* A TCP packet.
*/
interface TCP extends Packet {
/**
* A factory for converting from byte buffers to TCP packets.
*/
Factory<Packet.TCP> FACTORY = new Factory<>();
}
/**
* All the packet types.
*/
enum Type {
CONNECT_REQUEST(0),
CONNECT_FULFILL(1),
TIME_SYNC_REQUEST(2),
TIME_SYNC_FULFILL(3),
PLAYER_STATE(4),
PLAYER_SHOOT(5),
PLAYER_HEALTH(6);
/**
* Converts from an ID to a packet type.
*/
public static final Type[] BY_ID = values();
/**
* Gets the packet type ID.
*/
public final byte id;
Type(int id) {
this.id = (byte) id;
}
}
/**
* A factory that uses reflection to call the proper packet constructor for the given byte buffer. The first byte of the packet is used as the ID of the type. Packets are registered by type.
*
* @param <T> The type of packet
*/
class Factory<T extends Packet> {
private final Map<Byte, Constructor<? extends T>> constructors = new HashMap<>();
/**
* Registers a packet class for the given types.
*
* @param packet The packet class to use for the types
* @param types The types the packet class represents
*/
public void register(Class<? extends T> packet, Packet.Type... types) {
try {
final Constructor<? extends T> constructor = packet.getDeclaredConstructor(ByteBuf.class);
constructor.setAccessible(true);
for (Type type : types) {
constructors.put(type.id, constructor);
}
} catch (NoSuchMethodException exception) {
throw new RuntimeException(exception);
}
}
/**
* Gets the first byte of the buffer, converts it to a packet type and looks for a constructor for that type. On success, constructs a new packet from the buffer.
*
* @param buf The buffer to construct the packet from
* @param <I> The expected type of packet that will be constructed
* @return The resulting packet
*/
@SuppressWarnings("unchecked")
public <I extends T> I newInstance(ByteBuf buf) {
try {
return (I) constructors.get(buf.getByte(0)).newInstance(buf);
} catch (InstantiationException | IllegalAccessException | InvocationTargetException exception) {
throw new RuntimeException(exception);
}
}
}
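    // Illustrative usage sketch (not part of the original source); it assumes a
    // hypothetical ConnectRequestUDP class that implements Packet.UDP and has a
    // (ByteBuf) constructor:
    //
    //   Packet.UDP.FACTORY.register(ConnectRequestUDP.class, Packet.Type.CONNECT_REQUEST);
    //   ...
    //   Packet.UDP decoded = Packet.UDP.FACTORY.newInstance(receivedBuffer);
    //
    // newInstance(...) reads the type ID from the first byte of the buffer and
    // dispatches, via reflection, to the constructor registered for that type.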
}
|
src/main/java/ecse414/fall2015/group21/game/shared/data/Packet.java
|
package ecse414.fall2015.group21.game.shared.data;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.HashMap;
import java.util.Map;
import io.netty.buffer.ByteBuf;
/**
*
*/
public interface Packet {
Type getType();
ByteBuf asRaw();
interface UDP extends Packet {
Factory<Packet.UDP> FACTORY = new Factory<>();
}
interface TCP extends Packet {
Factory<Packet.TCP> FACTORY = new Factory<>();
}
enum Type {
CONNECT_REQUEST(0),
CONNECT_FULFILL(1),
TIME_SYNC_REQUEST(2),
TIME_SYNC_FULFILL(3),
PLAYER_STATE(4),
PLAYER_SHOOT(5),
PLAYER_HEALTH(6);
public static final Type[] BY_ID = values();
public final byte id;
Type(int id) {
this.id = (byte) id;
}
}
class Factory<T extends Packet> {
private final Map<Byte, Constructor<? extends T>> constructors = new HashMap<>();
public void register(Class<? extends T> packet, Packet.Type... types) {
try {
final Constructor<? extends T> constructor = packet.getDeclaredConstructor(ByteBuf.class);
constructor.setAccessible(true);
for (Type type : types) {
constructors.put(type.id, constructor);
}
} catch (NoSuchMethodException exception) {
throw new RuntimeException(exception);
}
}
@SuppressWarnings("unchecked")
public <I extends T> I newInstance(ByteBuf buf) {
try {
return (I) constructors.get(buf.getByte(0)).newInstance(buf);
} catch (InstantiationException | IllegalAccessException | InvocationTargetException exception) {
throw new RuntimeException(exception);
}
}
}
}
|
Add some javadocs in the Packet class
|
src/main/java/ecse414/fall2015/group21/game/shared/data/Packet.java
|
Add some javadocs in the Packet class
|
|
Java
|
mit
|
02c2c5a2f71ed301205d926fe8042b5fdecab363
| 0
|
Sarwat/GeoSpark,zongsizhang/GeoSpark
|
package org.datasyslab.geospark.formatMapper.shapefileParser.boundary;
import java.io.Serializable;
import java.util.Arrays;
/**
* Created by zongsizhang on 7/6/17.
*/
public class BoundBox implements Serializable{
/** bounds of 8 numbers. Xmin, Ymin, Xmax, Ymax, Zmin, Zmax, Mmin, Mmax */
double[] bounds = null;
/**
* construct bounds with an array
* @param bounds
*/
public BoundBox(double[] bounds) {
this.bounds = Arrays.copyOf(bounds, bounds.length);
}
/**
* construct by copy other boundbox
* @param otherbox
*/
public BoundBox(BoundBox otherbox) {
this.bounds = otherbox.copyBounds();
}
/**
* construct an initial boundBox with all values 0
*/
public BoundBox() {
bounds = new double[8];
}
/**
* set the bound at index i to the given value
* @param i
* @param value
*/
public void set(int i, double value){
bounds[i] = value;
}
/**
* return a copy of bounds
* @return
*/
public double[] copyBounds(){
return Arrays.copyOf(bounds, bounds.length);
}
/**
* convert bounds array to string
* @return
*/
@Override
public String toString() {
StringBuilder strBuilder = new StringBuilder();
for (int i = 0;i < bounds.length; ++i){
strBuilder.append(bounds[i] + ", ");
}
return strBuilder.toString();
}
/** set min X */
public void setXMin(double value){
bounds[0] = value;
}
/** set min Y */
public void setYMin(double value){
bounds[1] = value;
}
/** set max X */
public void setXMax(double value){
bounds[2] = value;
}
/** set max Y */
public void setYMax(double value){
bounds[3] = value;
}
/** set min Z */
public void setZMin(double value){
bounds[4] = value;
}
/** set max Z */
public void setZMax(double value){
bounds[5] = value;
}
/** set min M */
public void setMMin(double value){
bounds[6] = value;
}
/** set max M */
public void setMMax(double value){
bounds[7] = value;
}
/**get min X */
public double getXMin(){
return bounds[0];
}
/**get max X */
public double getXMax(){
return bounds[2];
}
/**get min Y */
public double getYMin(){
return bounds[1];
}
/**get max Y */
public double getYMax(){
return bounds[3];
}
/**get min Z */
public double getZMin(){
return bounds[4];
}
/**get max Z */
public double getZMax(){
return bounds[5];
}
/**get min M */
public double getMMin(){
return bounds[6];
}
/**get max M */
public double getMMax(){
return bounds[7];
}
/**
* calculate the union of two bound box
* @param box1
* @param box2
* @return
*/
public static BoundBox mergeBoundBox(BoundBox box1, BoundBox box2){
BoundBox box = new BoundBox();
// merge X
box.setXMin(Math.min(box1.getXMin(), box2.getXMin()));
box.setXMax(Math.max(box1.getXMax(), box2.getXMax()));
// merge Y
box.setYMin(Math.min(box1.getYMin(), box2.getYMin()));
box.setYMax(Math.max(box1.getYMax(), box2.getYMax()));
// merge Z
box.setZMin(Math.min(box1.getZMin(), box2.getZMin()));
box.setZMax(Math.max(box1.getZMax(), box2.getZMax()));
// merge M
box.setMMin(Math.min(box1.getMMin(), box2.getMMin()));
box.setMMax(Math.max(box1.getMMax(), box2.getMMax()));
return box;
}
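    // Illustrative example (hypothetical values, not from the original source):
    //   BoundBox a = new BoundBox(new double[]{0, 0, 10, 10, 0, 0, 0, 0});
    //   BoundBox b = new BoundBox(new double[]{5, -2, 20, 8, 0, 0, 0, 0});
    //   BoundBox union = BoundBox.mergeBoundBox(a, b);
    //   // union then spans X in [0, 20] and Y in [-2, 10]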
}
|
core/src/main/java/org/datasyslab/geospark/formatMapper/shapefileParser/boundary/BoundBox.java
|
package org.datasyslab.geospark.formatMapper.shapefileParser.boundary;
import java.io.Serializable;
import java.util.Arrays;
/**
* Created by zongsizhang on 7/6/17.
*/
public class BoundBox implements Serializable{
/** bounds of 8 numbers. Xmin, Ymin, Xmax, Ymax, Zmin, Zmax, Mmin, Mmax */
double[] bounds = null;
/**
* construct bounds with an array
* @param bounds
*/
public BoundBox(double[] bounds) {
this.bounds = Arrays.copyOf(bounds, bounds.length);
}
/**
* construct by copy other boundbox
* @param otherbox
*/
public BoundBox(BoundBox otherbox) {
this.bounds = otherbox.copyBounds();
}
/**
* construct a initial boundBox with all value 0
*/
public BoundBox() {
bounds = new double[8];
}
/**
* set value at i with value
* @param i
* @param value
*/
public void set(int i, double value){
bounds[i] = value;
}
public void setXMin(double value){
bounds[0] = value;
}
public void setYMin(double value){
bounds[1] = value;
}
public void setXMax(double value){
bounds[2] = value;
}
public void setYMax(double value){
bounds[3] = value;
}
public void setZMin(double value){
bounds[4] = value;
}
public void setZMax(double value){
bounds[5] = value;
}
public void setMMin(double value){
bounds[6] = value;
}
public void setMMax(double value){
bounds[7] = value;
}
public double getXMin(){
return bounds[0];
}
public double getXMax(){
return bounds[2];
}
public double getYMin(){
return bounds[1];
}
public double getYMax(){
return bounds[3];
}
public double getZMin(){
return bounds[4];
}
public double getZMax(){
return bounds[5];
}
public double getMMin(){
return bounds[6];
}
public double getMMax(){
return bounds[7];
}
public double[] copyBounds(){
return Arrays.copyOf(bounds, bounds.length);
}
@Override
public String toString() {
StringBuilder strBuilder = new StringBuilder();
for (int i = 0;i < bounds.length; ++i){
strBuilder.append(bounds[i] + ", ");
}
return strBuilder.toString();
}
public static BoundBox mergeBoundBox(BoundBox box1, BoundBox box2){
BoundBox box = new BoundBox();
// merge X
box.setXMin(Math.min(box1.getXMin(), box2.getXMin()));
box.setXMax(Math.max(box1.getXMax(), box2.getXMax()));
// merge Y
box.setYMin(Math.min(box1.getYMin(), box2.getYMin()));
box.setYMax(Math.max(box1.getYMax(), box2.getYMax()));
// merge Z
box.setZMin(Math.min(box1.getZMin(), box2.getZMin()));
box.setZMax(Math.max(box1.getZMax(), box2.getZMax()));
// merge M
box.setMMin(Math.min(box1.getMMin(), box2.getMMin()));
box.setMMax(Math.max(box1.getMMax(), box2.getMMax()));
return box;
}
}
|
add comments
|
core/src/main/java/org/datasyslab/geospark/formatMapper/shapefileParser/boundary/BoundBox.java
|
add comments
|
|
Java
|
mit
|
624b235dc00de6fec71b8e61f069fb070c556f8e
| 0
|
ClintonCao/UnifiedASATVisualizer,ClintonCao/UnifiedASATVisualizer,ClintonCao/Contextproject-TSE,ClintonCao/Contextproject-TSE,ClintonCao/Contextproject-TSE,ClintonCao/UnifiedASATVisualizer
|
package BlueTurtle.TSE;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.Setter;
/**
* Represents a basic unit of work for the analyser to process.
*
* @author BlueTurtle.
*
*/
@AllArgsConstructor
public class AnalyserCommand {
@Getter @Setter private String defaultOutputFilePath;
@Getter @Setter private String[] args;
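	// Usage is unchanged from the hand-written version it replaces (illustrative
	// sketch, assuming Lombok annotation processing is enabled at build time):
	//   AnalyserCommand cmd = new AnalyserCommand("report.json", new String[]{"-v"});
	//   String[] args = cmd.getArgs();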
}
|
src/main/java/BlueTurtle/TSE/AnalyserCommand.java
|
package BlueTurtle.TSE;
/**
* Represents a basic unit of work for the analyser to process.
*
* @author BlueTurtle.
*
*/
public class AnalyserCommand {
private String defaultOutputFilePath;
private String[] args;
/**
* Constructor.
*
* @param defaultOutputFilePath
* the output of the result.
* @param args
* - command line arguments.
*/
public AnalyserCommand(String defaultOutputFilePath, String[] args) {
this.setDefaultOutputFilePath(defaultOutputFilePath);
this.setArgs(args);
}
public String[] getArgs() {
return args;
}
public void setArgs(String[] args) {
this.args = args;
}
public String getDefaultOutputFilePath() {
return defaultOutputFilePath;
}
public void setDefaultOutputFilePath(String defaultOutputFilePath) {
this.defaultOutputFilePath = defaultOutputFilePath;
}
}
|
lombok'd analysercommand.
|
src/main/java/BlueTurtle/TSE/AnalyserCommand.java
|
lombok'd analysercommand.
|
|
Java
|
mit
|
08b4e6de6d7a9296023d45bc0fea19e0434abd7b
| 0
|
hrgdavor/java-watcher,hrgdavor/java-watcher
|
package hr.hrg.javawatcher;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/** Combination of a single {@link FolderWatcher} and a {@link FileMatchGlob} to simplify common watch situations. */
public class GlobWatcher {
FolderWatcher<FileMatchGlob> watcher = new FolderWatcher<FileMatchGlob>();
FileMatchGlob matcher;
public Path relativize(Path path) {
return matcher.relativize(path);
}
public FileMatchGlob add(FileMatchGlob matcher) {
return watcher.add(matcher);
}
public GlobWatcher(Path root, boolean recursive) {
matcher = new FileMatchGlob(root, recursive);
watcher.add(matcher);
}
/** Recursive from a root. */
public GlobWatcher(Path root) {
this(root,true);
}
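	// Illustrative usage sketch (not part of the original source): watch a folder
	// recursively and process changed files in bursts. The glob pattern and the
	// 50 ms burst delay are assumptions, not values from the original code.
	//
	//   GlobWatcher watcher = new GlobWatcher(Paths.get("src"));
	//   watcher.includes("**.js");
	//   watcher.init(true); // register for watching, not just the initial scan
	//   while (true) {
	//       for (Path changed : watcher.takeBatchFiles(50)) {
	//           System.out.println("changed: " + changed);
	//       }
	//   }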
public static final Collection<Path> toPaths(Collection<FileChangeEntry<FileMatchGlob>> poll) {
if(poll == null) return null;
Collection<Path> paths = new ArrayList<Path>(poll.size());
for(FileChangeEntry<FileMatchGlob> p:poll) paths.add(p.getPath());
return paths;
}
public Collection<FileChangeEntry<FileMatchGlob>> poll(){
return watcher.poll();
}
public Collection<Path> pollFiles(){
return toPaths(watcher.poll());
}
public Collection<FileChangeEntry<FileMatchGlob>> takeBatch(long burstDelay) {
return watcher.takeBatch(burstDelay);
}
public Collection<Path> takeBatchFiles(long burstDelay) {
return toPaths(watcher.takeBatch(burstDelay));
}
public Collection<FileChangeEntry<FileMatchGlob>> poll(long timeout, TimeUnit unit) throws InterruptedException {
return watcher.poll(timeout, unit);
}
public Collection<Path> pollFiles(long timeout, TimeUnit unit) throws InterruptedException {
return toPaths(watcher.poll(timeout, unit));
}
public Collection<FileChangeEntry<FileMatchGlob>> take() throws InterruptedException {
return watcher.take();
}
public Collection<Path> takeFiles() throws InterruptedException {
return toPaths(watcher.take());
}
public Collection<FileChangeEntry<FileMatchGlob>> takeOrNull() {
return watcher.takeOrNull();
}
public Collection<Path> takeOrNullFiles() {
return toPaths(watcher.takeOrNull());
}
public Collection<FileChangeEntry<FileMatchGlob>> getMatched() {
return watcher.getMatched();
}
public Collection<Path> getMatchedFiles() {
return watcher.getMatchedFiles();
}
public Set<Path> getMatchedFilesUnique() {
return watcher.getMatchedFilesUnique();
}
public void init(boolean registerForWatch) {
watcher.init(registerForWatch);
}
public FileMatchGlob includes(Collection<String> globs) {
return matcher.includes(globs);
}
public FileMatchGlob includes(String... globs) {
return matcher.includes(globs);
}
public FileMatchGlob excludes(Collection<String> globs) {
return matcher.excludes(globs);
}
public FileMatchGlob excludes(String... globs) {
return matcher.excludes(globs);
}
public List<PathMatcher> getExcludes() {
return matcher.getExcludes();
}
public List<PathMatcher> getIncludes() {
return matcher.getIncludes();
}
public Collection<Path> getExcluded() {
return matcher.getExcluded();
}
public int getExcludedCount() {
return matcher.getExcludedCount();
}
public boolean isCollectExcluded() {
return matcher.isCollectExcluded();
}
public boolean isCollectMatched() {
return matcher.isCollectMatched();
}
public void setCollectExcluded(boolean collectExcluded) {
matcher.setCollectExcluded(collectExcluded);
}
public void setCollectMatched(boolean collectMatched) {
matcher.setCollectMatched(collectMatched);
}
public boolean isMatch(Path path) {
return matcher.isMatch(path);
}
public boolean isExcluded(Path path) {
return matcher.isExcluded(path);
}
public Path getRootPath() {
return matcher.getRootPath();
}
public boolean isRecursive() {
return matcher.isRecursive();
}
}
|
src/main/java/hr/hrg/javawatcher/GlobWatcher.java
|
package hr.hrg.javawatcher;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/** Combination of single {@link FolderWatcher} and a {@link FileMatchGlob} to simplify simple watch situations*/
public class GlobWatcher {
FolderWatcher<FileMatchGlob> watcher = new FolderWatcher<FileMatchGlob>();
FileMatchGlob matcher;
public Path relativize(Path path) {
return matcher.relativize(path);
}
public FileMatchGlob add(FileMatchGlob matcher) {
return watcher.add(matcher);
}
public GlobWatcher(Path root, boolean recursive) {
matcher = new FileMatchGlob(root, recursive);
watcher.add(matcher);
}
/** Recursive from a root. */
public GlobWatcher(Path root) {
this(root,true);
}
public Collection<FileChangeEntry<FileMatchGlob>> poll() {
return watcher.poll();
}
public Collection<FileChangeEntry<FileMatchGlob>> takeBatch(long burstDelay) {
return watcher.takeBatch(burstDelay);
}
public Collection<FileChangeEntry<FileMatchGlob>> poll(long timeout, TimeUnit unit) throws InterruptedException {
return watcher.poll(timeout, unit);
}
public Collection<FileChangeEntry<FileMatchGlob>> take() throws InterruptedException {
return watcher.take();
}
public Collection<FileChangeEntry<FileMatchGlob>> takeOrNull() {
return watcher.takeOrNull();
}
public Collection<FileChangeEntry<FileMatchGlob>> getMatched() {
return watcher.getMatched();
}
public Collection<Path> getMatchedFiles() {
return watcher.getMatchedFiles();
}
public Set<Path> getMatchedFilesUnique() {
return watcher.getMatchedFilesUnique();
}
public void init(boolean registerForWatch) {
watcher.init(registerForWatch);
}
public FileMatchGlob includes(Collection<String> globs) {
return matcher.includes(globs);
}
public FileMatchGlob includes(String... globs) {
return matcher.includes(globs);
}
public FileMatchGlob excludes(Collection<String> globs) {
return matcher.excludes(globs);
}
public FileMatchGlob excludes(String... globs) {
return matcher.excludes(globs);
}
public List<PathMatcher> getExcludes() {
return matcher.getExcludes();
}
public List<PathMatcher> getIncludes() {
return matcher.getIncludes();
}
public Collection<Path> getExcluded() {
return matcher.getExcluded();
}
public int getExcludedCount() {
return matcher.getExcludedCount();
}
public boolean isCollectExcluded() {
return matcher.isCollectExcluded();
}
public boolean isCollectMatched() {
return matcher.isCollectMatched();
}
public void setCollectExcluded(boolean collectExcluded) {
matcher.setCollectExcluded(collectExcluded);
}
public void setCollectMatched(boolean collectMatched) {
matcher.setCollectMatched(collectMatched);
}
public boolean isMatch(Path path) {
return matcher.isMatch(path);
}
public boolean isExcluded(Path path) {
return matcher.isExcluded(path);
}
public Path getRootPath() {
return matcher.getRootPath();
}
public boolean isRecursive() {
return matcher.isRecursive();
}
}
|
extra function to get File instead FileChangeEntry
|
src/main/java/hr/hrg/javawatcher/GlobWatcher.java
|
extra function to get File instead FileChangeEntry
|
|
Java
|
mit
|
2a08ee6a3b64f4d740142f7b0c0169315f178fa0
| 0
|
kmdouglass/Micro-Manager,kmdouglass/Micro-Manager
|
///////////////////////////////////////////////////////////////////////////////
//FILE: AcquisitionPanel.java
//PROJECT: Micro-Manager
//SUBSYSTEM: ASIdiSPIM plugin
//-----------------------------------------------------------------------------
//
// AUTHOR: Nico Stuurman, Jon Daniels
//
// COPYRIGHT: University of California, San Francisco, & ASI, 2013
//
// LICENSE: This file is distributed under the BSD license.
// License text is included with the source distribution.
//
// This file is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
//
// IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES.
package org.micromanager.asidispim;
import org.micromanager.asidispim.Data.AcquisitionModes;
import org.micromanager.asidispim.Data.CameraModes;
import org.micromanager.asidispim.Data.Cameras;
import org.micromanager.asidispim.Data.Devices;
import org.micromanager.asidispim.Data.Joystick;
import org.micromanager.asidispim.Data.MultichannelModes;
import org.micromanager.asidispim.Data.MyStrings;
import org.micromanager.asidispim.Data.Positions;
import org.micromanager.asidispim.Data.Prefs;
import org.micromanager.asidispim.Data.Properties;
import org.micromanager.asidispim.Utils.DevicesListenerInterface;
import org.micromanager.asidispim.Utils.ListeningJPanel;
import org.micromanager.asidispim.Utils.MyDialogUtils;
import org.micromanager.asidispim.Utils.MyNumberUtils;
import org.micromanager.asidispim.Utils.PanelUtils;
import org.micromanager.asidispim.Utils.SliceTiming;
import org.micromanager.asidispim.Utils.StagePositionUpdater;
import java.awt.Color;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.Insets;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.swing.JCheckBox;
import javax.swing.JComponent;
import javax.swing.JOptionPane;
import javax.swing.JTextField;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JComboBox;
import javax.swing.JPanel;
import javax.swing.JSpinner;
import javax.swing.JToggleButton;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import net.miginfocom.swing.MigLayout;
import org.json.JSONException;
import org.json.JSONObject;
import mmcorej.CMMCore;
import mmcorej.Configuration;
import mmcorej.TaggedImage;
import org.micromanager.api.MultiStagePosition;
import org.micromanager.api.PositionList;
import org.micromanager.api.ScriptInterface;
import org.micromanager.api.ImageCache;
import org.micromanager.api.MMTags;
import org.micromanager.MMStudio;
import org.micromanager.acquisition.DefaultTaggedImageSink;
import org.micromanager.acquisition.MMAcquisition;
import org.micromanager.acquisition.TaggedImageQueue;
import org.micromanager.acquisition.TaggedImageStorageDiskDefault;
import org.micromanager.acquisition.TaggedImageStorageMultipageTiff;
import org.micromanager.imagedisplay.VirtualAcquisitionDisplay;
import org.micromanager.utils.ImageUtils;
import org.micromanager.utils.NumberUtils;
import org.micromanager.utils.FileDialogs;
import org.micromanager.utils.MDUtils;
import org.micromanager.utils.MMScriptException;
import org.micromanager.utils.ReportingUtils;
import com.swtdesigner.SwingResourceManager;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import javax.swing.BorderFactory;
import org.micromanager.acquisition.ComponentTitledBorder;
import org.micromanager.asidispim.Data.ChannelSpec;
import org.micromanager.utils.MMFrame;
/**
*
* @author nico
* @author Jon
*/
@SuppressWarnings("serial")
public class AcquisitionPanel extends ListeningJPanel implements DevicesListenerInterface {
private final Devices devices_;
private final Properties props_;
private final Joystick joystick_;
private final Cameras cameras_;
private final Prefs prefs_;
private final Positions positions_;
private final CMMCore core_;
private final ScriptInterface gui_;
private final JCheckBox advancedSliceTimingCB_;
private final JSpinner numSlices_;
private final JComboBox numSides_;
private final JComboBox firstSide_;
private final JSpinner numScansPerSlice_;
private final JSpinner lineScanPeriod_;
private final JSpinner delayScan_;
private final JSpinner delayLaser_;
private final JSpinner delayCamera_;
private final JSpinner durationCamera_; // NB: not the same as camera exposure
private final JSpinner durationLaser_;
private final JSpinner delaySide_;
private final JLabel actualSlicePeriodLabel_;
private final JLabel actualVolumeDurationLabel_;
private final JLabel actualTimeLapseDurationLabel_;
private final JSpinner numTimepoints_;
private final JSpinner acquisitionInterval_;
private final JToggleButton buttonStart_;
private final JPanel volPanel_;
private final JPanel slicePanel_;
private final JPanel timepointPanel_;
private final JPanel savePanel_;
private final JPanel durationPanel_;
private final JTextField rootField_;
private final JTextField nameField_;
private final JLabel acquisitionStatusLabel_;
private int numTimePointsDone_;
private final AtomicBoolean cancelAcquisition_ = new AtomicBoolean(false); // true if we should stop acquisition
private final AtomicBoolean acquisitionRunning_ = new AtomicBoolean(false); // true if acquisition is in progress
private final StagePositionUpdater posUpdater_;
private final JSpinner stepSize_;
private final JLabel desiredSlicePeriodLabel_;
private final JSpinner desiredSlicePeriod_;
private final JLabel desiredLightExposureLabel_;
private final JSpinner desiredLightExposure_;
private final JButton calculateSliceTiming_;
private final JCheckBox minSlicePeriodCB_;
private final JCheckBox separateTimePointsCB_;
private final JCheckBox saveCB_;
private final JCheckBox hideCB_;
private final JComboBox spimMode_;
private final JCheckBox navigationJoysticksCB_;
private final JCheckBox usePositionsCB_;
private final JSpinner positionDelay_;
private final JCheckBox useTimepointsCB_;
private final JPanel leftColumnPanel_;
private final JPanel centerColumnPanel_;
private final MMFrame sliceFrameAdvanced_;
private SliceTiming sliceTiming_;
private final MultiChannelSubPanel multiChannelPanel_;
public AcquisitionPanel(ScriptInterface gui,
Devices devices,
Properties props,
Joystick joystick,
Cameras cameras,
Prefs prefs,
StagePositionUpdater posUpdater,
Positions positions) {
super(MyStrings.PanelNames.ACQUSITION.toString(),
new MigLayout(
"",
"[center]0[center]0[center]",
"[top]0[]"));
gui_ = gui;
devices_ = devices;
props_ = props;
joystick_ = joystick;
cameras_ = cameras;
prefs_ = prefs;
posUpdater_ = posUpdater;
positions_ = positions;
core_ = gui_.getMMCore();
numTimePointsDone_ = 0;
sliceTiming_ = new SliceTiming();
PanelUtils pu = new PanelUtils(prefs_, props_, devices_);
// added to spinner controls where we should re-calculate the displayed
// slice period, volume duration, and time lapse duration
ChangeListener recalculateTimingDisplayCL = new ChangeListener() {
@Override
public void stateChanged(ChangeEvent e) {
updateDurationLabels();
}
};
// added to combobox controls where we should re-calculate the displayed
// slice period, volume duration, and time lapse duration
ActionListener recalculateTimingDisplayAL = new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
updateDurationLabels();
}
};
// start volume (main) sub-panel
volPanel_ = new JPanel(new MigLayout(
"",
"[right]10[center]",
"[]8[]"));
volPanel_.setBorder(PanelUtils.makeTitledBorder("Volume Settings"));
volPanel_.add(new JLabel("Number of sides:"));
String [] str12 = {"1", "2"};
numSides_ = pu.makeDropDownBox(str12, Devices.Keys.PLUGIN,
Properties.Keys.PLUGIN_NUM_SIDES, str12[1]);
numSides_.addActionListener(recalculateTimingDisplayAL);
volPanel_.add(numSides_, "wrap");
volPanel_.add(new JLabel("First side:"));
String[] ab = {Devices.Sides.A.toString(), Devices.Sides.B.toString()};
firstSide_ = pu.makeDropDownBox(ab, Devices.Keys.PLUGIN,
Properties.Keys.PLUGIN_FIRST_SIDE, Devices.Sides.A.toString());
volPanel_.add(firstSide_, "wrap");
volPanel_.add(new JLabel("Delay before side [ms]:"));
delaySide_ = pu.makeSpinnerFloat(0, 10000, 0.25,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_DELAY_SIDE, 0);
delaySide_.addChangeListener(recalculateTimingDisplayCL);
volPanel_.add(delaySide_, "wrap");
volPanel_.add(new JLabel("Slices per volume:"));
numSlices_ = pu.makeSpinnerInteger(1, 1000,
Devices.Keys.PLUGIN,
Properties.Keys.PLUGIN_NUM_SLICES, 20);
numSlices_.addChangeListener(recalculateTimingDisplayCL);
volPanel_.add(numSlices_, "wrap");
volPanel_.add(new JLabel("Slice step size [\u00B5m]:"));
stepSize_ = pu.makeSpinnerFloat(0, 100, 0.1,
Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_SLICE_STEP_SIZE,
1.0);
volPanel_.add(stepSize_, "wrap");
// out of order so we can reference it
desiredSlicePeriod_ = pu.makeSpinnerFloat(1, 1000, 0.25,
Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_DESIRED_SLICE_PERIOD, 30);
minSlicePeriodCB_ = pu.makeCheckBox("Minimize slice period",
Properties.Keys.PREFS_MINIMIZE_SLICE_PERIOD, panelName_, false);
minSlicePeriodCB_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
boolean doMin = minSlicePeriodCB_.isSelected();
desiredSlicePeriod_.setEnabled(!doMin);
if (doMin) {
recalculateSliceTiming(false);
}
}
});
volPanel_.add(minSlicePeriodCB_, "span 2, wrap");
// special field that is enabled/disabled depending on whether advanced timing is enabled
desiredSlicePeriodLabel_ = new JLabel("Slice period [ms]:");
volPanel_.add(desiredSlicePeriodLabel_);
volPanel_.add(desiredSlicePeriod_, "wrap");
desiredSlicePeriod_.addChangeListener(new ChangeListener() {
@Override
public void stateChanged(ChangeEvent ce) {
// make sure it is a multiple of 0.25 ms
float userVal = PanelUtils.getSpinnerFloatValue(desiredSlicePeriod_);
float nearestValid = MyNumberUtils.roundToQuarterMs(userVal);
if (!MyNumberUtils.floatsEqual(userVal, nearestValid)) {
PanelUtils.setSpinnerFloatValue(desiredSlicePeriod_, nearestValid);
}
}
});
// special field that is enabled/disabled depending on whether advanced timing is enabled
desiredLightExposureLabel_ = new JLabel("Sample exposure [ms]:");
volPanel_.add(desiredLightExposureLabel_);
desiredLightExposure_ = pu.makeSpinnerFloat(2.5, 1000.5, 1,
Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_DESIRED_EXPOSURE, 8.5);
desiredLightExposure_.addChangeListener(new ChangeListener() {
@Override
public void stateChanged(ChangeEvent ce) {
// make sure it is 2.5, 3.5, 4.5, ...
float val = PanelUtils.getSpinnerFloatValue(desiredLightExposure_);
float nearestValid = (float) Math.round(val+0.5f) - 0.5f;
if (!MyNumberUtils.floatsEqual(val, nearestValid)) {
PanelUtils.setSpinnerFloatValue(desiredLightExposure_, nearestValid);
}
}
});
volPanel_.add(desiredLightExposure_, "wrap");
calculateSliceTiming_ = new JButton("Calculate slice timing");
calculateSliceTiming_.setToolTipText("Must recalculate after changing the camera ROI.");
calculateSliceTiming_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
recalculateSliceTiming(!minSlicePeriodCB_.isSelected());
}
});
volPanel_.add(calculateSliceTiming_, "center, span 2, wrap");
// special checkbox to use the advanced timing settings
// action handler added below after defining components it enables/disables
advancedSliceTimingCB_ = pu.makeCheckBox("Use advanced timing settings",
Properties.Keys.PREFS_ADVANCED_SLICE_TIMING, panelName_, false);
volPanel_.add(advancedSliceTimingCB_, "left, span 2, wrap");
// end volume sub-panel
// start advanced slice timing frame
// visibility of this frame is controlled from advancedTiming checkbox
// this frame is separate from main plugin window
sliceFrameAdvanced_ = new MMFrame();
sliceFrameAdvanced_.setTitle("Advanced timing");
sliceFrameAdvanced_.loadPosition(100, 100);
slicePanel_ = new JPanel(new MigLayout(
"",
"[right]10[center]",
"[]8[]"));
sliceFrameAdvanced_.add(slicePanel_);
class SliceFrameAdapter extends WindowAdapter {
@Override
public void windowClosing(WindowEvent e) {
advancedSliceTimingCB_.setSelected(false);
sliceFrameAdvanced_.savePosition();
}
}
sliceFrameAdvanced_.addWindowListener(new SliceFrameAdapter());
JLabel scanDelayLabel = new JLabel("Delay before scan [ms]:");
slicePanel_.add(scanDelayLabel);
delayScan_ = pu.makeSpinnerFloat(0, 10000, 0.25,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_DELAY_SCAN, 0);
delayScan_.addChangeListener(recalculateTimingDisplayCL);
slicePanel_.add(delayScan_, "wrap");
JLabel lineScanLabel = new JLabel("Lines scans per slice:");
slicePanel_.add(lineScanLabel);
numScansPerSlice_ = pu.makeSpinnerInteger(1, 1000,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_NUM_SCANSPERSLICE, 1);
numScansPerSlice_.addChangeListener(recalculateTimingDisplayCL);
slicePanel_.add(numScansPerSlice_, "wrap");
JLabel lineScanPeriodLabel = new JLabel("Line scan period [ms]:");
slicePanel_.add(lineScanPeriodLabel);
lineScanPeriod_ = pu.makeSpinnerInteger(1, 10000,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_LINESCAN_PERIOD, 10);
lineScanPeriod_.addChangeListener(recalculateTimingDisplayCL);
slicePanel_.add(lineScanPeriod_, "wrap");
JLabel delayLaserLabel = new JLabel("Delay before laser [ms]:");
slicePanel_.add(delayLaserLabel);
delayLaser_ = pu.makeSpinnerFloat(0, 10000, 0.25,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_DELAY_LASER, 0);
delayLaser_.addChangeListener(recalculateTimingDisplayCL);
slicePanel_.add(delayLaser_, "wrap");
JLabel durationLabel = new JLabel("Laser trig duration [ms]:");
slicePanel_.add(durationLabel);
durationLaser_ = pu.makeSpinnerFloat(0, 10000, 0.25,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_DURATION_LASER, 1);
durationLaser_.addChangeListener(recalculateTimingDisplayCL);
slicePanel_.add(durationLaser_, "span 2, wrap");
JLabel delayLabel = new JLabel("Delay before camera [ms]:");
slicePanel_.add(delayLabel);
delayCamera_ = pu.makeSpinnerFloat(0, 10000, 0.25,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_DELAY_CAMERA, 0);
delayCamera_.addChangeListener(recalculateTimingDisplayCL);
slicePanel_.add(delayCamera_, "wrap");
JLabel cameraLabel = new JLabel("Camera trig duration [ms]:");
slicePanel_.add(cameraLabel);
durationCamera_ = pu.makeSpinnerFloat(0, 1000, 0.25,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_DURATION_CAMERA, 0);
durationCamera_.addChangeListener(recalculateTimingDisplayCL);
slicePanel_.add(durationCamera_, "wrap");
final JComponent[] simpleTimingComponents = { desiredLightExposure_,
calculateSliceTiming_, minSlicePeriodCB_, desiredSlicePeriodLabel_,
desiredLightExposureLabel_};
componentsSetEnabled(sliceFrameAdvanced_, advancedSliceTimingCB_.isSelected());
componentsSetEnabled(simpleTimingComponents, !advancedSliceTimingCB_.isSelected());
// this action listener takes care of enabling/disabling inputs
// of the advanced slice timing window
// we call this to get GUI looking right
ItemListener sliceTimingDisableGUIInputs = new ItemListener() {
@Override
public void itemStateChanged(ItemEvent e) {
boolean enabled = advancedSliceTimingCB_.isSelected();
// set other components in this advanced timing frame
componentsSetEnabled(sliceFrameAdvanced_, enabled);
// also control some components in main volume settings sub-panel
componentsSetEnabled(simpleTimingComponents, !enabled);
desiredSlicePeriod_.setEnabled(!enabled && !minSlicePeriodCB_.isSelected());
}
};
// this action listener shows/hides the advanced timing frame
ActionListener showAdvancedTimingFrame = new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
boolean enabled = advancedSliceTimingCB_.isSelected();
if (enabled) {
sliceFrameAdvanced_.setVisible(enabled);
}
}
};
sliceFrameAdvanced_.pack();
sliceFrameAdvanced_.setResizable(false);
// end slice Frame
// start repeat (time lapse) sub-panel
timepointPanel_ = new JPanel(new MigLayout(
"",
"[right]10[center]",
"[]8[]"));
useTimepointsCB_ = pu.makeCheckBox("Time points",
Properties.Keys.PREFS_USE_TIMEPOINTS, panelName_, false);
useTimepointsCB_.setToolTipText("Perform a time-lapse acquisition");
useTimepointsCB_.setEnabled(true);
useTimepointsCB_.setFocusPainted(false);
ComponentTitledBorder componentBorder =
new ComponentTitledBorder(useTimepointsCB_, timepointPanel_,
BorderFactory.createLineBorder(ASIdiSPIM.borderColor));
timepointPanel_.setBorder(componentBorder);
ChangeListener recalculateTimeLapseDisplay = new ChangeListener() {
@Override
public void stateChanged(ChangeEvent e) {
updateActualTimeLapseDurationLabel();
}
};
useTimepointsCB_.addChangeListener(recalculateTimeLapseDisplay);
timepointPanel_.add(new JLabel("Number:"));
numTimepoints_ = pu.makeSpinnerInteger(1, 32000,
Devices.Keys.PLUGIN,
Properties.Keys.PLUGIN_NUM_ACQUISITIONS, 1);
numTimepoints_.addChangeListener(recalculateTimeLapseDisplay);
timepointPanel_.add(numTimepoints_, "wrap");
timepointPanel_.add(new JLabel("Interval [s]:"));
acquisitionInterval_ = pu.makeSpinnerFloat(1, 32000, 0.1,
Devices.Keys.PLUGIN,
Properties.Keys.PLUGIN_ACQUISITION_INTERVAL, 60);
acquisitionInterval_.addChangeListener(recalculateTimeLapseDisplay);
timepointPanel_.add(acquisitionInterval_, "wrap");
// enable/disable panel elements depending on checkbox state
useTimepointsCB_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
componentsSetEnabled(timepointPanel_, useTimepointsCB_.isSelected());
}
});
componentsSetEnabled(timepointPanel_, useTimepointsCB_.isSelected()); // initialize
// end repeat sub-panel
// start savePanel
final int textFieldWidth = 16;
savePanel_ = new JPanel(new MigLayout(
"",
"[right]10[center]8[left]",
"[]4[]"));
savePanel_.setBorder(PanelUtils.makeTitledBorder("Data Saving Settings"));
separateTimePointsCB_ = pu.makeCheckBox("Separate viewer / file for each time point",
Properties.Keys.PREFS_SEPARATE_VIEWERS_FOR_TIMEPOINTS, panelName_, false);
savePanel_.add(separateTimePointsCB_, "span 3, left, wrap");
hideCB_ = pu.makeCheckBox("Hide viewer",
Properties.Keys.PREFS_HIDE_WHILE_ACQUIRING, panelName_, false);
savePanel_.add(hideCB_, "left");
hideCB_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent ae) {
// if viewer is hidden then force saving to disk
if (hideCB_.isSelected()) {
if (!saveCB_.isSelected()) {
saveCB_.doClick();
}
saveCB_.setEnabled(false);
} else {
saveCB_.setEnabled(true);
}
}
});
saveCB_ = pu.makeCheckBox("Save while acquiring",
Properties.Keys.PREFS_SAVE_WHILE_ACQUIRING, panelName_, false);
// init the save while acquiring CB; could also do two doClick() calls
if (hideCB_.isSelected()) {
saveCB_.setEnabled(false);
}
savePanel_.add(saveCB_, "span 2, center, wrap");
JLabel dirRootLabel = new JLabel ("Directory root:");
savePanel_.add(dirRootLabel);
rootField_ = new JTextField();
rootField_.setText( prefs_.getString(panelName_,
Properties.Keys.PLUGIN_DIRECTORY_ROOT, "") );
rootField_.setColumns(textFieldWidth);
savePanel_.add(rootField_, "span 2");
JButton browseRootButton = new JButton();
browseRootButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
setRootDirectory(rootField_);
prefs_.putString(panelName_, Properties.Keys.PLUGIN_DIRECTORY_ROOT,
rootField_.getText());
}
});
browseRootButton.setMargin(new Insets(2, 5, 2, 5));
browseRootButton.setText("...");
savePanel_.add(browseRootButton, "wrap");
JLabel namePrefixLabel = new JLabel();
namePrefixLabel.setText("Name prefix:");
savePanel_.add(namePrefixLabel);
nameField_ = new JTextField("acq");
nameField_.setText( prefs_.getString(panelName_,
Properties.Keys.PLUGIN_NAME_PREFIX, "acq"));
nameField_.setColumns(textFieldWidth);
savePanel_.add(nameField_, "span 2, wrap");
// since we use the name field even for acquisitions in RAM,
// we only need to gray out the directory-related components
final JComponent[] saveComponents = { browseRootButton, rootField_,
dirRootLabel };
componentsSetEnabled(saveComponents, saveCB_.isSelected());
saveCB_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
componentsSetEnabled(saveComponents, saveCB_.isSelected());
}
});
// end save panel
// start duration report panel
durationPanel_ = new JPanel(new MigLayout(
"",
"[right]6[left, 40%!]",
"[]5[]"));
durationPanel_.setBorder(PanelUtils.makeTitledBorder("Durations"));
durationPanel_.setPreferredSize(new Dimension(125, 0)); // fix width so it doesn't constantly change depending on text
durationPanel_.add(new JLabel("Slice:"));
actualSlicePeriodLabel_ = new JLabel();
durationPanel_.add(actualSlicePeriodLabel_, "wrap");
durationPanel_.add(new JLabel("Volume:"));
actualVolumeDurationLabel_ = new JLabel();
durationPanel_.add(actualVolumeDurationLabel_, "wrap");
durationPanel_.add(new JLabel("Total:"));
actualTimeLapseDurationLabel_ = new JLabel();
durationPanel_.add(actualTimeLapseDurationLabel_, "wrap");
// end duration report panel
buttonStart_ = new JToggleButton();
buttonStart_.setIconTextGap(6);
buttonStart_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
if (acquisitionRunning_.get()) {
cancelAcquisition_.set(true);
} else {
runAcquisition();
}
}
});
updateStartButton(); // call once to initialize, isSelected() will be false
acquisitionStatusLabel_ = new JLabel("");
updateAcquisitionStatus(AcquisitionStatus.NONE);
// Channel Panel (separate file for code)
multiChannelPanel_ = new MultiChannelSubPanel(gui, devices_, props_, prefs_);
multiChannelPanel_.addDurationLabelListener(this);
// Position Panel
final JPanel positionPanel = new JPanel();
positionPanel.setLayout(new MigLayout("flowx, fillx","[right]10[left][10][]","[]8[]"));
usePositionsCB_ = pu.makeCheckBox("Multiple positions (XY)",
Properties.Keys.PREFS_USE_MULTIPOSITION, panelName_, false);
usePositionsCB_.setToolTipText("Acquire datasets at multiple positions");
usePositionsCB_.setEnabled(true);
usePositionsCB_.setFocusPainted(false);
componentBorder =
new ComponentTitledBorder(usePositionsCB_, positionPanel,
BorderFactory.createLineBorder(ASIdiSPIM.borderColor));
positionPanel.setBorder(componentBorder);
final JButton editPositionListButton = new JButton("Edit position list...");
editPositionListButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
gui_.showXYPositionList();
}
});
positionPanel.add(editPositionListButton, "span 2, center");
// add empty fill space on right side of panel
positionPanel.add(new JLabel(""), "wrap, growx");
positionPanel.add(new JLabel("Post-move delay [ms]:"));
positionDelay_ = pu.makeSpinnerFloat(0.0, 10000.0, 100.0,
Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_POSITION_DELAY,
0.0);
positionPanel.add(positionDelay_, "wrap");
// enable/disable panel elements depending on checkbox state
usePositionsCB_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
componentsSetEnabled(positionPanel, usePositionsCB_.isSelected());
}
});
componentsSetEnabled(positionPanel, usePositionsCB_.isSelected()); // initialize
// end of Position panel
// checkbox to use navigation joystick settings or not
// an "orphan" UI element
navigationJoysticksCB_ = new JCheckBox("Use Navigation joystick settings");
navigationJoysticksCB_.setSelected(prefs_.getBoolean(panelName_,
Properties.Keys.PLUGIN_USE_NAVIGATION_JOYSTICKS, false));
navigationJoysticksCB_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
if (navigationJoysticksCB_.isSelected()) {
ASIdiSPIM.getFrame().getNavigationPanel().doJoystickSettings();
} else {
joystick_.unsetAllJoysticks();
}
prefs_.putBoolean(panelName_, Properties.Keys.PLUGIN_USE_NAVIGATION_JOYSTICKS,
navigationJoysticksCB_.isSelected());
}
});
// set up tabbed panels for GUI
// make 3 columns as own JPanels to get vertical space right
// in each column without dependencies on other columns
leftColumnPanel_ = new JPanel(new MigLayout(
"",
"[]",
"[]6[]10[]10[]"));
leftColumnPanel_.add(durationPanel_, "split 2");
leftColumnPanel_.add(timepointPanel_, "wrap, growx");
leftColumnPanel_.add(savePanel_, "wrap");
leftColumnPanel_.add(new JLabel("SPIM mode: "), "split 2, left");
AcquisitionModes acqModes = new AcquisitionModes(devices_, props_, prefs_);
spimMode_ = acqModes.getComboBox();
leftColumnPanel_.add(spimMode_, "wrap");
leftColumnPanel_.add(buttonStart_, "split 2, left");
leftColumnPanel_.add(acquisitionStatusLabel_);
centerColumnPanel_ = new JPanel(new MigLayout(
"",
"[]",
"[]"));
centerColumnPanel_.add(positionPanel, "growx, wrap");
centerColumnPanel_.add(multiChannelPanel_, "wrap");
centerColumnPanel_.add(navigationJoysticksCB_, "wrap");
// add the column panels to the main panel
this.add(leftColumnPanel_);
this.add(centerColumnPanel_);
this.add(volPanel_);
// properly initialize the advanced slice timing
advancedSliceTimingCB_.addItemListener(sliceTimingDisableGUIInputs);
sliceTimingDisableGUIInputs.itemStateChanged(null);
//advancedSliceTimingCB_.doClick();
//advancedSliceTimingCB_.doClick();
advancedSliceTimingCB_.addActionListener(showAdvancedTimingFrame);
updateDurationLabels();
// for easy timing mode, calculate slice timing to start
if (!advancedSliceTimingCB_.isSelected()
&& checkCamerasAssigned(false)) {
calculateSliceTiming_.doClick();
}
}//end constructor
public final void updateDurationLabels() {
updateActualSlicePeriodLabel();
updateActualVolumeDurationLabel();
updateActualTimeLapseDurationLabel();
}
/**
* Sets the acquisition name prefix programmatically.
* Added so that name prefix can be changed from a script.
* @param acqName
*/
public void setAcquisitionNamePrefix(String acqName) {
nameField_.setText(acqName);
}
private void updateStartButton() {
boolean started = acquisitionRunning_.get();
buttonStart_.setSelected(started);
buttonStart_.setText(started ? "Stop!" : "Start!");
buttonStart_.setBackground(started ? Color.red : Color.green);
buttonStart_.setIcon(started ?
SwingResourceManager.
getIcon(MMStudio.class,
"/org/micromanager/icons/cancel.png")
: SwingResourceManager.getIcon(MMStudio.class,
"/org/micromanager/icons/arrow_right.png"));
}
/**
* @return either "A" or "B"
*/
private String getFirstSide() {
return (String)firstSide_.getSelectedItem();
}
private boolean isFirstSideA() {
return getFirstSide().equals("A");
}
/**
* @return either 1 or 2
*/
private int getNumSides() {
if (numSides_.getSelectedIndex() == 1) {
return 2;
} else {
return 1;
}
}
private boolean isTwoSided() {
return (numSides_.getSelectedIndex() == 1);
}
private int getNumTimepoints() {
if (!useTimepointsCB_.isSelected()) {
return 1;
}
return (Integer) numTimepoints_.getValue();
}
private int getNumChannels() {
if (!multiChannelPanel_.isPanelEnabled()) {
return 1;
}
return multiChannelPanel_.getUsedChannels().length;
}
private int getLineScanPeriod() {
return (Integer) lineScanPeriod_.getValue();
}
private int getNumScansPerSlice() {
return (Integer) numScansPerSlice_.getValue();
}
private int getNumSlices() {
return (Integer) numSlices_.getValue();
}
private double getStepSizeUm() {
return PanelUtils.getSpinnerFloatValue(stepSize_);
}
/**
*
* @param showWarnings true to warn user about needing to change slice period
* @return
*/
private SliceTiming getTimingFromPeriodAndLightExposure(boolean showWarnings) {
// uses algorithm Jon worked out in Octave code; each slice period goes like this:
// 1. camera readout time (none if in overlap mode)
// 2. any extra delay time
// 3. camera reset time
// 4. start scan 0.25ms before camera global exposure and shifted up in time to account for delay introduced by Bessel filter
// 5. turn on laser as soon as camera global exposure, leave laser on for desired light exposure time
// 6. end camera exposure in final 0.25ms, post-filter scan waveform also ends now
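// Hypothetical worked example (numbers chosen for illustration only, not defaults):
// with a desired slice period of 30 ms, desired exposure of 8.5 ms, overlap-mode
// readout of 0 ms and a camera reset time of 2 ms, the calculation below gives
//   scanPeriod    = round(8.5 + 2*0.25)   = 9 ms
//   laserDuration = 9 - 2*0.25            = 8.5 ms
//   globalDelay   = 30 - 0 - 2 - 9 + 0.25 = 19.25 ms
// i.e. the scan, laser and camera triggers all fit inside the 30 ms slice period.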
final float scanLaserBufferTime = 0.25f;
final Color foregroundColorOK = Color.BLACK;
final Color foregroundColorError = Color.RED;
final Component elementToColor = desiredSlicePeriod_.getEditor().getComponent(0);
SliceTiming s = new SliceTiming();
float cameraResetTime = computeCameraResetTime(); // recalculate for safety
float cameraReadoutTime = computeCameraReadoutTime(); // recalculate for safety
// get delay between trigger and when exposure timer starts so we can
// decrease camera exposure accordingly
// for now simply recover "overhead time" in computeCameraResetTime()
// if readout/reset calculations change then this may need to be more sophisticated
float cameraExposureDelayTime = cameraResetTime - cameraReadoutTime;
float desiredPeriod = minSlicePeriodCB_.isSelected() ? 0 :
PanelUtils.getSpinnerFloatValue(desiredSlicePeriod_);
float desiredExposure = PanelUtils.getSpinnerFloatValue(desiredLightExposure_);
float cameraReadout_max = MyNumberUtils.ceilToQuarterMs(cameraReadoutTime);
float cameraReset_max = MyNumberUtils.ceilToQuarterMs(cameraResetTime);
float slicePeriod = MyNumberUtils.roundToQuarterMs(desiredPeriod);
int scanPeriod = Math.round(desiredExposure + 2*scanLaserBufferTime);
// scan will be longer than laser by 0.25ms at both start and end
float laserDuration = scanPeriod - 2*scanLaserBufferTime; // will be integer plus 0.5
// compute "extra" per-slice time: period minus camera reset and readout times minus (scan time - 0.25ms)
// the last 0.25ms correction comes because we start the scan 0.25ms before camera global exposure
float globalDelay = slicePeriod - cameraReadout_max - cameraReset_max - scanPeriod + scanLaserBufferTime;
// if the calculated delay is negative then we have to lengthen the slice period (in 0.25 ms increments) to fit the camera and laser timing
if (globalDelay < 0) {
float extraTimeNeeded = MyNumberUtils.ceilToQuarterMs(-1f*globalDelay); // positive number
globalDelay += extraTimeNeeded;
if (showWarnings) {
MyDialogUtils.showError(
"Increasing slice period to meet laser exposure constraint\n"
+ "(time required for camera readout; readout time depends on ROI).\n");
elementToColor.setForeground(foregroundColorError);
// considered actually changing the value, but decided against it because
// maybe the user just needs to set the ROI appropriately and recalculate
} else {
elementToColor.setForeground(foregroundColorOK);
}
} else {
elementToColor.setForeground(foregroundColorOK);
}
// account for delay in scan position based on Bessel filter by starting the scan slightly earlier
// than we otherwise would; delay is (empirically) ~0.33/(freq in kHz)
// find better results adding 0.4/(freq in kHz) though
// group delay for bessel filter approx 1/w or ~0.16/freq, or half/third the empirical value (not sure why discrepancy)
float scanFilterFreq = Math.max(props_.getPropValueFloat(Devices.Keys.GALVOA, Properties.Keys.SCANNER_FILTER_X),
props_.getPropValueFloat(Devices.Keys.GALVOB, Properties.Keys.SCANNER_FILTER_X));
float scanDelayFilter = 0;
if (scanFilterFreq != 0) {
scanDelayFilter = MyNumberUtils.roundToQuarterMs(0.4f/scanFilterFreq);
}
// Add 0.25ms to globalDelay if it is 0 and we are on overlap mode and scan has been shifted forward
// basically the last 0.25ms of scan time that would have determined the slice period isn't
// there any more because the scan time is moved up => add in the 0.25ms at the start of the slice
// in edge or level trigger mode the camera trig falling edge marks the end of the slice period
// not sure if PCO pseudo-overlap needs this, probably not because adding 0.25ms overhead in that case
if (MyNumberUtils.floatsEqual(cameraReadout_max, 0f) // true iff overlap being used
&& (scanDelayFilter > 0.01f)) {
globalDelay += 0.25f;
}
// If the PLogic card is used, account for 0.25ms delay it introduces
// to the camera and laser trigger signals => subtract 0.25ms to the scanner delay
// (start scanner 0.25ms later than it would be otherwise)
// (really it is 0.25ms minus the evaluation time to generate the signals)
// this time-shift opposes the Bessel filter delay
if (devices_.isValidMMDevice(Devices.Keys.PLOGIC)) {
scanDelayFilter -= 0.25f;
}
s.scanDelay = cameraReadout_max + globalDelay + cameraReset_max - scanDelayFilter - scanLaserBufferTime;
s.scanNum = 1;
s.scanPeriod = scanPeriod;
s.laserDelay = cameraReadout_max + globalDelay + cameraReset_max;
s.laserDuration = laserDuration;
s.cameraDelay = cameraReadout_max + globalDelay;
s.cameraDuration = cameraReset_max + scanPeriod - scanLaserBufferTime; // approx. same as exposure, can be used in bulb mode
s.cameraExposure = s.cameraDuration
- 0.01f // give up 0.01ms of our 0.25ms overhead here because camera might round up
// from the set exposure time and thus exceeding total period
- cameraExposureDelayTime;
// change camera duration for overlap mode to be short trigger
// needed because exposure time is set by difference between pulses in this mode
CameraModes.Keys cameraMode = CameraModes.getKeyFromPrefCode(
prefs_.getInt(MyStrings.PanelNames.SETTINGS.toString(),
Properties.Keys.PLUGIN_CAMERA_MODE, 0));
if (cameraMode == CameraModes.Keys.OVERLAP) {
// for Hamamatsu's "synchronous" or Zyla's "overlap" mode
// send single short trigger
s.cameraDuration = 1;
}
return s;
}
/**
* @return true if the slice timing matches the current user parameters and ROI
*/
private boolean isSliceTimingUpToDate() {
SliceTiming newTiming = getTimingFromPeriodAndLightExposure(false);
return sliceTiming_.equals(newTiming);
}
/**
* Re-calculate the controller's timing settings for "easy timing" mode.
* If the values are the same nothing happens. If they should be changed,
* then the controller's properties will be set.
* @param showWarnings will show warning if the user-specified slice period too short
*/
private void recalculateSliceTiming(boolean showWarnings) {
if(!checkCamerasAssigned(true)) {
return;
}
sliceTiming_ = getTimingFromPeriodAndLightExposure(showWarnings);
PanelUtils.setSpinnerFloatValue(delayScan_, sliceTiming_.scanDelay);
numScansPerSlice_.setValue(sliceTiming_.scanNum);
lineScanPeriod_.setValue(sliceTiming_.scanPeriod);
PanelUtils.setSpinnerFloatValue(delayLaser_, sliceTiming_.laserDelay);
PanelUtils.setSpinnerFloatValue(durationLaser_, sliceTiming_.laserDuration);
PanelUtils.setSpinnerFloatValue(delayCamera_, sliceTiming_.cameraDelay);
PanelUtils.setSpinnerFloatValue(durationCamera_, sliceTiming_.cameraDuration );
}
/**
* Compute slice period in ms based on controller's timing settings.
* @return period in ms
*/
private double computeActualSlicePeriod() {
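      // Example with hypothetical spinner values: scan = 1.0 + (10 * 1) = 11.0 ms,
      //   laser = 1.0 + 8.5 = 9.5 ms, camera = 0.5 + 10.0 = 10.5 ms,
      //   so the reported slice period is the maximum of the three, 11.0 ms.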
double period = Math.max(Math.max(
PanelUtils.getSpinnerFloatValue(delayScan_) + // scan time
(getLineScanPeriod() * getNumScansPerSlice()),
PanelUtils.getSpinnerFloatValue(delayLaser_)
+ PanelUtils.getSpinnerFloatValue(durationLaser_) // laser time
),
PanelUtils.getSpinnerFloatValue(delayCamera_)
+ PanelUtils.getSpinnerFloatValue(durationCamera_) // camera time
);
return period;
}
/**
* Update the displayed slice period.
*/
private void updateActualSlicePeriodLabel() {
actualSlicePeriodLabel_.setText(
NumberUtils.doubleToDisplayString(computeActualSlicePeriod()) +
" ms");
}
/**
* Compute the volume duration in ms based on controller's timing settings.
* @return duration in ms
*/
private double computeActualVolumeDuration() {
double duration = getNumSides() * getNumChannels() *
(PanelUtils.getSpinnerFloatValue(delaySide_) +
getNumSlices() * computeActualSlicePeriod());
return duration;
}
/**
* Update the displayed volume duration.
*/
private void updateActualVolumeDurationLabel() {
actualVolumeDurationLabel_.setText(
NumberUtils.doubleToDisplayString(computeActualVolumeDuration()) +
" ms");
}
/**
* Compute the time lapse duration
* @return duration in s
*/
private double computeActualTimeLapseDuration() {
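      // Example with hypothetical values: 10 time points at a 60 s interval with a 5 s volume
      //   gives (10 - 1) * 60 + 5 = 545 s.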
double duration = (getNumTimepoints() - 1) *
PanelUtils.getSpinnerFloatValue(acquisitionInterval_)
+ computeActualVolumeDuration()/1000;
return duration;
}
/**
* Update the displayed time lapse duration.
*/
private void updateActualTimeLapseDurationLabel() {
String s = "";
double duration = computeActualTimeLapseDuration();
if (duration < 60) { // less than 1 min
s += NumberUtils.doubleToDisplayString(duration) + " s";
} else if (duration < 60*60) { // between 1 min and 1 hour
s += NumberUtils.doubleToDisplayString(Math.floor(duration/60)) + " min ";
s += NumberUtils.doubleToDisplayString(Math.round(duration % 60)) + " s";
} else { // longer than 1 hour
s += NumberUtils.doubleToDisplayString(Math.floor(duration/(60*60))) + " hr ";
s += NumberUtils.doubleToDisplayString(Math.round((duration % (60*60))/60)) + " min";
}
actualTimeLapseDurationLabel_.setText(s);
}
/**
* Computes the reset time of the SPIM cameras set on Devices panel.
* Handles single-side operation.
* Needed for computing (semi-)optimized slice timing in "easy timing" mode.
    * @return camera reset time in ms (the slower of the two cameras if two-sided)
*/
private float computeCameraResetTime() {
float resetTime;
if (isTwoSided()) {
resetTime = Math.max(cameras_.computeCameraResetTime(Devices.Keys.CAMERAA),
cameras_.computeCameraResetTime(Devices.Keys.CAMERAB));
} else {
if (isFirstSideA()) {
resetTime = cameras_.computeCameraResetTime(Devices.Keys.CAMERAA);
} else {
resetTime = cameras_.computeCameraResetTime(Devices.Keys.CAMERAB);
}
}
return resetTime;
}
/**
* Computes the readout time of the SPIM cameras set on Devices panel.
* Handles single-side operation.
* Needed for computing (semi-)optimized slice timing in "easy timing" mode.
    * @return camera readout time in ms (the slower of the two cameras if two-sided)
*/
private float computeCameraReadoutTime() {
float readoutTime;
CameraModes.Keys camMode = CameraModes.getKeyFromPrefCode(
prefs_.getInt(MyStrings.PanelNames.SETTINGS.toString(),
Properties.Keys.PLUGIN_CAMERA_MODE, 0));
boolean isOverlap = (camMode == CameraModes.Keys.OVERLAP ||
camMode == CameraModes.Keys.PSEUDO_OVERLAP);
if (isTwoSided()) {
readoutTime = Math.max(cameras_.computeCameraReadoutTime(Devices.Keys.CAMERAA, isOverlap),
cameras_.computeCameraReadoutTime(Devices.Keys.CAMERAB, isOverlap));
} else {
if (isFirstSideA()) {
readoutTime = cameras_.computeCameraReadoutTime(Devices.Keys.CAMERAA, isOverlap);
} else {
readoutTime = cameras_.computeCameraReadoutTime(Devices.Keys.CAMERAB, isOverlap);
}
}
return readoutTime;
}
/**
    * Makes sure that cameras are assigned to the desired sides and displays an error message
* if not (e.g. if single-sided with side B first, then only checks camera for side B)
* @return true if cameras assigned, false if not
*/
private boolean checkCamerasAssigned(boolean showWarnings) {
String firstCamera, secondCamera;
boolean firstSideA = isFirstSideA();
if (firstSideA) {
firstCamera = devices_.getMMDevice(Devices.Keys.CAMERAA);
secondCamera = devices_.getMMDevice(Devices.Keys.CAMERAB);
} else {
firstCamera = devices_.getMMDevice(Devices.Keys.CAMERAB);
secondCamera = devices_.getMMDevice(Devices.Keys.CAMERAA);
}
if (firstCamera == null) {
if (showWarnings) {
MyDialogUtils.showError("Please select a valid camera for the first side (Imaging Path " +
(firstSideA ? "A" : "B") + ") on the Devices Panel");
}
return false;
}
if (isTwoSided() && secondCamera == null) {
if (showWarnings) {
MyDialogUtils.showError("Please select a valid camera for the second side (Imaging Path " +
(firstSideA ? "B" : "A") + ") on the Devices Panel.");
}
return false;
}
return true;
}
/**
* used for updateAcquisitionStatus() calls
*/
private static enum AcquisitionStatus {
NONE,
ACQUIRING,
WAITING,
DONE,
}
private void updateAcquisitionStatus(AcquisitionStatus phase) {
updateAcquisitionStatus(phase, 0);
}
private void updateAcquisitionStatus(AcquisitionStatus phase, int secsToNextAcquisition) {
String text = "";
switch(phase) {
case NONE:
text = "No acquisition in progress.";
break;
case ACQUIRING:
text = "Acquiring time point "
+ NumberUtils.intToDisplayString(numTimePointsDone_)
+ " of "
+ NumberUtils.intToDisplayString(getNumTimepoints());
break;
case WAITING:
text = "Finished "
+ NumberUtils.intToDisplayString(numTimePointsDone_)
+ " of "
+ NumberUtils.intToDisplayString(getNumTimepoints())
+ " time points; next in "
+ NumberUtils.intToDisplayString(secsToNextAcquisition)
+ " s.";
break;
case DONE:
text = "Acquisition finished with "
+ NumberUtils.intToDisplayString(numTimePointsDone_)
+ " time points.";
break;
default:
break;
}
acquisitionStatusLabel_.setText(text);
}
/**
* call setEnabled(boolean) on all components in list
* @param components
* @param enabled
*/
private static void componentsSetEnabled(JComponent[] components, boolean enabled) {
for (JComponent c : components) {
c.setEnabled(enabled);
}
}
/**
* call setEnabled(boolean) on all components in frame/panel
    * @param container
* @param enabled
*/
private static void componentsSetEnabled(Container container, boolean enabled) {
for (Component comp : container.getComponents()) {
comp.setEnabled(enabled);
}
}
/**
* Sets all the controller's properties according to volume settings
* and otherwise gets controller all ready for acquisition
* (except for final trigger).
* @param side
* @return false if there was some error that should abort acquisition
*/
   private boolean prepareControllerForAcquisition(Devices.Sides side) {
Devices.Keys galvoDevice = Devices.getSideSpecificKey(Devices.Keys.GALVOA, side);
Devices.Keys piezoDevice = Devices.getSideSpecificKey(Devices.Keys.PIEZOA, side);
boolean ignoreMissingScanner = prefs_.getBoolean(MyStrings.PanelNames.SETTINGS.toString(),
Properties.Keys.PREFS_IGNORE_MISSING_SCANNER, false);
boolean haveMissingScanner = !devices_.isValidMMDevice(galvoDevice);
boolean skipScannerWarnings = ignoreMissingScanner && haveMissingScanner;
// checks to prevent hard-to-diagnose other errors
if (!ignoreMissingScanner && haveMissingScanner) {
MyDialogUtils.showError("Scanner device required; please check Devices tab.");
return false;
}
// if we are changing color slice by slice then set controller to do multiple slices per piezo move
// otherwise just set to 1 slice per piezo move
int numSlicesPerPiezo = 1;
if (props_.getPropValueInteger(Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_MULTICHANNEL_MODE)
== MultichannelModes.Keys.SLICE_HW.getPrefCode()) {
numSlicesPerPiezo = getNumChannels();
}
props_.setPropValue(galvoDevice, Properties.Keys.SPIM_NUM_SLICES_PER_PIEZO,
numSlicesPerPiezo, skipScannerWarnings);
// if we are changing color volume by volume then set controller to do multiple volumes per start trigger
// otherwise just set to 1 volume per start trigger
int numVolumesPerTrigger = 1;
if (props_.getPropValueInteger(Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_MULTICHANNEL_MODE)
== MultichannelModes.Keys.VOLUME_HW.getPrefCode()) {
numVolumesPerTrigger = getNumChannels();
}
props_.setPropValue(galvoDevice, Properties.Keys.SPIM_NUM_REPEATS, numVolumesPerTrigger, skipScannerWarnings);
AcquisitionModes.Keys spimMode = (AcquisitionModes.Keys) spimMode_.getSelectedItem();
// figure out the piezo parameters
int numSlices = getNumSlices();
float piezoAmplitude = ( (numSlices - 1) *
PanelUtils.getSpinnerFloatValue(stepSize_));
float piezoCenter = prefs_.getFloat(
MyStrings.PanelNames.SETUP.toString() + side.toString(),
Properties.Keys.PLUGIN_PIEZO_CENTER_POS, 0);
// if we set piezoAmplitude to 0 here then sliceAmplitude will also be 0
if (spimMode.equals(AcquisitionModes.Keys.NO_SCAN)) {
piezoAmplitude = 0.0f;
}
// tweak the parameters if we are using synchronous/overlap mode
      // the goal is to get exactly the same piezo/scanner positions in the first
      //   N frames (piezo/scanner will move to the (N+1)st position but no image is taken)
CameraModes.Keys cameraMode = CameraModes.getKeyFromPrefCode(
prefs_.getInt(MyStrings.PanelNames.SETTINGS.toString(),
Properties.Keys.PLUGIN_CAMERA_MODE, 0));
if (cameraMode == CameraModes.Keys.OVERLAP) {
piezoAmplitude *= ((float)numSlices)/(numSlices-1);
piezoCenter += piezoAmplitude/(2*numSlices);
numSlices += 1;
}
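      // Illustrative arithmetic with hypothetical values: 20 slices at a 1.0 um step give
      //   piezoAmplitude = 19 um; the overlap tweak scales it by 20/19 to 20 um, shifts
      //   piezoCenter by 20/(2*20) = 0.5 um, and raises numSlices to 21 so the first
      //   20 frames land on the original piezo/scanner positions.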
float sliceRate = prefs_.getFloat(
MyStrings.PanelNames.SETUP.toString() + side.toString(),
Properties.Keys.PLUGIN_RATE_PIEZO_SHEET, -80);
if (MyNumberUtils.floatsEqual(sliceRate, 0.0f)) {
MyDialogUtils.showError("Rate for slice " + side.toString() +
" cannot be zero. Re-do calibration on Setup tab.");
return false;
}
float sliceOffset = prefs_.getFloat(
MyStrings.PanelNames.SETUP.toString() + side.toString(),
Properties.Keys.PLUGIN_OFFSET_PIEZO_SHEET, 0);
float sliceAmplitude = piezoAmplitude / sliceRate;
float sliceCenter = (piezoCenter - sliceOffset) / sliceRate;
// get the micro-mirror card ready
// SA_AMPLITUDE_X_DEG and SA_OFFSET_X_DEG done by setup tabs
boolean triangleWave = prefs_.getBoolean(
MyStrings.PanelNames.SETTINGS.toString(),
Properties.Keys.PREFS_SCAN_OPPOSITE_DIRECTIONS, true);
Properties.Values scanPattern = triangleWave ?
Properties.Values.SAM_TRIANGLE : Properties.Values.SAM_RAMP;
props_.setPropValue(galvoDevice, Properties.Keys.SA_PATTERN_X,
scanPattern, skipScannerWarnings);
props_.setPropValue(galvoDevice, Properties.Keys.SA_AMPLITUDE_Y_DEG,
sliceAmplitude, skipScannerWarnings);
props_.setPropValue(galvoDevice, Properties.Keys.SA_OFFSET_Y_DEG,
sliceCenter, skipScannerWarnings);
props_.setPropValue(galvoDevice, Properties.Keys.BEAM_ENABLED,
Properties.Values.NO, skipScannerWarnings);
props_.setPropValue(galvoDevice, Properties.Keys.SPIM_NUM_SLICES,
numSlices, skipScannerWarnings);
props_.setPropValue(galvoDevice, Properties.Keys.SPIM_NUM_SIDES,
getNumSides(), skipScannerWarnings);
props_.setPropValue(galvoDevice, Properties.Keys.SPIM_FIRSTSIDE,
getFirstSide(), skipScannerWarnings);
// get the piezo card ready; skip if no piezo specified
if (devices_.isValidMMDevice(piezoDevice)) {
if (spimMode.equals(AcquisitionModes.Keys.SLICE_SCAN_ONLY)) {
piezoAmplitude = 0.0f;
}
props_.setPropValue(piezoDevice,
Properties.Keys.SA_AMPLITUDE, piezoAmplitude);
props_.setPropValue(piezoDevice,
Properties.Keys.SA_OFFSET, piezoCenter);
props_.setPropValue(piezoDevice,
Properties.Keys.SPIM_NUM_SLICES, numSlices);
props_.setPropValue(piezoDevice,
Properties.Keys.SPIM_STATE, Properties.Values.SPIM_ARMED);
}
return true;
}
/**
* Gets the associated PLogic BNC from the channel (containing preset name)
* @param channel
* @return value 5, 6, 7, or 8; returns 0 if there is an error
*/
private int getPLogicOutputFromChannel(ChannelSpec channel) {
try {
Configuration configData = core_.getConfigData(multiChannelPanel_.getChannelGroup(), channel.config_);
if (!configData.isPropertyIncluded(devices_.getMMDevice(Devices.Keys.PLOGIC), Properties.Keys.PLOGIC_OUTPUT_CHANNEL.toString())) {
MyDialogUtils.showError("Must include PLogic \"OutputChannel\" in preset for hardware switching");
return 0;
}
String setting = configData.getSetting(devices_.getMMDevice(Devices.Keys.PLOGIC), Properties.Keys.PLOGIC_OUTPUT_CHANNEL.toString()).getPropertyValue();
if (setting.equals(Properties.Values.PLOGIC_CHANNEL_BNC5.toString())) {
return 5;
} else if (setting.equals(Properties.Values.PLOGIC_CHANNEL_BNC6.toString())) {
return 6;
} else if (setting.equals(Properties.Values.PLOGIC_CHANNEL_BNC7.toString())) {
return 7;
} else if (setting.equals(Properties.Values.PLOGIC_CHANNEL_BNC8.toString())) {
return 8;
} else {
MyDialogUtils.showError("Channel preset setting must use PLogic \"OutputChannel\" and be set to one of outputs 5-8 only");
return 0;
}
} catch (Exception e) {
MyDialogUtils.showError(e, "Could not get PLogic output from channel");
return 0;
}
}
/**
* Programs the PLogic card for hardware channel switching
* according to the selections in the Multichannel subpanel
* @return false if there is a fatal error, true if successful
*/
private boolean setupHardwareChannelSwitching() {
final int counterLSBAddress = 3;
final int counterMSBAddress = 4;
final int laserTTLAddress = 42;
final int invertAddress = 64;
if (!devices_.isValidMMDevice(Devices.Keys.PLOGIC)) {
MyDialogUtils.showError("PLogic card required for hardware switching");
return false;
}
// set up clock for counters
MultichannelModes.Keys prefCode = MultichannelModes.getKeyFromPrefCode(
props_.getPropValueInteger(Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_MULTICHANNEL_MODE));
switch (prefCode) {
case SLICE_HW:
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_CLOCK_LASER);
break;
case VOLUME_HW:
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_CLOCK_SIDE);
break;
default:
MyDialogUtils.showError("Unknown multichannel mode for hardware switching");
return false;
}
// set up hardware counter
switch (getNumChannels()) {
case 1:
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_COUNT_1);
break;
case 2:
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_COUNT_2);
break;
case 3:
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_COUNT_3);
break;
case 4:
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_COUNT_4);
break;
default:
MyDialogUtils.showError("Hardware channel switching only supports 1-4 channels");
return false;
}
// speed things up by turning off updates, will restore value later
String editCellUpdates = props_.getPropValueString(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_UPDATES);
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_UPDATES, Properties.Values.NO);
// initialize cells 13-16 which control BNCs 5-8
for (int cellNum=13; cellNum<=16; cellNum++) {
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_POINTER_POSITION, cellNum);
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_TYPE, Properties.Values.PLOGIC_AND4);
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_INPUT_2, laserTTLAddress);
// note that PLC diSPIM assumes "laser + side" output mode is selected for micro-mirror card
}
// identify BNC from the preset and set counter inputs for 13-16 appropriately
ChannelSpec[] channels = multiChannelPanel_.getUsedChannels();
for (int channelNum = 0; channelNum < channels.length; channelNum++) {
// we already know there are between 1 and 4 channels
int outputNum = getPLogicOutputFromChannel(channels[channelNum]);
if (outputNum<5) {
// restore update setting
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_UPDATES, editCellUpdates);
return false; // already displayed error
}
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_POINTER_POSITION, outputNum + 8);
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_INPUT_1, invertAddress); // enable this AND4
// map the channel number to the equivalent addresses for the AND4
// inputs should be either 3 (for LSB high) or 67 (for LSB low)
// and 4 (for MSB high) or 68 (for MSB low)
int in3 = (channelNum & 0x01) > 0 ? counterLSBAddress : counterLSBAddress + invertAddress;
         int in4 = (channelNum & 0x02) > 0 ? counterMSBAddress : counterMSBAddress + invertAddress;
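         // resulting truth table (channelNum -> in3, in4), given the 0x01/0x02 bit masks above:
         //   0 -> 67, 68 (LSB low,  MSB low)
         //   1 ->  3, 68 (LSB high, MSB low)
         //   2 -> 67,  4 (LSB low,  MSB high)
         //   3 ->  3,  4 (LSB high, MSB high)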
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_INPUT_3, in3);
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_INPUT_4, in4);
}
// make sure cells 13-16 are controlling BNCs 5-8
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_BNC5_8_ON_13_16);
// restore update setting
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_UPDATES, editCellUpdates);
return true;
}
/**
* Implementation of acquisition that orchestrates image
* acquisition itself rather than using the acquisition engine.
*
    * This method is public so that the ScriptInterface can call it.
* Please do not access this yourself directly, instead use the API, e.g.
* import org.micromanager.asidispim.api.*;
* ASIdiSPIMInterface diSPIM = new ASIdiSPIMImplementation();
* diSPIM.runAcquisition();
*/
public void runAcquisition() {
class acqThread extends Thread {
acqThread(String threadName) {
super(threadName);
}
@Override
public void run() {
ReportingUtils.logMessage("User requested start of diSPIM acquisition.");
cancelAcquisition_.set(false);
acquisitionRunning_.set(true);
updateStartButton();
boolean success = runAcquisitionPrivate();
if (!success) {
ReportingUtils.logError("Fatal error running diSPIM acquisition.");
}
acquisitionRunning_.set(false);
updateStartButton();
}
}
acqThread acqt = new acqThread("diSPIM Acquisition");
acqt.start();
}
/**
* Actually runs the acquisition; does the dirty work of setting
* up the controller, the circular buffer, starting the cameras,
* grabbing the images and putting them into the acquisition, etc.
* @return true if ran without any fatal errors.
*/
private boolean runAcquisitionPrivate() {
if (gui_.isAcquisitionRunning()) {
MyDialogUtils.showError("An acquisition is already running");
return false;
}
boolean liveModeOriginally = gui_.isLiveModeOn();
if (liveModeOriginally) {
gui_.enableLiveMode(false);
}
// get MM device names for first/second cameras to acquire
String firstCamera, secondCamera;
boolean firstSideA = isFirstSideA();
if (firstSideA) {
firstCamera = devices_.getMMDevice(Devices.Keys.CAMERAA);
secondCamera = devices_.getMMDevice(Devices.Keys.CAMERAB);
} else {
firstCamera = devices_.getMMDevice(Devices.Keys.CAMERAB);
secondCamera = devices_.getMMDevice(Devices.Keys.CAMERAA);
}
boolean sideActiveA, sideActiveB;
boolean twoSided = isTwoSided();
if (twoSided) {
sideActiveA = true;
sideActiveB = true;
} else {
secondCamera = null;
if (firstSideA) {
sideActiveA = true;
sideActiveB = false;
} else {
sideActiveA = false;
sideActiveB = true;
}
}
int nrSides = getNumSides();
int nrSlices = getNumSlices();
int nrChannels = getNumChannels();
// set up channels
int nrChannelsSoftware = nrChannels; // how many times we trigger the controller
int nrSlicesSoftware = nrSlices;
String originalConfig = "";
boolean changeChannelPerVolumeSoftware = false;
boolean useChannels = multiChannelPanel_.isPanelEnabled();
MultichannelModes.Keys multichannelMode = MultichannelModes.Keys.NONE;
if (useChannels) {
if (nrChannels < 1) {
MyDialogUtils.showError("\"Channels\" is checked, but no channels are selected");
return false;
}
multichannelMode = MultichannelModes.getKeyFromPrefCode(
props_.getPropValueInteger(Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_MULTICHANNEL_MODE));
switch (multichannelMode) {
case VOLUME:
changeChannelPerVolumeSoftware = true;
multiChannelPanel_.initializeChannelCycle();
// get current channel so that we can restore it
// I tried core_.get/setSystemStateCache, but that made the Tiger controller very confused and I had to re-apply the firmware
originalConfig = multiChannelPanel_.getCurrentConfig();
break;
case VOLUME_HW:
case SLICE_HW:
if (!setupHardwareChannelSwitching()) {
return false;
}
nrChannelsSoftware = 1;
nrSlicesSoftware = nrSlices * nrChannels;
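            // e.g. (hypothetical) 20 slices and 3 channels in a hardware-switched mode:
            //   the controller is triggered once and each camera is expected to return
            //   20 * 3 = 60 images for that trigger, while software loops over channels only once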
break;
default:
MyDialogUtils.showError("Unsupported multichannel mode \"" + multichannelMode.toString() + "\"");
return false;
}
}
// set up XY positions
int nrPositions = 1;
boolean usePositions = usePositionsCB_.isSelected();
PositionList positionList = new PositionList();
if (usePositions) {
try {
positionList = gui_.getPositionList();
nrPositions = positionList.getNumberOfPositions();
} catch (MMScriptException ex) {
MyDialogUtils.showError(ex, "Error getting position list for multiple XY positions");
}
if (nrPositions < 1) {
MyDialogUtils.showError("\"Positions\" is checked, but no positions are in position list");
return false;
}
}
// make sure we have cameras selected
if (!checkCamerasAssigned(true)) {
return false;
}
// make sure slice timings are up to date
if (!advancedSliceTimingCB_.isSelected()) {
if(!isSliceTimingUpToDate()) {
MyDialogUtils.showError("Slice timing is not up to date, please recalculate.");
return false;
}
}
float cameraReadoutTime = computeCameraReadoutTime();
double exposureTime = sliceTiming_.cameraExposure;
boolean show = !hideCB_.isSelected();
boolean save = saveCB_.isSelected();
boolean singleTimePointViewers = separateTimePointsCB_.isSelected();
String rootDir = rootField_.getText();
int nrRepeats; // how many acquisition windows to open
int nrFrames; // how many Micro-manager "frames" = time points to take
if (singleTimePointViewers) {
nrFrames = 1;
nrRepeats = getNumTimepoints();
} else {
nrFrames = getNumTimepoints();
nrRepeats = 1;
}
long timepointsIntervalMs = Math.round(
PanelUtils.getSpinnerFloatValue(acquisitionInterval_) * 1000d);
AcquisitionModes.Keys spimMode = (AcquisitionModes.Keys) spimMode_.getSelectedItem();
boolean autoShutter = core_.getAutoShutter();
boolean shutterOpen = false;
// more sanity checks
double lineScanTime = computeActualSlicePeriod();
if (exposureTime + cameraReadoutTime > lineScanTime) {
MyDialogUtils.showError("Exposure time is longer than time needed for a line scan.\n" +
"This will result in dropped frames.\n" +
"Please change input");
return false;
}
double volumeDuration = computeActualVolumeDuration();
if (getNumTimepoints() > 1) {
if (timepointsIntervalMs < volumeDuration) {
MyDialogUtils.showError("Time point interval shorter than" +
" the time to collect a single volume.\n");
return false;
}
// TODO verify if 0.5 second is good value for overhead time
if (timepointsIntervalMs < (volumeDuration + 500)) {
MyDialogUtils.showError("Micro-Manager requires ~0.5 second overhead time "
+ "to finish up a volume before starting next one. "
+ "Pester the developers if you need faster, it is probably possible.");
return false;
}
}
if (nrRepeats > 10 && separateTimePointsCB_.isSelected()) {
if (!MyDialogUtils.getConfirmDialogResult(
"This will generate " + nrRepeats + " separate windows. "
+ "Do you really want to proceed?",
JOptionPane.OK_CANCEL_OPTION)) {
return false;
}
}
if (hideCB_.isSelected() && !saveCB_.isSelected()) {
MyDialogUtils.showError("Must save data to disk if viewer is hidden");
return false;
}
if (hideCB_.isSelected() && separateTimePointsCB_.isSelected()) {
         MyDialogUtils.showError("Cannot have hidden viewer with separate viewers per time point. " +
               "Pester the developers if you really need this.");
return false;
}
// it appears the circular buffer, which is used by both cameras, can only have one
// image size setting => we require same image height and width for second camera if two-sided
if (twoSided) {
try {
Rectangle roi_1 = core_.getROI(firstCamera);
Rectangle roi_2 = core_.getROI(secondCamera);
if (roi_1.width != roi_2.width || roi_1.height != roi_2.height) {
MyDialogUtils.showError("Camera ROI height and width must be equal because of Micro-Manager's circular buffer");
return false;
}
} catch (Exception ex) {
MyDialogUtils.showError(ex, "Problem getting camera ROIs");
}
}
// empty out circular buffer
try {
core_.clearCircularBuffer();
} catch (Exception ex) {
MyDialogUtils.showError(ex, "Error emptying out the circular buffer");
return false;
}
cameras_.setSPIMCamerasForAcquisition(true);
// stop the serial traffic for position updates during acquisition
posUpdater_.setAcqRunning(true);
numTimePointsDone_ = 0;
// force saving as image stacks, not individual files
// implementation assumes just two options, either
// TaggedImageStorageDiskDefault.class or TaggedImageStorageMultipageTiff.class
boolean separateImageFilesOriginally =
ImageUtils.getImageStorageClass().equals(TaggedImageStorageDiskDefault.class);
ImageUtils.setImageStorageClass(TaggedImageStorageMultipageTiff.class);
// Set up controller SPIM parameters (including from Setup panel settings)
if (sideActiveA) {
         boolean success = prepareControllerForAcquisition(Devices.Sides.A);
if (! success) {
return false;
}
}
if (sideActiveB) {
         boolean success = prepareControllerForAcquisition(Devices.Sides.B);
if (! success) {
return false;
}
}
// sets PLogic BNC3 output high to indicate acquisition is going on
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_3, true);
long acqStart = System.currentTimeMillis();
boolean nonfatalError = false;
// do not want to return from within this loop
// loop is executed once per acquisition (i.e. once if separate viewers isn't selected)
for (int tp = 0; tp < nrRepeats; tp++) {
BlockingQueue<TaggedImage> bq = new LinkedBlockingQueue<TaggedImage>(10);
String acqName;
if (singleTimePointViewers) {
acqName = gui_.getUniqueAcquisitionName(nameField_.getText() + "_" + tp);
} else {
acqName = gui_.getUniqueAcquisitionName(nameField_.getText());
}
try {
// check for stop button before each acquisition
if (cancelAcquisition_.get()) {
throw new IllegalMonitorStateException("User stopped the acquisition");
}
ReportingUtils.logMessage("diSPIM plugin starting acquisition " + acqName);
if (spimMode.equals(AcquisitionModes.Keys.NO_SCAN) && ! singleTimePointViewers) {
// swap nrFrames and nrSlices
gui_.openAcquisition(acqName, rootDir, nrSlices, nrSides * nrChannels,
nrFrames, nrPositions, show, save);
} else {
gui_.openAcquisition(acqName, rootDir, nrFrames, nrSides * nrChannels,
nrSlices, nrPositions, show, save);
}
core_.setExposure(firstCamera, exposureTime);
if (twoSided) {
core_.setExposure(secondCamera, exposureTime);
}
// set up channels (side A/B is treated as channel too)
if (useChannels) {
ChannelSpec[] channels = multiChannelPanel_.getUsedChannels();
for (int i = 0; i < channels.length; i++) {
String chName = "-" + channels[i].config_;
// same algorithm for channel index vs. specified channel and side as below
int channelIndex = i;
if (twoSided) {
channelIndex *= 2;
}
gui_.setChannelName(acqName, channelIndex, firstCamera + chName);
if (twoSided) {
gui_.setChannelName(acqName, channelIndex + 1, secondCamera + chName);
}
}
} else {
gui_.setChannelName(acqName, 0, firstCamera);
if (twoSided) {
gui_.setChannelName(acqName, 1, secondCamera);
}
}
// initialize acquisition
gui_.initializeAcquisition(acqName, (int) core_.getImageWidth(),
(int) core_.getImageHeight(), (int) core_.getBytesPerPixel(),
(int) core_.getImageBitDepth());
            // These metadata have to be added after initialization, otherwise
// they will not be shown?!
gui_.setAcquisitionProperty(acqName, "NumberOfSides",
NumberUtils.doubleToDisplayString(getNumSides()) );
String firstSide = "B";
if (firstSideA) {
firstSide = "A";
}
gui_.setAcquisitionProperty(acqName, "FirstSide", firstSide);
gui_.setAcquisitionProperty(acqName, "SlicePeriod_ms",
actualSlicePeriodLabel_.getText());
gui_.setAcquisitionProperty(acqName, "LaserExposure_ms",
NumberUtils.doubleToDisplayString(
(double)PanelUtils.getSpinnerFloatValue(durationLaser_)));
gui_.setAcquisitionProperty(acqName, "VolumeDuration",
actualVolumeDurationLabel_.getText());
gui_.setAcquisitionProperty(acqName, "SPIMmode",
((AcquisitionModes.Keys) spimMode_.getSelectedItem()).toString());
// Multi-page TIFF saving code wants this one:
// TODO: support other types than GRAY16
gui_.setAcquisitionProperty(acqName, "PixelType", "GRAY16");
gui_.setAcquisitionProperty(acqName, "z-step_um",
NumberUtils.doubleToDisplayString(getStepSizeUm()) );
// get circular buffer ready
// do once here but not per-acquisition; need to ensure ROI changes registered
core_.initializeCircularBuffer();
// TODO: use new acquisition interface that goes through the pipeline
//gui_.setAcquisitionAddImageAsynchronous(acqName);
MMAcquisition acq = gui_.getAcquisition(acqName);
// Dive into MM internals since script interface does not support pipelines
ImageCache imageCache = acq.getImageCache();
VirtualAcquisitionDisplay vad = acq.getAcquisitionWindow();
imageCache.addImageCacheListener(vad);
// Start pumping images into the ImageCache
DefaultTaggedImageSink sink = new DefaultTaggedImageSink(bq, imageCache);
sink.start();
// Loop over all the times we trigger the controller's acquisition
// If the interval between frames is shorter than the time to acquire
// them, we can switch to hardware based solution. Not sure how important
// that feature is, so leave it out for now.
for (int timePoint = 0; timePoint < nrFrames; timePoint++) {
// handle intervals between time points
long acqNow = System.currentTimeMillis();
long delay = acqStart + timePoint * timepointsIntervalMs - acqNow;
while (delay > 0 && !cancelAcquisition_.get()) {
updateAcquisitionStatus(AcquisitionStatus.WAITING, (int) (delay / 1000));
long sleepTime = Math.min(1000, delay);
Thread.sleep(sleepTime);
acqNow = System.currentTimeMillis();
delay = acqStart + timePoint * timepointsIntervalMs - acqNow;
}
// check for stop button before each time point
if (cancelAcquisition_.get()) {
throw new IllegalMonitorStateException("User stopped the acquisition");
}
numTimePointsDone_++;
updateAcquisitionStatus(AcquisitionStatus.ACQUIRING);
// loop over all positions
for (int positionNum = 0; positionNum < nrPositions; positionNum++) {
if (usePositions) {
// blocking call; will wait for stages to move
MultiStagePosition.goToPosition(positionList.getPosition(positionNum), core_);
// wait any extra time the user requests
Thread.sleep(Math.round(PanelUtils.getSpinnerFloatValue(positionDelay_)));
}
// loop over all the times we trigger the controller
for (int channelNum = 0; channelNum < nrChannelsSoftware; channelNum++) {
// start the cameras
core_.startSequenceAcquisition(firstCamera, nrSlicesSoftware, 0, true);
if (twoSided) {
core_.startSequenceAcquisition(secondCamera, nrSlicesSoftware, 0, true);
}
// deal with shutter
if (autoShutter) {
core_.setAutoShutter(false);
shutterOpen = core_.getShutterOpen();
if (!shutterOpen) {
core_.setShutterOpen(true);
}
}
// deal with channel if needed (hardware channel switching doesn't happen here)
if (changeChannelPerVolumeSoftware) {
multiChannelPanel_.selectNextChannel();
}
// trigger the Tiger controller
// TODO generalize this for different ways of running SPIM
// only matters which device we trigger if there are two micro-mirror cards
if (firstSideA) {
props_.setPropValue(Devices.Keys.GALVOA, Properties.Keys.SPIM_STATE,
Properties.Values.SPIM_RUNNING, true);
} else {
props_.setPropValue(Devices.Keys.GALVOB, Properties.Keys.SPIM_STATE,
Properties.Values.SPIM_RUNNING, true);
}
ReportingUtils.logDebugMessage("Starting time point " + (timePoint+1) + " of " + nrFrames
+ " with channel number " + channelNum);
// Wait for first image to create ImageWindow, so that we can be sure about image size
// Do not actually grab first image here, just make sure it is there
long start = System.currentTimeMillis();
long now = start;
                  long timeout;  // how long to wait for the first image to come (at least 5 seconds)
timeout = Math.max(5000, Math.round(1.2*computeActualVolumeDuration()));
while (core_.getRemainingImageCount() == 0 && (now - start < timeout)
&& !cancelAcquisition_.get()) {
now = System.currentTimeMillis();
Thread.sleep(5);
}
if (now - start >= timeout) {
throw new Exception("Camera did not send first image within a reasonable time");
}
// grab all the images from the cameras, put them into the acquisition
int[] frNumber = new int[nrChannels*2]; // keep track of how many frames we have received for each "channel" (MM channel is our channel * 2 for the 2 cameras)
int[] cameraFrNumber = new int[2]; // keep track of how many frames we have received from the camera
boolean done = false;
long timeout2; // how long to wait between images before timing out
timeout2 = Math.max(2000, Math.round(5*computeActualSlicePeriod()));
start = System.currentTimeMillis();
long last = start;
try {
while ((core_.getRemainingImageCount() > 0
|| core_.isSequenceRunning(firstCamera)
|| (twoSided && core_.isSequenceRunning(secondCamera)))
&& !done) {
now = System.currentTimeMillis();
if (core_.getRemainingImageCount() > 0) { // we have an image to grab
TaggedImage timg = core_.popNextTaggedImage();
String camera = (String) timg.tags.get("Camera");
// figure out which channel index the acquisition is using
int cameraIndex = camera.equals(firstCamera) ? 0: 1;
int channelIndex;
switch (multichannelMode) {
case NONE:
case VOLUME:
channelIndex = channelNum;
break;
case VOLUME_HW:
channelIndex = cameraFrNumber[cameraIndex] / nrSlices; // want quotient only
break;
case SLICE_HW:
channelIndex = cameraFrNumber[cameraIndex] % nrChannels; // want modulo arithmetic
break;
default:
// should never get here
throw new Exception("Undefined channel mode");
}
// 2nd camera always gets odd channel index
// second side always comes after first side
if (twoSided) {
channelIndex *= 2;
}
channelIndex += cameraIndex;
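                        // example mapping for a hypothetical two-sided, two-channel acquisition:
                        //   logical channel 0 / first camera -> index 0, channel 0 / second camera -> index 1
                        //   logical channel 1 / first camera -> index 2, channel 1 / second camera -> index 3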
// add image to acquisition
if (spimMode.equals(AcquisitionModes.Keys.NO_SCAN) && ! singleTimePointViewers) {
// create time series for no scan
addImageToAcquisition(acqName,
frNumber[channelIndex], channelIndex, timePoint,
positionNum, now - acqStart, timg, bq);
} else { // standard, create Z-stacks
addImageToAcquisition(acqName, timePoint, channelIndex,
frNumber[channelIndex], positionNum,
now - acqStart, timg, bq);
}
// update our counters
frNumber[channelIndex]++;
cameraFrNumber[cameraIndex]++;
last = now; // keep track of last image timestamp
} else { // no image ready yet
done = cancelAcquisition_.get();
Thread.sleep(1);
if (now - last >= timeout2) {
ReportingUtils.logError("Camera did not send all expected images within" +
" a reasonable period for timepoint " + (timePoint+1) + ". Continuing anyway.");
// allow other time points to continue by stopping acquisition manually
// (in normal case the sequence acquisition stops itself after
// all the expected images are returned)
if (core_.isSequenceRunning(firstCamera)) {
core_.stopSequenceAcquisition(firstCamera);
}
if (twoSided && core_.isSequenceRunning(secondCamera)) {
core_.stopSequenceAcquisition(secondCamera);
}
nonfatalError = true;
done = true;
}
}
}
// update count if we stopped in the middle
if (cancelAcquisition_.get()) {
numTimePointsDone_--;
}
} catch (InterruptedException iex) {
MyDialogUtils.showError(iex);
}
}
}
}
} catch (IllegalMonitorStateException ex) {
// do nothing, the acquisition was simply halted during its operation
} catch (MMScriptException mex) {
MyDialogUtils.showError(mex);
} catch (Exception ex) {
MyDialogUtils.showError(ex);
} finally { // end of this acquisition (could be about to restart if separate viewers)
try {
if (core_.isSequenceRunning(firstCamera)) {
core_.stopSequenceAcquisition(firstCamera);
}
if (twoSided && core_.isSequenceRunning(secondCamera)) {
core_.stopSequenceAcquisition(secondCamera);
}
if (autoShutter) {
core_.setAutoShutter(true);
if (shutterOpen) {
core_.setShutterOpen(false);
}
}
bq.put(TaggedImageQueue.POISON);
// TODO: evaluate closeAcquisition call
// at the moment, the Micro-Manager api has a bug that causes
            // a closed acquisition to not really be closed, causing problems
// when the user closes a window of the previous acquisition
// changed r14705 (2014-11-24)
// gui_.closeAcquisition(acqName);
ReportingUtils.logMessage("diSPIM plugin acquisition " + acqName +
" took: " + (System.currentTimeMillis() - acqStart) + "ms");
} catch (Exception ex) {
// exception while stopping sequence acquisition, not sure what to do...
            MyDialogUtils.showError(ex, "Problem while finishing acquisition");
}
}
}
// cleanup after end of all acquisitions
// reset channel to original
if (changeChannelPerVolumeSoftware) {
multiChannelPanel_.setConfig(originalConfig);
}
// the controller will end with both beams disabled and scan off so reflect
// that in device properties
props_.setPropValue(Devices.Keys.GALVOA, Properties.Keys.BEAM_ENABLED,
Properties.Values.NO, true);
props_.setPropValue(Devices.Keys.GALVOB, Properties.Keys.BEAM_ENABLED,
Properties.Values.NO, true);
props_.setPropValue(Devices.Keys.GALVOA, Properties.Keys.SA_MODE_X,
Properties.Values.SAM_DISABLED, true);
props_.setPropValue(Devices.Keys.GALVOB, Properties.Keys.SA_MODE_X,
Properties.Values.SAM_DISABLED, true);
// sets BNC3 output low again
// this only happens after images have all been received (or timeout occurred)
// but if using DemoCam devices then it happens too early
// at least part of the problem is that both DemoCam devices "acquire" at the same time
// instead of actually obeying the controller's triggers
// as a result with DemoCam the side select (BNC4) isn't correct
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_2, true);
// move piezos back to center (neutral) position
if (devices_.isValidMMDevice(Devices.Keys.PIEZOA)) {
positions_.setPosition(Devices.Keys.PIEZOA, Joystick.Directions.NONE, 0.0);
}
if (devices_.isValidMMDevice(Devices.Keys.PIEZOB)) {
positions_.setPosition(Devices.Keys.PIEZOB, Joystick.Directions.NONE, 0.0);
}
if (cancelAcquisition_.get()) { // if user stopped us in middle
// make sure to stop the SPIM state machine in case the acquisition was cancelled
props_.setPropValue(Devices.Keys.GALVOA, Properties.Keys.SPIM_STATE,
Properties.Values.SPIM_IDLE, true);
props_.setPropValue(Devices.Keys.GALVOB, Properties.Keys.SPIM_STATE,
Properties.Values.SPIM_IDLE, true);
}
updateAcquisitionStatus(AcquisitionStatus.DONE);
posUpdater_.setAcqRunning(false);
if (separateImageFilesOriginally) {
ImageUtils.setImageStorageClass(TaggedImageStorageDiskDefault.class);
}
cameras_.setSPIMCamerasForAcquisition(false);
if (liveModeOriginally) {
gui_.enableLiveMode(true);
}
if (nonfatalError) {
MyDialogUtils.showError("Non-fatal error occurred during acquisition, see core log for details");
}
return true;
}
@Override
public void saveSettings() {
prefs_.putString(panelName_, Properties.Keys.PLUGIN_DIRECTORY_ROOT,
rootField_.getText());
prefs_.putString(panelName_, Properties.Keys.PLUGIN_NAME_PREFIX,
nameField_.getText());
// save controller settings
props_.setPropValue(Devices.Keys.PIEZOA, Properties.Keys.SAVE_CARD_SETTINGS,
Properties.Values.DO_SSZ, true);
props_.setPropValue(Devices.Keys.PIEZOB, Properties.Keys.SAVE_CARD_SETTINGS,
Properties.Values.DO_SSZ, true);
props_.setPropValue(Devices.Keys.GALVOA, Properties.Keys.SAVE_CARD_SETTINGS,
Properties.Values.DO_SSZ, true);
props_.setPropValue(Devices.Keys.GALVOB, Properties.Keys.SAVE_CARD_SETTINGS,
Properties.Values.DO_SSZ, true);
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.SAVE_CARD_SETTINGS,
Properties.Values.DO_SSZ, true);
}
/**
* Gets called when this tab gets focus. Refreshes values from properties.
*/
@Override
public void gotSelected() {
posUpdater_.pauseUpdates(true);
props_.callListeners();
if (navigationJoysticksCB_.isSelected()) {
if (ASIdiSPIM.getFrame() != null) {
ASIdiSPIM.getFrame().getNavigationPanel().doJoystickSettings();
}
} else {
joystick_.unsetAllJoysticks(); // disable all joysticks on this tab
}
sliceFrameAdvanced_.setVisible(advancedSliceTimingCB_.isSelected());
posUpdater_.pauseUpdates(false);
}
/**
    * Called when the tab loses focus.
*/
@Override
public void gotDeSelected() {
sliceFrameAdvanced_.setVisible(false);
saveSettings();
}
@Override
public void devicesChangedAlert() {
devices_.callListeners();
}
/**
* Gets called when enclosing window closes
*/
public void windowClosing() {
sliceFrameAdvanced_.savePosition();
sliceFrameAdvanced_.dispose();
}
@Override
public void refreshDisplay() {
updateDurationLabels();
}
private void setRootDirectory(JTextField rootField) {
File result = FileDialogs.openDir(null,
"Please choose a directory root for image data",
MMStudio.MM_DATA_SET);
if (result != null) {
rootField.setText(result.getAbsolutePath());
}
}
/**
* The basic method for adding images to an existing data set. If the
* acquisition was not previously initialized, it will attempt to initialize
* it from the available image data. This version uses a blocking queue and is
    * much faster than the one currently implemented in the ScriptInterface.
* Eventually, this function should be replaced by the ScriptInterface version
* of the same.
* @param name - named acquisition to add image to
* @param frame - frame nr at which to insert the image
* @param channel - channel at which to insert image
* @param slice - (z) slice at which to insert image
* @param position - position at which to insert image
* @param ms - Time stamp to be added to the image metadata
* @param taggedImg - image + metadata to be added
* @param bq - Blocking queue to which the image should be added. This queue
    * should be hooked up to the ImageCache belonging to this acquisition
* @throws java.lang.InterruptedException
* @throws org.micromanager.utils.MMScriptException
*/
public void addImageToAcquisition(String name,
int frame,
int channel,
int slice,
int position,
long ms,
TaggedImage taggedImg,
BlockingQueue<TaggedImage> bq) throws MMScriptException, InterruptedException {
MMAcquisition acq = gui_.getAcquisition(name);
// verify position number is allowed
if (acq.getPositions() <= position) {
throw new MMScriptException("The position number must not exceed declared"
+ " number of positions (" + acq.getPositions() + ")");
}
// verify that channel number is allowed
if (acq.getChannels() <= channel) {
throw new MMScriptException("The channel number must not exceed declared"
               + " number of channels (" + acq.getChannels() + ")");
}
JSONObject tags = taggedImg.tags;
if (!acq.isInitialized()) {
throw new MMScriptException("Error in the ASIdiSPIM logic. Acquisition should have been initialized");
}
// create required coordinate tags
try {
MDUtils.setFrameIndex(tags, frame);
tags.put(MMTags.Image.FRAME, frame);
MDUtils.setChannelIndex(tags, channel);
MDUtils.setSliceIndex(tags, slice);
MDUtils.setPositionIndex(tags, position);
MDUtils.setElapsedTimeMs(tags, ms);
MDUtils.setImageTime(tags, MDUtils.getCurrentTime());
MDUtils.setZStepUm(tags, PanelUtils.getSpinnerFloatValue(stepSize_));
if (!tags.has(MMTags.Summary.SLICES_FIRST) && !tags.has(MMTags.Summary.TIME_FIRST)) {
// add default setting
tags.put(MMTags.Summary.SLICES_FIRST, true);
tags.put(MMTags.Summary.TIME_FIRST, false);
}
if (acq.getPositions() > 1) {
// if no position name is defined we need to insert a default one
            if (!tags.has(MMTags.Image.POS_NAME)) {
tags.put(MMTags.Image.POS_NAME, "Pos" + position);
}
}
// update frames if necessary
if (acq.getFrames() <= frame) {
acq.setProperty(MMTags.Summary.FRAMES, Integer.toString(frame + 1));
}
} catch (JSONException e) {
throw new MMScriptException(e);
}
bq.put(taggedImg);
}
}
|
plugins/ASIdiSPIM/src/org/micromanager/asidispim/AcquisitionPanel.java
|
///////////////////////////////////////////////////////////////////////////////
//FILE: AcquisitionPanel.java
//PROJECT: Micro-Manager
//SUBSYSTEM: ASIdiSPIM plugin
//-----------------------------------------------------------------------------
//
// AUTHOR: Nico Stuurman, Jon Daniels
//
// COPYRIGHT: University of California, San Francisco, & ASI, 2013
//
// LICENSE: This file is distributed under the BSD license.
// License text is included with the source distribution.
//
// This file is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
//
// IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES.
package org.micromanager.asidispim;
import org.micromanager.asidispim.Data.AcquisitionModes;
import org.micromanager.asidispim.Data.CameraModes;
import org.micromanager.asidispim.Data.Cameras;
import org.micromanager.asidispim.Data.Devices;
import org.micromanager.asidispim.Data.Joystick;
import org.micromanager.asidispim.Data.MultichannelModes;
import org.micromanager.asidispim.Data.MyStrings;
import org.micromanager.asidispim.Data.Positions;
import org.micromanager.asidispim.Data.Prefs;
import org.micromanager.asidispim.Data.Properties;
import org.micromanager.asidispim.Utils.DevicesListenerInterface;
import org.micromanager.asidispim.Utils.ListeningJPanel;
import org.micromanager.asidispim.Utils.MyDialogUtils;
import org.micromanager.asidispim.Utils.MyNumberUtils;
import org.micromanager.asidispim.Utils.PanelUtils;
import org.micromanager.asidispim.Utils.SliceTiming;
import org.micromanager.asidispim.Utils.StagePositionUpdater;
import java.awt.Color;
import java.awt.Component;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.Insets;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.swing.JCheckBox;
import javax.swing.JComponent;
import javax.swing.JOptionPane;
import javax.swing.JTextField;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JComboBox;
import javax.swing.JPanel;
import javax.swing.JSpinner;
import javax.swing.JToggleButton;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import net.miginfocom.swing.MigLayout;
import org.json.JSONException;
import org.json.JSONObject;
import mmcorej.CMMCore;
import mmcorej.Configuration;
import mmcorej.TaggedImage;
import org.micromanager.api.MultiStagePosition;
import org.micromanager.api.PositionList;
import org.micromanager.api.ScriptInterface;
import org.micromanager.api.ImageCache;
import org.micromanager.api.MMTags;
import org.micromanager.MMStudio;
import org.micromanager.acquisition.DefaultTaggedImageSink;
import org.micromanager.acquisition.MMAcquisition;
import org.micromanager.acquisition.TaggedImageQueue;
import org.micromanager.acquisition.TaggedImageStorageDiskDefault;
import org.micromanager.acquisition.TaggedImageStorageMultipageTiff;
import org.micromanager.imagedisplay.VirtualAcquisitionDisplay;
import org.micromanager.utils.ImageUtils;
import org.micromanager.utils.NumberUtils;
import org.micromanager.utils.FileDialogs;
import org.micromanager.utils.MDUtils;
import org.micromanager.utils.MMScriptException;
import org.micromanager.utils.ReportingUtils;
import com.swtdesigner.SwingResourceManager;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import javax.swing.BorderFactory;
import org.micromanager.acquisition.ComponentTitledBorder;
import org.micromanager.asidispim.Data.ChannelSpec;
import org.micromanager.utils.MMFrame;
/**
*
* @author nico
* @author Jon
*/
@SuppressWarnings("serial")
public class AcquisitionPanel extends ListeningJPanel implements DevicesListenerInterface {
private final Devices devices_;
private final Properties props_;
private final Joystick joystick_;
private final Cameras cameras_;
private final Prefs prefs_;
private final Positions positions_;
private final CMMCore core_;
private final ScriptInterface gui_;
private final JCheckBox advancedSliceTimingCB_;
private final JSpinner numSlices_;
private final JComboBox numSides_;
private final JComboBox firstSide_;
private final JSpinner numScansPerSlice_;
private final JSpinner lineScanPeriod_;
private final JSpinner delayScan_;
private final JSpinner delayLaser_;
private final JSpinner delayCamera_;
private final JSpinner durationCamera_; // NB: not the same as camera exposure
private final JSpinner durationLaser_;
private final JSpinner delaySide_;
private final JLabel actualSlicePeriodLabel_;
private final JLabel actualVolumeDurationLabel_;
private final JLabel actualTimeLapseDurationLabel_;
private final JSpinner numTimepoints_;
private final JSpinner acquisitionInterval_;
private final JToggleButton buttonStart_;
private final JPanel volPanel_;
private final JPanel slicePanel_;
private final JPanel timepointPanel_;
private final JPanel savePanel_;
private final JPanel durationPanel_;
private final JTextField rootField_;
private final JTextField nameField_;
private final JLabel acquisitionStatusLabel_;
private int numTimePointsDone_;
private final AtomicBoolean cancelAcquisition_ = new AtomicBoolean(false); // true if we should stop acquisition
private final AtomicBoolean acquisitionRunning_ = new AtomicBoolean(false); // true if acquisition is in progress
private final StagePositionUpdater posUpdater_;
private final JSpinner stepSize_;
private final JLabel desiredSlicePeriodLabel_;
private final JSpinner desiredSlicePeriod_;
private final JLabel desiredLightExposureLabel_;
private final JSpinner desiredLightExposure_;
private final JButton calculateSliceTiming_;
private final JCheckBox minSlicePeriodCB_;
private final JCheckBox separateTimePointsCB_;
private final JCheckBox saveCB_;
private final JCheckBox hideCB_;
private final JComboBox spimMode_;
private final JCheckBox navigationJoysticksCB_;
private final JCheckBox usePositionsCB_;
private final JSpinner positionDelay_;
private final JCheckBox useTimepointsCB_;
private final JPanel leftColumnPanel_;
private final JPanel centerColumnPanel_;
private final MMFrame sliceFrameAdvanced_;
private SliceTiming sliceTiming_;
private final MultiChannelSubPanel multiChannelPanel_;
public AcquisitionPanel(ScriptInterface gui,
Devices devices,
Properties props,
Joystick joystick,
Cameras cameras,
Prefs prefs,
StagePositionUpdater posUpdater,
Positions positions) {
super(MyStrings.PanelNames.ACQUSITION.toString(),
new MigLayout(
"",
"[center]0[center]0[center]",
"[top]0[]"));
gui_ = gui;
devices_ = devices;
props_ = props;
joystick_ = joystick;
cameras_ = cameras;
prefs_ = prefs;
posUpdater_ = posUpdater;
positions_ = positions;
core_ = gui_.getMMCore();
numTimePointsDone_ = 0;
sliceTiming_ = new SliceTiming();
PanelUtils pu = new PanelUtils(prefs_, props_, devices_);
// added to spinner controls where we should re-calculate the displayed
// slice period, volume duration, and time lapse duration
ChangeListener recalculateTimingDisplayCL = new ChangeListener() {
@Override
public void stateChanged(ChangeEvent e) {
updateDurationLabels();
}
};
// added to combobox controls where we should re-calculate the displayed
// slice period, volume duration, and time lapse duration
ActionListener recalculateTimingDisplayAL = new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
updateDurationLabels();
}
};
// start volume (main) sub-panel
volPanel_ = new JPanel(new MigLayout(
"",
"[right]10[center]",
"[]8[]"));
volPanel_.setBorder(PanelUtils.makeTitledBorder("Volume Settings"));
volPanel_.add(new JLabel("Number of sides:"));
String [] str12 = {"1", "2"};
numSides_ = pu.makeDropDownBox(str12, Devices.Keys.PLUGIN,
Properties.Keys.PLUGIN_NUM_SIDES, str12[1]);
numSides_.addActionListener(recalculateTimingDisplayAL);
volPanel_.add(numSides_, "wrap");
volPanel_.add(new JLabel("First side:"));
String[] ab = {Devices.Sides.A.toString(), Devices.Sides.B.toString()};
firstSide_ = pu.makeDropDownBox(ab, Devices.Keys.PLUGIN,
Properties.Keys.PLUGIN_FIRST_SIDE, Devices.Sides.A.toString());
volPanel_.add(firstSide_, "wrap");
volPanel_.add(new JLabel("Delay before side [ms]:"));
delaySide_ = pu.makeSpinnerFloat(0, 10000, 0.25,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_DELAY_SIDE, 0);
delaySide_.addChangeListener(recalculateTimingDisplayCL);
volPanel_.add(delaySide_, "wrap");
volPanel_.add(new JLabel("Slices per volume:"));
numSlices_ = pu.makeSpinnerInteger(1, 1000,
Devices.Keys.PLUGIN,
Properties.Keys.PLUGIN_NUM_SLICES, 20);
numSlices_.addChangeListener(recalculateTimingDisplayCL);
volPanel_.add(numSlices_, "wrap");
volPanel_.add(new JLabel("Slice step size [\u00B5m]:"));
stepSize_ = pu.makeSpinnerFloat(0, 100, 0.1,
Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_SLICE_STEP_SIZE,
1.0);
volPanel_.add(stepSize_, "wrap");
// out of order so we can reference it
desiredSlicePeriod_ = pu.makeSpinnerFloat(1, 1000, 0.25,
Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_DESIRED_SLICE_PERIOD, 30);
minSlicePeriodCB_ = pu.makeCheckBox("Minimize slice period",
Properties.Keys.PREFS_MINIMIZE_SLICE_PERIOD, panelName_, false);
minSlicePeriodCB_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
boolean doMin = minSlicePeriodCB_.isSelected();
desiredSlicePeriod_.setEnabled(!doMin);
if (doMin) {
recalculateSliceTiming(false);
}
}
});
volPanel_.add(minSlicePeriodCB_, "span 2, wrap");
// special field that is enabled/disabled depending on whether advanced timing is enabled
desiredSlicePeriodLabel_ = new JLabel("Slice period [ms]:");
volPanel_.add(desiredSlicePeriodLabel_);
volPanel_.add(desiredSlicePeriod_, "wrap");
desiredSlicePeriod_.addChangeListener(new ChangeListener() {
@Override
public void stateChanged(ChangeEvent ce) {
            // make sure the value is a multiple of 0.25
float userVal = PanelUtils.getSpinnerFloatValue(desiredSlicePeriod_);
float nearestValid = MyNumberUtils.roundToQuarterMs(userVal);
if (!MyNumberUtils.floatsEqual(userVal, nearestValid)) {
PanelUtils.setSpinnerFloatValue(desiredSlicePeriod_, nearestValid);
}
}
});
// special field that is enabled/disabled depending on whether advanced timing is enabled
desiredLightExposureLabel_ = new JLabel("Sample exposure [ms]:");
volPanel_.add(desiredLightExposureLabel_);
desiredLightExposure_ = pu.makeSpinnerFloat(2.5, 1000.5, 1,
Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_DESIRED_EXPOSURE, 8.5);
desiredLightExposure_.addChangeListener(new ChangeListener() {
@Override
public void stateChanged(ChangeEvent ce) {
            // make sure the value is 2.5, 3.5, 4.5, etc. (nearest X.5)
float val = PanelUtils.getSpinnerFloatValue(desiredLightExposure_);
float nearestValid = (float) Math.round(val+0.5f) - 0.5f;
if (!MyNumberUtils.floatsEqual(val, nearestValid)) {
PanelUtils.setSpinnerFloatValue(desiredLightExposure_, nearestValid);
}
}
});
volPanel_.add(desiredLightExposure_, "wrap");
calculateSliceTiming_ = new JButton("Calculate slice timing");
calculateSliceTiming_.setToolTipText("Must recalculate after changing the camera ROI.");
calculateSliceTiming_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
recalculateSliceTiming(!minSlicePeriodCB_.isSelected());
}
});
volPanel_.add(calculateSliceTiming_, "center, span 2, wrap");
// special checkbox to use the advanced timing settings
// action handler added below after defining components it enables/disables
advancedSliceTimingCB_ = pu.makeCheckBox("Use advanced timing settings",
Properties.Keys.PREFS_ADVANCED_SLICE_TIMING, panelName_, false);
volPanel_.add(advancedSliceTimingCB_, "left, span 2, wrap");
// end volume sub-panel
// start advanced slice timing frame
// visibility of this frame is controlled from advancedTiming checkbox
// this frame is separate from main plugin window
sliceFrameAdvanced_ = new MMFrame();
sliceFrameAdvanced_.setTitle("Advanced timing");
sliceFrameAdvanced_.loadPosition(100, 100);
slicePanel_ = new JPanel(new MigLayout(
"",
"[right]10[center]",
"[]8[]"));
sliceFrameAdvanced_.add(slicePanel_);
class SliceFrameAdapter extends WindowAdapter {
@Override
public void windowClosing(WindowEvent e) {
advancedSliceTimingCB_.setSelected(false);
sliceFrameAdvanced_.savePosition();
}
}
sliceFrameAdvanced_.addWindowListener(new SliceFrameAdapter());
JLabel scanDelayLabel = new JLabel("Delay before scan [ms]:");
slicePanel_.add(scanDelayLabel);
delayScan_ = pu.makeSpinnerFloat(0, 10000, 0.25,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_DELAY_SCAN, 0);
delayScan_.addChangeListener(recalculateTimingDisplayCL);
slicePanel_.add(delayScan_, "wrap");
      JLabel lineScanLabel = new JLabel("Line scans per slice:");
slicePanel_.add(lineScanLabel);
numScansPerSlice_ = pu.makeSpinnerInteger(1, 1000,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_NUM_SCANSPERSLICE, 1);
numScansPerSlice_.addChangeListener(recalculateTimingDisplayCL);
slicePanel_.add(numScansPerSlice_, "wrap");
JLabel lineScanPeriodLabel = new JLabel("Line scan period [ms]:");
slicePanel_.add(lineScanPeriodLabel);
lineScanPeriod_ = pu.makeSpinnerInteger(1, 10000,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_LINESCAN_PERIOD, 10);
lineScanPeriod_.addChangeListener(recalculateTimingDisplayCL);
slicePanel_.add(lineScanPeriod_, "wrap");
JLabel delayLaserLabel = new JLabel("Delay before laser [ms]:");
slicePanel_.add(delayLaserLabel);
delayLaser_ = pu.makeSpinnerFloat(0, 10000, 0.25,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_DELAY_LASER, 0);
delayLaser_.addChangeListener(recalculateTimingDisplayCL);
slicePanel_.add(delayLaser_, "wrap");
JLabel durationLabel = new JLabel("Laser trig duration [ms]:");
slicePanel_.add(durationLabel);
durationLaser_ = pu.makeSpinnerFloat(0, 10000, 0.25,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_DURATION_LASER, 1);
durationLaser_.addChangeListener(recalculateTimingDisplayCL);
slicePanel_.add(durationLaser_, "span 2, wrap");
JLabel delayLabel = new JLabel("Delay before camera [ms]:");
slicePanel_.add(delayLabel);
delayCamera_ = pu.makeSpinnerFloat(0, 10000, 0.25,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_DELAY_CAMERA, 0);
delayCamera_.addChangeListener(recalculateTimingDisplayCL);
slicePanel_.add(delayCamera_, "wrap");
JLabel cameraLabel = new JLabel("Camera trig duration [ms]:");
slicePanel_.add(cameraLabel);
durationCamera_ = pu.makeSpinnerFloat(0, 1000, 0.25,
new Devices.Keys[]{Devices.Keys.GALVOA, Devices.Keys.GALVOB},
Properties.Keys.SPIM_DURATION_CAMERA, 0);
durationCamera_.addChangeListener(recalculateTimingDisplayCL);
slicePanel_.add(durationCamera_, "wrap");
final JComponent[] simpleTimingComponents = { desiredLightExposure_,
calculateSliceTiming_, minSlicePeriodCB_, desiredSlicePeriodLabel_,
desiredLightExposureLabel_};
componentsSetEnabled(sliceFrameAdvanced_, advancedSliceTimingCB_.isSelected());
componentsSetEnabled(simpleTimingComponents, !advancedSliceTimingCB_.isSelected());
      // this item listener takes care of enabling/disabling inputs
      // of the advanced slice timing window
      // we invoke it once below to get the GUI looking right
ItemListener sliceTimingDisableGUIInputs = new ItemListener() {
@Override
public void itemStateChanged(ItemEvent e) {
boolean enabled = advancedSliceTimingCB_.isSelected();
// set other components in this advanced timing frame
componentsSetEnabled(sliceFrameAdvanced_, enabled);
// also control some components in main volume settings sub-panel
componentsSetEnabled(simpleTimingComponents, !enabled);
desiredSlicePeriod_.setEnabled(!enabled && !minSlicePeriodCB_.isSelected());
}
};
      // this action listener shows the advanced timing frame when the checkbox is selected
ActionListener showAdvancedTimingFrame = new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
boolean enabled = advancedSliceTimingCB_.isSelected();
if (enabled) {
sliceFrameAdvanced_.setVisible(enabled);
}
}
};
sliceFrameAdvanced_.pack();
sliceFrameAdvanced_.setResizable(false);
// end slice Frame
// start repeat (time lapse) sub-panel
timepointPanel_ = new JPanel(new MigLayout(
"",
"[right]10[center]",
"[]8[]"));
useTimepointsCB_ = pu.makeCheckBox("Time points",
Properties.Keys.PREFS_USE_TIMEPOINTS, panelName_, false);
useTimepointsCB_.setToolTipText("Perform a time-lapse acquisition");
useTimepointsCB_.setEnabled(true);
useTimepointsCB_.setFocusPainted(false);
ComponentTitledBorder componentBorder =
new ComponentTitledBorder(useTimepointsCB_, timepointPanel_,
BorderFactory.createLineBorder(ASIdiSPIM.borderColor));
timepointPanel_.setBorder(componentBorder);
ChangeListener recalculateTimeLapseDisplay = new ChangeListener() {
@Override
public void stateChanged(ChangeEvent e) {
updateActualTimeLapseDurationLabel();
}
};
useTimepointsCB_.addChangeListener(recalculateTimeLapseDisplay);
timepointPanel_.add(new JLabel("Number:"));
numTimepoints_ = pu.makeSpinnerInteger(1, 32000,
Devices.Keys.PLUGIN,
Properties.Keys.PLUGIN_NUM_ACQUISITIONS, 1);
numTimepoints_.addChangeListener(recalculateTimeLapseDisplay);
timepointPanel_.add(numTimepoints_, "wrap");
timepointPanel_.add(new JLabel("Interval [s]:"));
acquisitionInterval_ = pu.makeSpinnerFloat(1, 32000, 0.1,
Devices.Keys.PLUGIN,
Properties.Keys.PLUGIN_ACQUISITION_INTERVAL, 60);
acquisitionInterval_.addChangeListener(recalculateTimeLapseDisplay);
timepointPanel_.add(acquisitionInterval_, "wrap");
// enable/disable panel elements depending on checkbox state
useTimepointsCB_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
componentsSetEnabled(timepointPanel_, useTimepointsCB_.isSelected());
}
});
componentsSetEnabled(timepointPanel_, useTimepointsCB_.isSelected()); // initialize
// end repeat sub-panel
// start savePanel
final int textFieldWidth = 16;
savePanel_ = new JPanel(new MigLayout(
"",
"[right]10[center]8[left]",
"[]4[]"));
savePanel_.setBorder(PanelUtils.makeTitledBorder("Data Saving Settings"));
separateTimePointsCB_ = pu.makeCheckBox("Separate viewer / file for each time point",
Properties.Keys.PREFS_SEPARATE_VIEWERS_FOR_TIMEPOINTS, panelName_, false);
savePanel_.add(separateTimePointsCB_, "span 3, left, wrap");
hideCB_ = pu.makeCheckBox("Hide viewer",
Properties.Keys.PREFS_HIDE_WHILE_ACQUIRING, panelName_, false);
savePanel_.add(hideCB_, "left");
hideCB_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent ae) {
// if viewer is hidden then force saving to disk
if (hideCB_.isSelected()) {
if (!saveCB_.isSelected()) {
saveCB_.doClick();
}
saveCB_.setEnabled(false);
} else {
saveCB_.setEnabled(true);
}
}
});
saveCB_ = pu.makeCheckBox("Save while acquiring",
Properties.Keys.PREFS_SAVE_WHILE_ACQUIRING, panelName_, false);
// init the save while acquiring CB; could also do two doClick() calls
if (hideCB_.isSelected()) {
saveCB_.setEnabled(false);
}
savePanel_.add(saveCB_, "span 2, center, wrap");
JLabel dirRootLabel = new JLabel ("Directory root:");
savePanel_.add(dirRootLabel);
rootField_ = new JTextField();
rootField_.setText( prefs_.getString(panelName_,
Properties.Keys.PLUGIN_DIRECTORY_ROOT, "") );
rootField_.setColumns(textFieldWidth);
savePanel_.add(rootField_, "span 2");
JButton browseRootButton = new JButton();
browseRootButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
setRootDirectory(rootField_);
prefs_.putString(panelName_, Properties.Keys.PLUGIN_DIRECTORY_ROOT,
rootField_.getText());
}
});
browseRootButton.setMargin(new Insets(2, 5, 2, 5));
browseRootButton.setText("...");
savePanel_.add(browseRootButton, "wrap");
JLabel namePrefixLabel = new JLabel();
namePrefixLabel.setText("Name prefix:");
savePanel_.add(namePrefixLabel);
nameField_ = new JTextField("acq");
nameField_.setText( prefs_.getString(panelName_,
Properties.Keys.PLUGIN_NAME_PREFIX, "acq"));
nameField_.setColumns(textFieldWidth);
savePanel_.add(nameField_, "span 2, wrap");
// since we use the name field even for acquisitions in RAM,
// we only need to gray out the directory-related components
final JComponent[] saveComponents = { browseRootButton, rootField_,
dirRootLabel };
componentsSetEnabled(saveComponents, saveCB_.isSelected());
saveCB_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
componentsSetEnabled(saveComponents, saveCB_.isSelected());
}
});
// end save panel
// start duration report panel
durationPanel_ = new JPanel(new MigLayout(
"",
"[right]6[left, 40%!]",
"[]5[]"));
durationPanel_.setBorder(PanelUtils.makeTitledBorder("Durations"));
durationPanel_.setPreferredSize(new Dimension(125, 0)); // fix width so it doesn't constantly change depending on text
durationPanel_.add(new JLabel("Slice:"));
actualSlicePeriodLabel_ = new JLabel();
durationPanel_.add(actualSlicePeriodLabel_, "wrap");
durationPanel_.add(new JLabel("Volume:"));
actualVolumeDurationLabel_ = new JLabel();
durationPanel_.add(actualVolumeDurationLabel_, "wrap");
durationPanel_.add(new JLabel("Total:"));
actualTimeLapseDurationLabel_ = new JLabel();
durationPanel_.add(actualTimeLapseDurationLabel_, "wrap");
// end duration report panel
buttonStart_ = new JToggleButton();
buttonStart_.setIconTextGap(6);
buttonStart_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
if (acquisitionRunning_.get()) {
cancelAcquisition_.set(true);
} else {
runAcquisition();
}
}
});
updateStartButton(); // call once to initialize, isSelected() will be false
acquisitionStatusLabel_ = new JLabel("");
updateAcquisitionStatus(AcquisitionStatus.NONE);
// Channel Panel (separate file for code)
multiChannelPanel_ = new MultiChannelSubPanel(gui, devices_, props_, prefs_);
multiChannelPanel_.addDurationLabelListener(this);
// Position Panel
final JPanel positionPanel = new JPanel();
positionPanel.setLayout(new MigLayout("flowx, fillx","[right]10[left][10][]","[]8[]"));
usePositionsCB_ = pu.makeCheckBox("Multiple positions (XY)",
Properties.Keys.PREFS_USE_MULTIPOSITION, panelName_, false);
      usePositionsCB_.setToolTipText("Acquire datasets at multiple positions");
usePositionsCB_.setEnabled(true);
usePositionsCB_.setFocusPainted(false);
componentBorder =
new ComponentTitledBorder(usePositionsCB_, positionPanel,
BorderFactory.createLineBorder(ASIdiSPIM.borderColor));
positionPanel.setBorder(componentBorder);
final JButton editPositionListButton = new JButton("Edit position list...");
editPositionListButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
gui_.showXYPositionList();
}
});
positionPanel.add(editPositionListButton, "span 2, center");
// add empty fill space on right side of panel
positionPanel.add(new JLabel(""), "wrap, growx");
positionPanel.add(new JLabel("Post-move delay [ms]:"));
positionDelay_ = pu.makeSpinnerFloat(0.0, 10000.0, 100.0,
Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_POSITION_DELAY,
0.0);
positionPanel.add(positionDelay_, "wrap");
// enable/disable panel elements depending on checkbox state
usePositionsCB_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
componentsSetEnabled(positionPanel, usePositionsCB_.isSelected());
}
});
componentsSetEnabled(positionPanel, usePositionsCB_.isSelected()); // initialize
// end of Position panel
// checkbox to use navigation joystick settings or not
// an "orphan" UI element
navigationJoysticksCB_ = new JCheckBox("Use Navigation joystick settings");
navigationJoysticksCB_.setSelected(prefs_.getBoolean(panelName_,
Properties.Keys.PLUGIN_USE_NAVIGATION_JOYSTICKS, false));
navigationJoysticksCB_.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
if (navigationJoysticksCB_.isSelected()) {
ASIdiSPIM.getFrame().getNavigationPanel().doJoystickSettings();
} else {
joystick_.unsetAllJoysticks();
}
prefs_.putBoolean(panelName_, Properties.Keys.PLUGIN_USE_NAVIGATION_JOYSTICKS,
navigationJoysticksCB_.isSelected());
}
});
// set up tabbed panels for GUI
// make 3 columns as own JPanels to get vertical space right
// in each column without dependencies on other columns
leftColumnPanel_ = new JPanel(new MigLayout(
"",
"[]",
"[]6[]10[]10[]"));
leftColumnPanel_.add(durationPanel_, "split 2");
leftColumnPanel_.add(timepointPanel_, "wrap, growx");
leftColumnPanel_.add(savePanel_, "wrap");
leftColumnPanel_.add(new JLabel("SPIM mode: "), "split 2, left");
AcquisitionModes acqModes = new AcquisitionModes(devices_, props_, prefs_);
spimMode_ = acqModes.getComboBox();
leftColumnPanel_.add(spimMode_, "wrap");
leftColumnPanel_.add(buttonStart_, "split 2, left");
leftColumnPanel_.add(acquisitionStatusLabel_);
centerColumnPanel_ = new JPanel(new MigLayout(
"",
"[]",
"[]"));
centerColumnPanel_.add(positionPanel, "growx, wrap");
centerColumnPanel_.add(multiChannelPanel_, "wrap");
centerColumnPanel_.add(navigationJoysticksCB_, "wrap");
// add the column panels to the main panel
this.add(leftColumnPanel_);
this.add(centerColumnPanel_);
this.add(volPanel_);
// properly initialize the advanced slice timing
advancedSliceTimingCB_.addItemListener(sliceTimingDisableGUIInputs);
sliceTimingDisableGUIInputs.itemStateChanged(null);
//advancedSliceTimingCB_.doClick();
//advancedSliceTimingCB_.doClick();
advancedSliceTimingCB_.addActionListener(showAdvancedTimingFrame);
updateDurationLabels();
// for easy timing mode, calculate slice timing to start
if (!advancedSliceTimingCB_.isSelected()
&& checkCamerasAssigned(false)) {
calculateSliceTiming_.doClick();
}
}//end constructor
public final void updateDurationLabels() {
updateActualSlicePeriodLabel();
updateActualVolumeDurationLabel();
updateActualTimeLapseDurationLabel();
}
/**
* Sets the acquisition name prefix programmatically.
* Added so that name prefix can be changed from a script.
* @param acqName
*/
public void setAcquisitionNamePrefix(String acqName) {
nameField_.setText(acqName);
}
private void updateStartButton() {
boolean started = acquisitionRunning_.get();
buttonStart_.setSelected(started);
buttonStart_.setText(started ? "Stop!" : "Start!");
buttonStart_.setBackground(started ? Color.red : Color.green);
buttonStart_.setIcon(started ?
SwingResourceManager.
getIcon(MMStudio.class,
"/org/micromanager/icons/cancel.png")
: SwingResourceManager.getIcon(MMStudio.class,
"/org/micromanager/icons/arrow_right.png"));
}
/**
* @return either "A" or "B"
*/
private String getFirstSide() {
return (String)firstSide_.getSelectedItem();
}
private boolean isFirstSideA() {
return getFirstSide().equals("A");
}
/**
* @return either 1 or 2
*/
private int getNumSides() {
if (numSides_.getSelectedIndex() == 1) {
return 2;
} else {
return 1;
}
}
private boolean isTwoSided() {
return (numSides_.getSelectedIndex() == 1);
}
private int getNumTimepoints() {
if (!useTimepointsCB_.isSelected()) {
return 1;
}
return (Integer) numTimepoints_.getValue();
}
private int getNumChannels() {
if (!multiChannelPanel_.isPanelEnabled()) {
return 1;
}
return multiChannelPanel_.getUsedChannels().length;
}
private int getLineScanPeriod() {
return (Integer) lineScanPeriod_.getValue();
}
private int getNumScansPerSlice() {
return (Integer) numScansPerSlice_.getValue();
}
private int getNumSlices() {
return (Integer) numSlices_.getValue();
}
private double getStepSizeUm() {
return PanelUtils.getSpinnerFloatValue(stepSize_);
}
/**
*
* @param showWarnings true to warn user about needing to change slice period
    * @return slice timing settings to send to the controller
*/
private SliceTiming getTimingFromPeriodAndLightExposure(boolean showWarnings) {
// uses algorithm Jon worked out in Octave code; each slice period goes like this:
// 1. camera readout time (none if in overlap mode)
// 2. any extra delay time
// 3. camera reset time
      // 4. start scan 0.25ms before camera global exposure and shifted up in time to account for delay introduced by Bessel filter
      // 5. turn on laser as soon as camera global exposure begins, leave laser on for the desired light exposure time
      // 6. end camera exposure in final 0.25ms, post-filter scan waveform also ends now
final float scanLaserBufferTime = 0.25f;
final Color foregroundColorOK = Color.BLACK;
final Color foregroundColorError = Color.RED;
final Component elementToColor = desiredSlicePeriod_.getEditor().getComponent(0);
SliceTiming s = new SliceTiming();
float cameraResetTime = computeCameraResetTime(); // recalculate for safety
float cameraReadoutTime = computeCameraReadoutTime(); // recalculate for safety
// get delay between trigger and when exposure timer starts so we can
// decrease camera exposure accordingly
      // for now simply recover the "overhead time" in computeCameraResetTime();
      // if the readout/reset calculations change then this may need to be more sophisticated
float cameraExposureDelayTime = cameraResetTime - cameraReadoutTime;
float desiredPeriod = minSlicePeriodCB_.isSelected() ? 0 :
PanelUtils.getSpinnerFloatValue(desiredSlicePeriod_);
float desiredExposure = PanelUtils.getSpinnerFloatValue(desiredLightExposure_);
float cameraReadout_max = MyNumberUtils.ceilToQuarterMs(cameraReadoutTime);
float cameraReset_max = MyNumberUtils.ceilToQuarterMs(cameraResetTime);
float slicePeriod = MyNumberUtils.roundToQuarterMs(desiredPeriod);
int scanPeriod = Math.round(desiredExposure + 2*scanLaserBufferTime);
// scan will be longer than laser by 0.25ms at both start and end
float laserDuration = scanPeriod - 2*scanLaserBufferTime; // will be integer plus 0.5
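      // worked example with the default 8.5 ms exposure: scanPeriod = round(8.5 + 2*0.25) = 9 ms
      // and laserDuration = 9 - 0.5 = 8.5 ms, i.e. the scan brackets the laser by 0.25 ms on each end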
      // compute the "extra" per-slice time: period minus camera reset and readout times minus (scan time - 0.25ms)
      // the last 0.25ms correction comes because we start the scan 0.25ms before camera global exposure
float globalDelay = slicePeriod - cameraReadout_max - cameraReset_max - scanPeriod + scanLaserBufferTime;
      // if the calculated delay is negative then the slice period must be lengthened (in 0.25 ms steps)
if (globalDelay < 0) {
float extraTimeNeeded = MyNumberUtils.ceilToQuarterMs(-1f*globalDelay); // positive number
globalDelay += extraTimeNeeded;
if (showWarnings) {
MyDialogUtils.showError(
"Increasing slice period to meet laser exposure constraint\n"
+ "(time required for camera readout; readout time depends on ROI).\n");
elementToColor.setForeground(foregroundColorError);
// considered actually changing the value, but decided against it because
// maybe the user just needs to set the ROI appropriately and recalculate
} else {
elementToColor.setForeground(foregroundColorOK);
}
} else {
elementToColor.setForeground(foregroundColorOK);
}
      // account for the delay in scan position caused by the Bessel filter by starting the scan slightly earlier
      // than we otherwise would; the delay is (empirically) ~0.33/(freq in kHz)
      // though we find better results using 0.4/(freq in kHz)
      // the group delay of the Bessel filter is approx 1/w or ~0.16/freq, i.e. half to a third of the empirical value (not sure why the discrepancy)
float scanFilterFreq = Math.max(props_.getPropValueFloat(Devices.Keys.GALVOA, Properties.Keys.SCANNER_FILTER_X),
props_.getPropValueFloat(Devices.Keys.GALVOB, Properties.Keys.SCANNER_FILTER_X));
float scanDelayFilter = 0;
if (scanFilterFreq != 0) {
scanDelayFilter = MyNumberUtils.roundToQuarterMs(0.4f/scanFilterFreq);
}
      // Add 0.25ms to globalDelay if the camera readout is 0 (i.e. overlap mode) and the scan has been shifted forward
// basically the last 0.25ms of scan time that would have determined the slice period isn't
// there any more because the scan time is moved up => add in the 0.25ms at the start of the slice
// in edge or level trigger mode the camera trig falling edge marks the end of the slice period
// not sure if PCO pseudo-overlap needs this, probably not because adding 0.25ms overhead in that case
if (MyNumberUtils.floatsEqual(cameraReadout_max, 0f) // true iff overlap being used
&& (scanDelayFilter > 0.01f)) {
globalDelay += 0.25f;
}
      // If the PLogic card is used, account for the 0.25ms delay it introduces
      // to the camera and laser trigger signals => delay the scanner by 0.25ms as well
      // (start the scanner 0.25ms later than it otherwise would be, by reducing the filter advance)
      // (really it is 0.25ms minus the evaluation time to generate the signals)
      // this time-shift opposes the Bessel filter delay
if (devices_.isValidMMDevice(Devices.Keys.PLOGIC)) {
scanDelayFilter -= 0.25f;
}
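      // assemble the slice timing: the camera is triggered after readout plus any extra global delay;
      // the laser fires one camera-reset time later (at the start of global exposure); the scan
      // starts scanLaserBufferTime before the laser, shifted further by the filter advance computed above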
s.scanDelay = cameraReadout_max + globalDelay + cameraReset_max - scanDelayFilter - scanLaserBufferTime;
s.scanNum = 1;
s.scanPeriod = scanPeriod;
s.laserDelay = cameraReadout_max + globalDelay + cameraReset_max;
s.laserDuration = laserDuration;
s.cameraDelay = cameraReadout_max + globalDelay;
s.cameraDuration = cameraReset_max + scanPeriod - scanLaserBufferTime; // approx. same as exposure, can be used in bulb mode
s.cameraExposure = s.cameraDuration
            - 0.01f  // give up a small part of our 0.25ms overhead here because the camera might round up
                     // from the set exposure time and thus exceed the total period
- cameraExposureDelayTime;
// change camera duration for overlap mode to be short trigger
// needed because exposure time is set by difference between pulses in this mode
CameraModes.Keys cameraMode = CameraModes.getKeyFromPrefCode(
prefs_.getInt(MyStrings.PanelNames.SETTINGS.toString(),
Properties.Keys.PLUGIN_CAMERA_MODE, 0));
if (cameraMode == CameraModes.Keys.OVERLAP) {
// for Hamamatsu's "synchronous" or Zyla's "overlap" mode
// send single short trigger
s.cameraDuration = 1;
}
return s;
}
/**
* @return true if the slice timing matches the current user parameters and ROI
*/
private boolean isSliceTimingUpToDate() {
SliceTiming newTiming = getTimingFromPeriodAndLightExposure(false);
return sliceTiming_.equals(newTiming);
}
/**
* Re-calculate the controller's timing settings for "easy timing" mode.
* If the values are the same nothing happens. If they should be changed,
* then the controller's properties will be set.
* @param showWarnings will show warning if the user-specified slice period too short
*/
private void recalculateSliceTiming(boolean showWarnings) {
if(!checkCamerasAssigned(true)) {
return;
}
sliceTiming_ = getTimingFromPeriodAndLightExposure(showWarnings);
PanelUtils.setSpinnerFloatValue(delayScan_, sliceTiming_.scanDelay);
numScansPerSlice_.setValue(sliceTiming_.scanNum);
lineScanPeriod_.setValue(sliceTiming_.scanPeriod);
PanelUtils.setSpinnerFloatValue(delayLaser_, sliceTiming_.laserDelay);
PanelUtils.setSpinnerFloatValue(durationLaser_, sliceTiming_.laserDuration);
PanelUtils.setSpinnerFloatValue(delayCamera_, sliceTiming_.cameraDelay);
PanelUtils.setSpinnerFloatValue(durationCamera_, sliceTiming_.cameraDuration );
}
/**
* Compute slice period in ms based on controller's timing settings.
* @return period in ms
*/
private double computeActualSlicePeriod() {
double period = Math.max(Math.max(
PanelUtils.getSpinnerFloatValue(delayScan_) + // scan time
(getLineScanPeriod() * getNumScansPerSlice()),
PanelUtils.getSpinnerFloatValue(delayLaser_)
+ PanelUtils.getSpinnerFloatValue(durationLaser_) // laser time
),
PanelUtils.getSpinnerFloatValue(delayCamera_)
+ PanelUtils.getSpinnerFloatValue(durationCamera_) // camera time
);
return period;
}
/**
* Update the displayed slice period.
*/
private void updateActualSlicePeriodLabel() {
actualSlicePeriodLabel_.setText(
NumberUtils.doubleToDisplayString(computeActualSlicePeriod()) +
" ms");
}
/**
* Compute the volume duration in ms based on controller's timing settings.
* @return duration in ms
*/
private double computeActualVolumeDuration() {
double duration = getNumSides() * getNumChannels() *
(PanelUtils.getSpinnerFloatValue(delaySide_) +
getNumSlices() * computeActualSlicePeriod());
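      // e.g. 2 sides x 1 channel x (0 ms side delay + 20 slices x 30 ms slice period) = 1200 ms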
return duration;
}
/**
* Update the displayed volume duration.
*/
private void updateActualVolumeDurationLabel() {
actualVolumeDurationLabel_.setText(
NumberUtils.doubleToDisplayString(computeActualVolumeDuration()) +
" ms");
}
/**
* Compute the time lapse duration
* @return duration in s
*/
private double computeActualTimeLapseDuration() {
double duration = (getNumTimepoints() - 1) *
PanelUtils.getSpinnerFloatValue(acquisitionInterval_)
+ computeActualVolumeDuration()/1000;
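      // e.g. 10 time points at a 60 s interval with a 1.2 s volume: (10 - 1)*60 + 1.2 = 541.2 s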
return duration;
}
/**
* Update the displayed time lapse duration.
*/
private void updateActualTimeLapseDurationLabel() {
String s = "";
double duration = computeActualTimeLapseDuration();
if (duration < 60) { // less than 1 min
s += NumberUtils.doubleToDisplayString(duration) + " s";
} else if (duration < 60*60) { // between 1 min and 1 hour
s += NumberUtils.doubleToDisplayString(Math.floor(duration/60)) + " min ";
s += NumberUtils.doubleToDisplayString(Math.round(duration % 60)) + " s";
} else { // longer than 1 hour
s += NumberUtils.doubleToDisplayString(Math.floor(duration/(60*60))) + " hr ";
s += NumberUtils.doubleToDisplayString(Math.round((duration % (60*60))/60)) + " min";
}
actualTimeLapseDurationLabel_.setText(s);
}
/**
* Computes the reset time of the SPIM cameras set on Devices panel.
* Handles single-side operation.
* Needed for computing (semi-)optimized slice timing in "easy timing" mode.
    * @return camera reset time in ms (the slower of the two cameras if two-sided)
*/
private float computeCameraResetTime() {
float resetTime;
if (isTwoSided()) {
resetTime = Math.max(cameras_.computeCameraResetTime(Devices.Keys.CAMERAA),
cameras_.computeCameraResetTime(Devices.Keys.CAMERAB));
} else {
if (isFirstSideA()) {
resetTime = cameras_.computeCameraResetTime(Devices.Keys.CAMERAA);
} else {
resetTime = cameras_.computeCameraResetTime(Devices.Keys.CAMERAB);
}
}
return resetTime;
}
/**
* Computes the readout time of the SPIM cameras set on Devices panel.
* Handles single-side operation.
* Needed for computing (semi-)optimized slice timing in "easy timing" mode.
    * @return camera readout time in ms (the slower of the two cameras if two-sided)
*/
private float computeCameraReadoutTime() {
float readoutTime;
CameraModes.Keys camMode = CameraModes.getKeyFromPrefCode(
prefs_.getInt(MyStrings.PanelNames.SETTINGS.toString(),
Properties.Keys.PLUGIN_CAMERA_MODE, 0));
boolean isOverlap = (camMode == CameraModes.Keys.OVERLAP ||
camMode == CameraModes.Keys.PSEUDO_OVERLAP);
if (isTwoSided()) {
readoutTime = Math.max(cameras_.computeCameraReadoutTime(Devices.Keys.CAMERAA, isOverlap),
cameras_.computeCameraReadoutTime(Devices.Keys.CAMERAB, isOverlap));
} else {
if (isFirstSideA()) {
readoutTime = cameras_.computeCameraReadoutTime(Devices.Keys.CAMERAA, isOverlap);
} else {
readoutTime = cameras_.computeCameraReadoutTime(Devices.Keys.CAMERAB, isOverlap);
}
}
return readoutTime;
}
/**
    * Makes sure that cameras are assigned to the desired sides and displays an error message
    * if not (e.g. if single-sided with side B first, then only the camera for side B is checked)
    * @param showWarnings true to show an error dialog if a camera is missing
    * @return true if cameras assigned, false if not
*/
private boolean checkCamerasAssigned(boolean showWarnings) {
String firstCamera, secondCamera;
boolean firstSideA = isFirstSideA();
if (firstSideA) {
firstCamera = devices_.getMMDevice(Devices.Keys.CAMERAA);
secondCamera = devices_.getMMDevice(Devices.Keys.CAMERAB);
} else {
firstCamera = devices_.getMMDevice(Devices.Keys.CAMERAB);
secondCamera = devices_.getMMDevice(Devices.Keys.CAMERAA);
}
if (firstCamera == null) {
if (showWarnings) {
MyDialogUtils.showError("Please select a valid camera for the first side (Imaging Path " +
(firstSideA ? "A" : "B") + ") on the Devices Panel");
}
return false;
}
if (isTwoSided() && secondCamera == null) {
if (showWarnings) {
MyDialogUtils.showError("Please select a valid camera for the second side (Imaging Path " +
(firstSideA ? "B" : "A") + ") on the Devices Panel.");
}
return false;
}
return true;
}
/**
* used for updateAcquisitionStatus() calls
*/
private static enum AcquisitionStatus {
NONE,
ACQUIRING,
WAITING,
DONE,
}
private void updateAcquisitionStatus(AcquisitionStatus phase) {
updateAcquisitionStatus(phase, 0);
}
private void updateAcquisitionStatus(AcquisitionStatus phase, int secsToNextAcquisition) {
String text = "";
switch(phase) {
case NONE:
text = "No acquisition in progress.";
break;
case ACQUIRING:
text = "Acquiring time point "
+ NumberUtils.intToDisplayString(numTimePointsDone_)
+ " of "
+ NumberUtils.intToDisplayString(getNumTimepoints());
break;
case WAITING:
text = "Finished "
+ NumberUtils.intToDisplayString(numTimePointsDone_)
+ " of "
+ NumberUtils.intToDisplayString(getNumTimepoints())
+ " time points; next in "
+ NumberUtils.intToDisplayString(secsToNextAcquisition)
+ " s.";
break;
case DONE:
text = "Acquisition finished with "
+ NumberUtils.intToDisplayString(numTimePointsDone_)
+ " time points.";
break;
default:
break;
}
acquisitionStatusLabel_.setText(text);
}
/**
* call setEnabled(boolean) on all components in list
* @param components
* @param enabled
*/
private static void componentsSetEnabled(JComponent[] components, boolean enabled) {
for (JComponent c : components) {
c.setEnabled(enabled);
}
}
/**
* call setEnabled(boolean) on all components in frame/panel
    * @param container
* @param enabled
*/
private static void componentsSetEnabled(Container container, boolean enabled) {
for (Component comp : container.getComponents()) {
comp.setEnabled(enabled);
}
}
/**
* Sets all the controller's properties according to volume settings
* and otherwise gets controller all ready for acquisition
* (except for final trigger).
* @param side
* @return false if there was some error that should abort acquisition
*/
private boolean prepareControllerForAquisition(Devices.Sides side) {
Devices.Keys galvoDevice = Devices.getSideSpecificKey(Devices.Keys.GALVOA, side);
Devices.Keys piezoDevice = Devices.getSideSpecificKey(Devices.Keys.PIEZOA, side);
boolean ignoreMissingScanner = prefs_.getBoolean(MyStrings.PanelNames.SETTINGS.toString(),
Properties.Keys.PREFS_IGNORE_MISSING_SCANNER, false);
boolean haveMissingScanner = !devices_.isValidMMDevice(galvoDevice);
boolean skipScannerWarnings = ignoreMissingScanner && haveMissingScanner;
      // checks to prevent other hard-to-diagnose errors
if (!ignoreMissingScanner && haveMissingScanner) {
MyDialogUtils.showError("Scanner device required; please check Devices tab.");
return false;
}
// if we are changing color slice by slice then set controller to do multiple slices per piezo move
// otherwise just set to 1 slice per piezo move
int numSlicesPerPiezo = 1;
if (props_.getPropValueInteger(Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_MULTICHANNEL_MODE)
== MultichannelModes.Keys.SLICE_HW.getPrefCode()) {
numSlicesPerPiezo = getNumChannels();
}
props_.setPropValue(galvoDevice, Properties.Keys.SPIM_NUM_SLICES_PER_PIEZO,
numSlicesPerPiezo, skipScannerWarnings);
// if we are changing color volume by volume then set controller to do multiple volumes per start trigger
// otherwise just set to 1 volume per start trigger
int numVolumesPerTrigger = 1;
if (props_.getPropValueInteger(Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_MULTICHANNEL_MODE)
== MultichannelModes.Keys.VOLUME_HW.getPrefCode()) {
numVolumesPerTrigger = getNumChannels();
}
props_.setPropValue(galvoDevice, Properties.Keys.SPIM_NUM_REPEATS, numVolumesPerTrigger, skipScannerWarnings);
AcquisitionModes.Keys spimMode = (AcquisitionModes.Keys) spimMode_.getSelectedItem();
// figure out the piezo parameters
int numSlices = getNumSlices();
float piezoAmplitude = ( (numSlices - 1) *
PanelUtils.getSpinnerFloatValue(stepSize_));
float piezoCenter = prefs_.getFloat(
MyStrings.PanelNames.SETUP.toString() + side.toString(),
Properties.Keys.PLUGIN_PIEZO_CENTER_POS, 0);
// if we set piezoAmplitude to 0 here then sliceAmplitude will also be 0
if (spimMode.equals(AcquisitionModes.Keys.NO_SCAN)) {
piezoAmplitude = 0.0f;
}
// tweak the parameters if we are using synchronous/overlap mode
// object is to get exact same piezo/scanner positions in first
// N frames (piezo/scanner will move to N+1st position but no image taken)
CameraModes.Keys cameraMode = CameraModes.getKeyFromPrefCode(
prefs_.getInt(MyStrings.PanelNames.SETTINGS.toString(),
Properties.Keys.PLUGIN_CAMERA_MODE, 0));
if (cameraMode == CameraModes.Keys.OVERLAP) {
piezoAmplitude *= ((float)numSlices)/(numSlices-1);
piezoCenter += piezoAmplitude/(2*numSlices);
numSlices += 1;
}
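      // note: scaling the amplitude by N/(N-1) before adding the extra slice keeps the per-slice
      // step size unchanged (old step = A/(N-1); new step = A*N/(N-1) divided over N intervals)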
float sliceRate = prefs_.getFloat(
MyStrings.PanelNames.SETUP.toString() + side.toString(),
Properties.Keys.PLUGIN_RATE_PIEZO_SHEET, -80);
if (MyNumberUtils.floatsEqual(sliceRate, 0.0f)) {
MyDialogUtils.showError("Rate for slice " + side.toString() +
" cannot be zero. Re-do calibration on Setup tab.");
return false;
}
float sliceOffset = prefs_.getFloat(
MyStrings.PanelNames.SETUP.toString() + side.toString(),
Properties.Keys.PLUGIN_OFFSET_PIEZO_SHEET, 0);
float sliceAmplitude = piezoAmplitude / sliceRate;
float sliceCenter = (piezoCenter - sliceOffset) / sliceRate;
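      // the scanner (slice) amplitude and center are derived from the piezo values via the
      // calibration slope ("rate") and offset saved by the Setup tab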
// get the micro-mirror card ready
// SA_AMPLITUDE_X_DEG and SA_OFFSET_X_DEG done by setup tabs
boolean triangleWave = prefs_.getBoolean(
MyStrings.PanelNames.SETTINGS.toString(),
Properties.Keys.PREFS_SCAN_OPPOSITE_DIRECTIONS, true);
Properties.Values scanPattern = triangleWave ?
Properties.Values.SAM_TRIANGLE : Properties.Values.SAM_RAMP;
props_.setPropValue(galvoDevice, Properties.Keys.SA_PATTERN_X,
scanPattern, skipScannerWarnings);
props_.setPropValue(galvoDevice, Properties.Keys.SA_AMPLITUDE_Y_DEG,
sliceAmplitude, skipScannerWarnings);
props_.setPropValue(galvoDevice, Properties.Keys.SA_OFFSET_Y_DEG,
sliceCenter, skipScannerWarnings);
props_.setPropValue(galvoDevice, Properties.Keys.BEAM_ENABLED,
Properties.Values.NO, skipScannerWarnings);
props_.setPropValue(galvoDevice, Properties.Keys.SPIM_NUM_SLICES,
numSlices, skipScannerWarnings);
props_.setPropValue(galvoDevice, Properties.Keys.SPIM_NUM_SIDES,
getNumSides(), skipScannerWarnings);
props_.setPropValue(galvoDevice, Properties.Keys.SPIM_FIRSTSIDE,
getFirstSide(), skipScannerWarnings);
// get the piezo card ready; skip if no piezo specified
if (devices_.isValidMMDevice(piezoDevice)) {
if (spimMode.equals(AcquisitionModes.Keys.SLICE_SCAN_ONLY)) {
piezoAmplitude = 0.0f;
}
props_.setPropValue(piezoDevice,
Properties.Keys.SA_AMPLITUDE, piezoAmplitude);
props_.setPropValue(piezoDevice,
Properties.Keys.SA_OFFSET, piezoCenter);
props_.setPropValue(piezoDevice,
Properties.Keys.SPIM_NUM_SLICES, numSlices);
props_.setPropValue(piezoDevice,
Properties.Keys.SPIM_STATE, Properties.Values.SPIM_ARMED);
}
return true;
}
/**
* Gets the associated PLogic BNC from the channel (containing preset name)
* @param channel
* @return value 5, 6, 7, or 8; returns 0 if there is an error
*/
private int getPLogicOutputFromChannel(ChannelSpec channel) {
try {
Configuration configData = core_.getConfigData(multiChannelPanel_.getChannelGroup(), channel.config_);
if (!configData.isPropertyIncluded(devices_.getMMDevice(Devices.Keys.PLOGIC), Properties.Keys.PLOGIC_OUTPUT_CHANNEL.toString())) {
MyDialogUtils.showError("Must include PLogic \"OutputChannel\" in preset for hardware switching");
return 0;
}
String setting = configData.getSetting(devices_.getMMDevice(Devices.Keys.PLOGIC), Properties.Keys.PLOGIC_OUTPUT_CHANNEL.toString()).getPropertyValue();
if (setting.equals(Properties.Values.PLOGIC_CHANNEL_BNC5.toString())) {
return 5;
} else if (setting.equals(Properties.Values.PLOGIC_CHANNEL_BNC6.toString())) {
return 6;
} else if (setting.equals(Properties.Values.PLOGIC_CHANNEL_BNC7.toString())) {
return 7;
} else if (setting.equals(Properties.Values.PLOGIC_CHANNEL_BNC8.toString())) {
return 8;
} else {
MyDialogUtils.showError("Channel preset setting must use PLogic \"OutputChannel\" and be set to one of outputs 5-8 only");
return 0;
}
} catch (Exception e) {
MyDialogUtils.showError(e, "Could not get PLogic output from channel");
return 0;
}
}
/**
* Programs the PLogic card for hardware channel switching
* according to the selections in the Multichannel subpanel
* @return false if there is a fatal error, true if successful
*/
private boolean setupHardwareChannelSwitching() {
final int counterLSBAddress = 3;
final int counterMSBAddress = 4;
final int laserTTLAddress = 42;
final int invertAddress = 64;
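      // these are PLogic cell/BNC addresses; as used below, adding invertAddress (64) to an
      // input address selects the inverted version of that signal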
if (!devices_.isValidMMDevice(Devices.Keys.PLOGIC)) {
MyDialogUtils.showError("PLogic card required for hardware switching");
return false;
}
// set up clock for counters
MultichannelModes.Keys prefCode = MultichannelModes.getKeyFromPrefCode(
props_.getPropValueInteger(Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_MULTICHANNEL_MODE));
switch (prefCode) {
case SLICE_HW:
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_CLOCK_LASER);
break;
case VOLUME_HW:
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_CLOCK_SIDE);
break;
default:
MyDialogUtils.showError("Unknown multichannel mode for hardware switching");
return false;
}
// set up hardware counter
switch (getNumChannels()) {
case 1:
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_COUNT_1);
break;
case 2:
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_COUNT_2);
break;
case 3:
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_COUNT_3);
break;
case 4:
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_COUNT_4);
break;
default:
MyDialogUtils.showError("Hardware channel switching only supports 1-4 channels");
return false;
}
// speed things up by turning off updates, will restore value later
String editCellUpdates = props_.getPropValueString(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_UPDATES);
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_UPDATES, Properties.Values.NO);
// make sure cells 13-16 are controlling BNCs 5-8
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_BNC5_8_ON_13_16);
// initialize cells 13-16 which control BNCs 5-8
for (int cellNum=13; cellNum<=16; cellNum++) {
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_POINTER_POSITION, cellNum);
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_TYPE, Properties.Values.PLOGIC_AND4);
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_INPUT_2, laserTTLAddress);
// note that PLC diSPIM assumes "laser + side" output mode is selected for micro-mirror card
}
// identify from the presets
ChannelSpec[] channels = multiChannelPanel_.getUsedChannels();
for (int channelNum = 0; channelNum < channels.length; channelNum++) {
// we already know there are between 1 and 4 channels
int outputNum = getPLogicOutputFromChannel(channels[channelNum]);
if (outputNum<5) {
// restore update setting
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_UPDATES, editCellUpdates);
return false; // already displayed error
}
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_POINTER_POSITION, outputNum + 8);
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_INPUT_1, invertAddress); // enable this AND4
// map the channel number to the equivalent addresses for the AND4
// inputs should be either 3 (for LSB high) or 67 (for LSB low)
// and 4 (for MSB high) or 68 (for MSB low)
int in3 = (channelNum & 0x01) > 0 ? counterLSBAddress : counterLSBAddress + invertAddress;
         int in4 = (channelNum & 0x02) > 0 ? counterMSBAddress : counterMSBAddress + invertAddress;
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_INPUT_3, in3);
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_INPUT_4, in4);
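         // e.g. channelNum 2 (binary 10): LSB low, MSB high => in3 = 3 + 64 = 67 and in4 = 4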
}
// restore update setting
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_EDIT_CELL_UPDATES, editCellUpdates);
return true;
}
/**
* Implementation of acquisition that orchestrates image
* acquisition itself rather than using the acquisition engine.
*
    * This method is public so that the ScriptInterface can call it.
    * Please do not access this yourself directly; instead use the API, e.g.
* import org.micromanager.asidispim.api.*;
* ASIdiSPIMInterface diSPIM = new ASIdiSPIMImplementation();
* diSPIM.runAcquisition();
*/
public void runAcquisition() {
class acqThread extends Thread {
acqThread(String threadName) {
super(threadName);
}
@Override
public void run() {
ReportingUtils.logMessage("User requested start of diSPIM acquisition.");
cancelAcquisition_.set(false);
acquisitionRunning_.set(true);
updateStartButton();
boolean success = runAcquisitionPrivate();
if (!success) {
ReportingUtils.logError("Fatal error running diSPIM acquisition.");
}
acquisitionRunning_.set(false);
updateStartButton();
}
}
acqThread acqt = new acqThread("diSPIM Acquisition");
acqt.start();
}
/**
* Actually runs the acquisition; does the dirty work of setting
* up the controller, the circular buffer, starting the cameras,
* grabbing the images and putting them into the acquisition, etc.
* @return true if ran without any fatal errors.
*/
private boolean runAcquisitionPrivate() {
if (gui_.isAcquisitionRunning()) {
MyDialogUtils.showError("An acquisition is already running");
return false;
}
boolean liveModeOriginally = gui_.isLiveModeOn();
if (liveModeOriginally) {
gui_.enableLiveMode(false);
}
// get MM device names for first/second cameras to acquire
String firstCamera, secondCamera;
boolean firstSideA = isFirstSideA();
if (firstSideA) {
firstCamera = devices_.getMMDevice(Devices.Keys.CAMERAA);
secondCamera = devices_.getMMDevice(Devices.Keys.CAMERAB);
} else {
firstCamera = devices_.getMMDevice(Devices.Keys.CAMERAB);
secondCamera = devices_.getMMDevice(Devices.Keys.CAMERAA);
}
boolean sideActiveA, sideActiveB;
boolean twoSided = isTwoSided();
if (twoSided) {
sideActiveA = true;
sideActiveB = true;
} else {
secondCamera = null;
if (firstSideA) {
sideActiveA = true;
sideActiveB = false;
} else {
sideActiveA = false;
sideActiveB = true;
}
}
int nrSides = getNumSides();
// set up channels
int nrChannels = getNumChannels();
String originalConfig = "";
boolean changeChannelPerVolumeSoftware = false;
boolean useChannels = multiChannelPanel_.isPanelEnabled();
if (useChannels) {
if (nrChannels < 1) {
MyDialogUtils.showError("\"Channels\" is checked, but no channels are selected");
return false;
}
MultichannelModes.Keys multichannelMode = MultichannelModes.getKeyFromPrefCode(
props_.getPropValueInteger(Devices.Keys.PLUGIN, Properties.Keys.PLUGIN_MULTICHANNEL_MODE));
switch (multichannelMode) {
case VOLUME:
changeChannelPerVolumeSoftware = true;
multiChannelPanel_.initializeChannelCycle();
// get current channel so that we can restore it
// I tried core_.get/setSystemStateCache, but that made the Tiger controller very confused and I had to re-apply the firmware
originalConfig = multiChannelPanel_.getCurrentConfig();
break;
case VOLUME_HW:
case SLICE_HW:
if (!setupHardwareChannelSwitching()) {
return false;
}
break;
default:
MyDialogUtils.showError("Unsupported multichannel mode \"" + multichannelMode.toString() + "\"");
return false;
}
}
// set up XY positions
int nrPositions = 1;
boolean usePositions = usePositionsCB_.isSelected();
PositionList positionList = new PositionList();
if (usePositions) {
try {
positionList = gui_.getPositionList();
nrPositions = positionList.getNumberOfPositions();
} catch (MMScriptException ex) {
MyDialogUtils.showError(ex, "Error getting position list for multiple XY positions");
}
if (nrPositions < 1) {
MyDialogUtils.showError("\"Positions\" is checked, but no positions are in position list");
return false;
}
}
// make sure we have cameras selected
if (!checkCamerasAssigned(true)) {
return false;
}
// make sure slice timings are up to date
if (!advancedSliceTimingCB_.isSelected()) {
if(!isSliceTimingUpToDate()) {
MyDialogUtils.showError("Slice timing is not up to date, please recalculate.");
return false;
}
}
float cameraReadoutTime = computeCameraReadoutTime();
double exposureTime = sliceTiming_.cameraExposure;
boolean show = !hideCB_.isSelected();
boolean save = saveCB_.isSelected();
boolean singleTimePointViewers = separateTimePointsCB_.isSelected();
String rootDir = rootField_.getText();
int nrRepeats; // how many acquisition windows to open
int nrFrames; // how many Micro-manager "frames" = time points to take
if (singleTimePointViewers) {
nrFrames = 1;
nrRepeats = getNumTimepoints();
} else {
nrFrames = getNumTimepoints();
nrRepeats = 1;
}
long timepointsIntervalMs = Math.round(
PanelUtils.getSpinnerFloatValue(acquisitionInterval_) * 1000d);
int nrSlices = getNumSlices();
AcquisitionModes.Keys spimMode = (AcquisitionModes.Keys) spimMode_.getSelectedItem();
boolean autoShutter = core_.getAutoShutter();
boolean shutterOpen = false;
// more sanity checks
      double slicePeriod = computeActualSlicePeriod();
      if (exposureTime + cameraReadoutTime > slicePeriod) {
         MyDialogUtils.showError("Exposure time is longer than the time available in a single slice period.\n" +
               "This will result in dropped frames.\n" +
               "Please change the inputs.");
return false;
}
double volumeDuration = computeActualVolumeDuration();
if (getNumTimepoints() > 1) {
if (timepointsIntervalMs < volumeDuration) {
MyDialogUtils.showError("Time point interval shorter than" +
" the time to collect a single volume.\n");
return false;
}
// TODO verify if 0.5 second is good value for overhead time
if (timepointsIntervalMs < (volumeDuration + 500)) {
MyDialogUtils.showError("Micro-Manager requires ~0.5 second overhead time "
+ "to finish up a volume before starting next one. "
+ "Pester the developers if you need faster, it is probably possible.");
return false;
}
}
if (nrRepeats > 10 && separateTimePointsCB_.isSelected()) {
if (!MyDialogUtils.getConfirmDialogResult(
"This will generate " + nrRepeats + " separate windows. "
+ "Do you really want to proceed?",
JOptionPane.OK_CANCEL_OPTION)) {
return false;
}
}
if (hideCB_.isSelected() && !saveCB_.isSelected()) {
MyDialogUtils.showError("Must save data to disk if viewer is hidden");
return false;
}
if (hideCB_.isSelected() && separateTimePointsCB_.isSelected()) {
         MyDialogUtils.showError("Cannot have a hidden viewer with separate viewers per time point. " +
               "Pester the developers if you really need this.");
return false;
}
// it appears the circular buffer, which is used by both cameras, can only have one
// image size setting => we require same image height and width for second camera if two-sided
if (twoSided) {
try {
Rectangle roi_1 = core_.getROI(firstCamera);
Rectangle roi_2 = core_.getROI(secondCamera);
if (roi_1.width != roi_2.width || roi_1.height != roi_2.height) {
MyDialogUtils.showError("Camera ROI height and width must be equal because of Micro-Manager's circular buffer");
return false;
}
} catch (Exception ex) {
MyDialogUtils.showError(ex, "Problem getting camera ROIs");
}
}
// empty out circular buffer
try {
core_.clearCircularBuffer();
} catch (Exception ex) {
MyDialogUtils.showError(ex, "Error emptying out the circular buffer");
return false;
}
cameras_.setSPIMCamerasForAcquisition(true);
// stop the serial traffic for position updates during acquisition
posUpdater_.setAcqRunning(true);
numTimePointsDone_ = 0;
// force saving as image stacks, not individual files
// implementation assumes just two options, either
// TaggedImageStorageDiskDefault.class or TaggedImageStorageMultipageTiff.class
boolean separateImageFilesOriginally =
ImageUtils.getImageStorageClass().equals(TaggedImageStorageDiskDefault.class);
ImageUtils.setImageStorageClass(TaggedImageStorageMultipageTiff.class);
// Set up controller SPIM parameters (including from Setup panel settings)
if (sideActiveA) {
boolean success = prepareControllerForAquisition(Devices.Sides.A);
if (! success) {
return false;
}
}
if (sideActiveB) {
boolean success = prepareControllerForAquisition(Devices.Sides.B);
if (! success) {
return false;
}
}
// sets PLogic BNC3 output high to indicate acquisition is going on
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_3, true);
long acqStart = System.currentTimeMillis();
boolean nonfatalError = false;
// do not want to return from within this loop
// loop is executed once per acquisition (i.e. once if separate viewers isn't selected)
for (int tp = 0; tp < nrRepeats; tp++) {
BlockingQueue<TaggedImage> bq = new LinkedBlockingQueue<TaggedImage>(10);
String acqName;
if (singleTimePointViewers) {
acqName = gui_.getUniqueAcquisitionName(nameField_.getText() + "_" + tp);
} else {
acqName = gui_.getUniqueAcquisitionName(nameField_.getText());
}
try {
// check for stop button before each acquisition
if (cancelAcquisition_.get()) {
throw new IllegalMonitorStateException("User stopped the acquisition");
}
ReportingUtils.logMessage("diSPIM plugin starting acquisition " + acqName);
if (spimMode.equals(AcquisitionModes.Keys.NO_SCAN) && ! singleTimePointViewers) {
// swap nrFrames and nrSlices
gui_.openAcquisition(acqName, rootDir, nrSlices, nrSides * nrChannels,
nrFrames, nrPositions, show, save);
} else {
gui_.openAcquisition(acqName, rootDir, nrFrames, nrSides * nrChannels,
nrSlices, nrPositions, show, save);
}
core_.setExposure(firstCamera, exposureTime);
if (twoSided) {
core_.setExposure(secondCamera, exposureTime);
}
// set up channels (side A/B is treated as channel too)
if (useChannels) {
ChannelSpec[] channels = multiChannelPanel_.getUsedChannels();
for (int i = 0; i < channels.length; i++) {
String chName = "-" + channels[i].config_;
gui_.setChannelName(acqName, i * 2, firstCamera + chName);
if (twoSided) {
gui_.setChannelName(acqName, i * 2 + 1, secondCamera + chName);
}
}
} else {
gui_.setChannelName(acqName, 0, firstCamera);
if (twoSided) {
gui_.setChannelName(acqName, 1, secondCamera);
}
}
// initialize acquisition
gui_.initializeAcquisition(acqName, (int) core_.getImageWidth(),
(int) core_.getImageHeight(), (int) core_.getBytesPerPixel(),
(int) core_.getImageBitDepth());
            // These metadata have to be added after initialization,
            // otherwise they will not be shown
gui_.setAcquisitionProperty(acqName, "NumberOfSides",
NumberUtils.doubleToDisplayString(getNumSides()) );
String firstSide = "B";
if (firstSideA) {
firstSide = "A";
}
gui_.setAcquisitionProperty(acqName, "FirstSide", firstSide);
gui_.setAcquisitionProperty(acqName, "SlicePeriod_ms",
actualSlicePeriodLabel_.getText());
gui_.setAcquisitionProperty(acqName, "LaserExposure_ms",
NumberUtils.doubleToDisplayString(
(double)PanelUtils.getSpinnerFloatValue(durationLaser_)));
gui_.setAcquisitionProperty(acqName, "VolumeDuration",
actualVolumeDurationLabel_.getText());
gui_.setAcquisitionProperty(acqName, "SPIMmode",
((AcquisitionModes.Keys) spimMode_.getSelectedItem()).toString());
// Multi-page TIFF saving code wants this one:
// TODO: support other types than GRAY16
gui_.setAcquisitionProperty(acqName, "PixelType", "GRAY16");
gui_.setAcquisitionProperty(acqName, "z-step_um",
NumberUtils.doubleToDisplayString(getStepSizeUm()) );
            // get the circular buffer ready
            // do this here (once per acquisition) rather than just once overall, so that ROI changes are registered
core_.initializeCircularBuffer();
// TODO: use new acquisition interface that goes through the pipeline
//gui_.setAcquisitionAddImageAsynchronous(acqName);
MMAcquisition acq = gui_.getAcquisition(acqName);
// Dive into MM internals since script interface does not support pipelines
ImageCache imageCache = acq.getImageCache();
VirtualAcquisitionDisplay vad = acq.getAcquisitionWindow();
imageCache.addImageCacheListener(vad);
// Start pumping images into the ImageCache
DefaultTaggedImageSink sink = new DefaultTaggedImageSink(bq, imageCache);
sink.start();
// Loop over all the times we trigger the controller's acquisition
// If the interval between frames is shorter than the time to acquire
// them, we can switch to hardware based solution. Not sure how important
// that feature is, so leave it out for now.
for (int timePoint = 0; timePoint < nrFrames; timePoint++) {
// handle intervals between time points
long acqNow = System.currentTimeMillis();
long delay = acqStart + timePoint * timepointsIntervalMs - acqNow;
while (delay > 0 && !cancelAcquisition_.get()) {
updateAcquisitionStatus(AcquisitionStatus.WAITING, (int) (delay / 1000));
long sleepTime = Math.min(1000, delay);
Thread.sleep(sleepTime);
acqNow = System.currentTimeMillis();
delay = acqStart + timePoint * timepointsIntervalMs - acqNow;
}
// check for stop button before each time point
if (cancelAcquisition_.get()) {
throw new IllegalMonitorStateException("User stopped the acquisition");
}
numTimePointsDone_++;
updateAcquisitionStatus(AcquisitionStatus.ACQUIRING);
// loop over all positions
for (int positionNum = 0; positionNum < nrPositions; positionNum++) {
if (usePositions) {
// blocking call; will wait for stages to move
MultiStagePosition.goToPosition(positionList.getPosition(positionNum), core_);
// wait any extra time the user requests
Thread.sleep(Math.round(PanelUtils.getSpinnerFloatValue(positionDelay_)));
}
// loop over all channels
for (int channelNum = 0; channelNum < nrChannels; channelNum++) {
// start the cameras
core_.startSequenceAcquisition(firstCamera, nrSlices, 0, true);
if (twoSided) {
core_.startSequenceAcquisition(secondCamera, nrSlices, 0, true);
}
// deal with shutter
if (autoShutter) {
core_.setAutoShutter(false);
shutterOpen = core_.getShutterOpen();
if (!shutterOpen) {
core_.setShutterOpen(true);
}
}
// deal with channel if needed (hardware channel switching doesn't happen here)
if (changeChannelPerVolumeSoftware) {
multiChannelPanel_.selectNextChannel();
}
// trigger the Tiger controller
// TODO generalize this for different ways of running SPIM
// only matters which device we trigger if there are two micro-mirror cards
if (firstSideA) {
props_.setPropValue(Devices.Keys.GALVOA, Properties.Keys.SPIM_STATE,
Properties.Values.SPIM_RUNNING, true);
} else {
props_.setPropValue(Devices.Keys.GALVOB, Properties.Keys.SPIM_STATE,
Properties.Values.SPIM_RUNNING, true);
}
ReportingUtils.logDebugMessage("Starting time point " + (timePoint+1) + " of " + nrFrames
+ " with channel number " + channelNum);
// Wait for first image to create ImageWindow, so that we can be sure about image size
// Do not actually grab first image here, just make sure it is there
long start = System.currentTimeMillis();
long now = start;
                  long timeout;  // wait at least 5 seconds (or 1.2x the volume duration) for the first image to arrive
timeout = Math.max(5000, Math.round(1.2*computeActualVolumeDuration()));
while (core_.getRemainingImageCount() == 0 && (now - start < timeout)
&& !cancelAcquisition_.get()) {
now = System.currentTimeMillis();
Thread.sleep(5);
}
if (now - start >= timeout) {
throw new Exception("Camera did not send first image within a reasonable time");
}
// grab all the images from the cameras, put them into the acquisition
int[] frNumber = new int[2];
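                  // per-camera slice counters: index 0 tracks the first camera, index 1 the second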
boolean done = false;
long timeout2; // how long to wait between images before timing out
timeout2 = Math.max(2000, Math.round(5*computeActualSlicePeriod()));
start = System.currentTimeMillis();
long last = start;
try {
while ((core_.getRemainingImageCount() > 0
|| core_.isSequenceRunning(firstCamera)
|| (twoSided && core_.isSequenceRunning(secondCamera)))
&& !done) {
now = System.currentTimeMillis();
if (core_.getRemainingImageCount() > 0) { // we have an image to grab
TaggedImage timg = core_.popNextTaggedImage();
String camera = (String) timg.tags.get("Camera");
int frBufferIndex = 0;
int ch = channelNum;
if (twoSided) {
ch = ch * 2;
}
if (camera.equals(secondCamera)) {
ch += 1;
frBufferIndex = 1;
}
if (spimMode.equals(AcquisitionModes.Keys.NO_SCAN) && ! singleTimePointViewers) {
addImageToAcquisition(acqName,
frNumber[frBufferIndex], ch, timePoint,
positionNum, now - acqStart, timg, bq);
} else { // standard
addImageToAcquisition(acqName, timePoint, ch,
frNumber[frBufferIndex], positionNum,
now - acqStart, timg, bq);
}
frNumber[frBufferIndex]++;
last = now; // keep track of last image time
// check to see if we are finished
if (frNumber[0] == frNumber[1] && frNumber[0] == nrSlices) {
done = true;
}
} else { // no image ready yet
done = cancelAcquisition_.get();
Thread.sleep(1);
if (now - last >= timeout2) {
ReportingUtils.logError("Camera did not send all expected images within" +
" a reasonable period for timepoint " + (timePoint+1) + ". Continuing anyway.");
// allow other time points to continue by stopping acquisition manually
// (in normal case the sequence acquisition stops itself after
// all the expected images are returned)
if (core_.isSequenceRunning(firstCamera)) {
core_.stopSequenceAcquisition(firstCamera);
}
if (twoSided && core_.isSequenceRunning(secondCamera)) {
core_.stopSequenceAcquisition(secondCamera);
}
nonfatalError = true;
done = true;
}
}
}
// update count if we stopped in the middle
if (cancelAcquisition_.get()) {
numTimePointsDone_--;
}
} catch (InterruptedException iex) {
MyDialogUtils.showError(iex);
}
}
}
}
} catch (IllegalMonitorStateException ex) {
// do nothing, the acquisition was simply halted during its operation
} catch (MMScriptException mex) {
MyDialogUtils.showError(mex);
} catch (Exception ex) {
MyDialogUtils.showError(ex);
} finally { // end of this acquisition (could be about to restart if separate viewers)
try {
if (core_.isSequenceRunning(firstCamera)) {
core_.stopSequenceAcquisition(firstCamera);
}
if (twoSided && core_.isSequenceRunning(secondCamera)) {
core_.stopSequenceAcquisition(secondCamera);
}
if (autoShutter) {
core_.setAutoShutter(true);
if (shutterOpen) {
core_.setShutterOpen(false);
}
}
bq.put(TaggedImageQueue.POISON);
// TODO: evaluate closeAcquisition call
// at the moment, the Micro-Manager API has a bug that causes
// a closed acquisition not to be really closed, causing problems
// when the user closes a window of the previous acquisition
// changed r14705 (2014-11-24)
// gui_.closeAcquisition(acqName);
ReportingUtils.logMessage("diSPIM plugin acquisition " + acqName +
" took: " + (System.currentTimeMillis() - acqStart) + "ms");
} catch (Exception ex) {
// exception while stopping sequence acquisition, not sure what to do...
MyDialogUtils.showError(ex, "Problem while finishing acquisition");
}
}
}
// cleanup after end of all acquisitions
// reset channel to original
if (changeChannelPerVolumeSoftware) {
multiChannelPanel_.setConfig(originalConfig);
}
// the controller will end with both beams disabled and scan off so reflect
// that in device properties
props_.setPropValue(Devices.Keys.GALVOA, Properties.Keys.BEAM_ENABLED,
Properties.Values.NO, true);
props_.setPropValue(Devices.Keys.GALVOB, Properties.Keys.BEAM_ENABLED,
Properties.Values.NO, true);
props_.setPropValue(Devices.Keys.GALVOA, Properties.Keys.SA_MODE_X,
Properties.Values.SAM_DISABLED, true);
props_.setPropValue(Devices.Keys.GALVOB, Properties.Keys.SA_MODE_X,
Properties.Values.SAM_DISABLED, true);
// sets BNC3 output low again
// this only happens after images have all been received (or timeout occurred)
// but if using DemoCam devices then it happens too early
// at least part of the problem is that both DemoCam devices "acquire" at the same time
// instead of actually obeying the controller's triggers
// as a result with DemoCam the side select (BNC4) isn't correct
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.PLOGIC_PRESET,
Properties.Values.PLOGIC_PRESET_2, true);
// move piezos back to center (neutral) position
if (devices_.isValidMMDevice(Devices.Keys.PIEZOA)) {
positions_.setPosition(Devices.Keys.PIEZOA, Joystick.Directions.NONE, 0.0);
}
if (devices_.isValidMMDevice(Devices.Keys.PIEZOB)) {
positions_.setPosition(Devices.Keys.PIEZOB, Joystick.Directions.NONE, 0.0);
}
if (cancelAcquisition_.get()) { // if user stopped us in middle
// make sure to stop the SPIM state machine in case the acquisition was cancelled
props_.setPropValue(Devices.Keys.GALVOA, Properties.Keys.SPIM_STATE,
Properties.Values.SPIM_IDLE, true);
props_.setPropValue(Devices.Keys.GALVOB, Properties.Keys.SPIM_STATE,
Properties.Values.SPIM_IDLE, true);
}
updateAcquisitionStatus(AcquisitionStatus.DONE);
posUpdater_.setAcqRunning(false);
if (separateImageFilesOriginally) {
ImageUtils.setImageStorageClass(TaggedImageStorageDiskDefault.class);
}
cameras_.setSPIMCamerasForAcquisition(false);
if (liveModeOriginally) {
gui_.enableLiveMode(true);
}
if (nonfatalError) {
MyDialogUtils.showError("Non-fatal error occurred during acquisition, see core log for details");
}
return true;
}
@Override
public void saveSettings() {
prefs_.putString(panelName_, Properties.Keys.PLUGIN_DIRECTORY_ROOT,
rootField_.getText());
prefs_.putString(panelName_, Properties.Keys.PLUGIN_NAME_PREFIX,
nameField_.getText());
// save controller settings
props_.setPropValue(Devices.Keys.PIEZOA, Properties.Keys.SAVE_CARD_SETTINGS,
Properties.Values.DO_SSZ, true);
props_.setPropValue(Devices.Keys.PIEZOB, Properties.Keys.SAVE_CARD_SETTINGS,
Properties.Values.DO_SSZ, true);
props_.setPropValue(Devices.Keys.GALVOA, Properties.Keys.SAVE_CARD_SETTINGS,
Properties.Values.DO_SSZ, true);
props_.setPropValue(Devices.Keys.GALVOB, Properties.Keys.SAVE_CARD_SETTINGS,
Properties.Values.DO_SSZ, true);
props_.setPropValue(Devices.Keys.PLOGIC, Properties.Keys.SAVE_CARD_SETTINGS,
Properties.Values.DO_SSZ, true);
}
/**
* Gets called when this tab gets focus. Refreshes values from properties.
*/
@Override
public void gotSelected() {
posUpdater_.pauseUpdates(true);
props_.callListeners();
if (navigationJoysticksCB_.isSelected()) {
if (ASIdiSPIM.getFrame() != null) {
ASIdiSPIM.getFrame().getNavigationPanel().doJoystickSettings();
}
} else {
joystick_.unsetAllJoysticks(); // disable all joysticks on this tab
}
sliceFrameAdvanced_.setVisible(advancedSliceTimingCB_.isSelected());
posUpdater_.pauseUpdates(false);
}
/**
* Called when the tab loses focus.
*/
@Override
public void gotDeSelected() {
sliceFrameAdvanced_.setVisible(false);
saveSettings();
}
@Override
public void devicesChangedAlert() {
devices_.callListeners();
}
/**
* Gets called when enclosing window closes
*/
public void windowClosing() {
sliceFrameAdvanced_.savePosition();
sliceFrameAdvanced_.dispose();
}
@Override
public void refreshDisplay() {
updateDurationLabels();
}
private void setRootDirectory(JTextField rootField) {
File result = FileDialogs.openDir(null,
"Please choose a directory root for image data",
MMStudio.MM_DATA_SET);
if (result != null) {
rootField.setText(result.getAbsolutePath());
}
}
/**
* The basic method for adding images to an existing data set. If the
* acquisition was not previously initialized, it will attempt to initialize
* it from the available image data. This version uses a blocking queue and is
* much faster than the one currently implemented in the ScriptInterface.
* Eventually, this function should be replaced by the ScriptInterface version
* of the same.
* @param name - named acquisition to add image to
* @param frame - frame number at which to insert the image
* @param channel - channel at which to insert image
* @param slice - (z) slice at which to insert image
* @param position - position at which to insert image
* @param ms - Time stamp to be added to the image metadata
* @param taggedImg - image + metadata to be added
* @param bq - Blocking queue to which the image should be added. This queue
* should be hooked up to the ImageCache belonging to this acquisition
* @throws java.lang.InterruptedException
* @throws org.micromanager.utils.MMScriptException
*/
public void addImageToAcquisition(String name,
int frame,
int channel,
int slice,
int position,
long ms,
TaggedImage taggedImg,
BlockingQueue<TaggedImage> bq) throws MMScriptException, InterruptedException {
MMAcquisition acq = gui_.getAcquisition(name);
// verify position number is allowed
if (acq.getPositions() <= position) {
throw new MMScriptException("The position number must not exceed declared"
+ " number of positions (" + acq.getPositions() + ")");
}
// verify that channel number is allowed
if (acq.getChannels() <= channel) {
throw new MMScriptException("The channel number must not exceed declared"
+ " number of channels (" + + acq.getChannels() + ")");
}
JSONObject tags = taggedImg.tags;
if (!acq.isInitialized()) {
throw new MMScriptException("Error in the ASIdiSPIM logic. Acquisition should have been initialized");
}
// create required coordinate tags
try {
MDUtils.setFrameIndex(tags, frame);
tags.put(MMTags.Image.FRAME, frame);
MDUtils.setChannelIndex(tags, channel);
MDUtils.setSliceIndex(tags, slice);
MDUtils.setPositionIndex(tags, position);
MDUtils.setElapsedTimeMs(tags, ms);
MDUtils.setImageTime(tags, MDUtils.getCurrentTime());
MDUtils.setZStepUm(tags, PanelUtils.getSpinnerFloatValue(stepSize_));
if (!tags.has(MMTags.Summary.SLICES_FIRST) && !tags.has(MMTags.Summary.TIME_FIRST)) {
// add default setting
tags.put(MMTags.Summary.SLICES_FIRST, true);
tags.put(MMTags.Summary.TIME_FIRST, false);
}
if (acq.getPositions() > 1) {
// if no position name is defined we need to insert a default one
if (!tags.has(MMTags.Image.POS_NAME)) {
tags.put(MMTags.Image.POS_NAME, "Pos" + position);
}
}
// update frames if necessary
if (acq.getFrames() <= frame) {
acq.setProperty(MMTags.Summary.FRAMES, Integer.toString(frame + 1));
}
} catch (JSONException e) {
throw new MMScriptException(e);
}
bq.put(taggedImg);
}
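// Illustrative usage sketch (not part of the original file; the names acqName, frame,
// channel, slice, position, acqStart, timg and bq are assumed to come from the
// acquisition loop above, which hooks bq up to the acquisition's ImageCache):
//
//   BlockingQueue<TaggedImage> bq = new LinkedBlockingQueue<TaggedImage>(10);
//   addImageToAcquisition(acqName, frame, channel, slice, position,
//         System.currentTimeMillis() - acqStart, timg, bq);
//   // when the acquisition ends, the queue is poisoned so the consumer can stop:
//   bq.put(TaggedImageQueue.POISON);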
}
|
ASIdiSPIM: file incoming images into appropriate channel, now with hardware volume/slice switching. Successfully able to acquire hardware-switched multichannel acquisitions now!
git-svn-id: 03a8048b5ee8463be5048a3801110fb50f378627@15049 d0ab736e-dc22-4aeb-8dc9-08def0aa14fd
|
plugins/ASIdiSPIM/src/org/micromanager/asidispim/AcquisitionPanel.java
|
ASIdiSPIM: file incoming images into appropriate channel, now with hardware volume/slice switching. Successfully able to acquire hardware-switched multichannel acquisitions now!
|
|
Java
|
mit
|
334dd5dc502b81c965149285fdb8511fde67501f
| 0
|
tkuhn/memetools,tkuhn/memetools
|
package ch.tkuhn.memetools;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
public class PrepareWosData {
@Parameter(names = "-v", description = "Write detailed log")
private boolean verbose = false;
@Parameter(names = "-rth", description = "Threshold on references (discard publications with less references)")
private int rth = 0;
@Parameter(names = "-cth", description = "Threshold on citations (discard publications with less citations)")
private int cth = 0;
private File logFile;
public static final void main(String[] args) {
PrepareWosData obj = new PrepareWosData();
JCommander jc = new JCommander(obj);
try {
jc.parse(args);
} catch (ParameterException ex) {
jc.usage();
System.exit(1);
}
obj.run();
}
private static String wosFolder = "wos";
private Map<String,String> titles;
private Map<String,String> years;
private Map<String,String> references;
private Set<FileVisitOption> walkFileTreeOptions;
public PrepareWosData() {
}
public void run() {
init();
try {
readData();
writeDataFile();
writeGmlFile();
} catch (IOException ex) {
log(ex);
System.exit(1);
}
log("Finished");
}
private void init() {
logFile = new File(MemeUtils.getLogDir(), "prepare-wos.log");
log("==========");
log("Starting...");
titles = new HashMap<String,String>();
years = new HashMap<String,String>();
references = new HashMap<String,String>();
walkFileTreeOptions = new HashSet<FileVisitOption>();
walkFileTreeOptions.add(FileVisitOption.FOLLOW_LINKS);
}
private void readData() throws IOException {
File dir = new File(MemeUtils.getRawDataDir(), wosFolder);
log("Reading files from " + dir + " ...");
Files.walkFileTree(dir.toPath(), walkFileTreeOptions, Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path path, BasicFileAttributes attrs) throws IOException {
if (path.toString().endsWith(".txt")) {
readData(path);
}
return FileVisitResult.CONTINUE;
}
});
log("Number of documents: " + titles.size());
}
private void readData(Path path) throws IOException {
log("Reading file to collect IDs: " + path);
BufferedReader reader = new BufferedReader(new FileReader(path.toFile()), 64*1024);
int errors = 0;
String line;
while ((line = reader.readLine()) != null) {
WosEntry entry = new WosEntry(line);
if (!entry.isValid()) {
errors++;
continue;
}
if (entry.refCount < rth) continue;
if (entry.citCount < cth) continue;
titles.put(entry.id, entry.title);
years.put(entry.id, entry.year);
references.put(entry.id, entry.ref);
}
reader.close();
log("Number of errors: " + errors);
}
private void writeDataFile() throws IOException {
String filename = "wos-T";
if (cth > 0) filename += "-c" + cth;
if (rth > 0) filename += "-r" + rth;
File file = new File(MemeUtils.getPreparedDataDir(), filename + ".txt");
BufferedWriter wT = new BufferedWriter(new FileWriter(file));
for (String doi1 : titles.keySet()) {
String text = titles.get(doi1);
String year = years.get(doi1);
DataEntry e = new DataEntry(doi1, year, text);
String refs = references.get(doi1);
while (!refs.isEmpty()) {
String doi2 = refs.substring(0, 9);
refs = refs.substring(9);
e.addCitedText(titles.get(doi2));
}
wT.write(e.getLine() + "\n");
}
wT.close();
}
private void writeGmlFile() throws IOException {
String filename = "wos";
if (cth > 0) filename += "-c" + cth;
if (rth > 0) filename += "-r" + rth;
File file = new File(MemeUtils.getPreparedDataDir(), filename + ".gml");
BufferedWriter w = new BufferedWriter(new FileWriter(file));
w.write("graph [\n");
w.write("directed 1\n");
for (String doi : titles.keySet()) {
String year = years.get(doi);
String text = titles.get(doi);
text = " " + text + " ";
w.write("node [\n");
w.write("id \"" + doi + "\"\n");
w.write("year \"" + year + "\"\n");
// TODO Make this general:
if (text.contains(" quantum ")) w.write("memeQuantum \"y\"\n");
if (text.contains(" traffic ")) w.write("memeTraffic \"y\"\n");
if (text.contains(" black hole ")) w.write("memeBlackHole \"y\"\n");
if (text.contains(" graphene ")) w.write("memeGraphene \"y\"\n");
w.write("]\n");
}
for (String doi1 : references.keySet()) {
String refs = references.get(doi1);
while (!refs.isEmpty()) {
String doi2 = refs.substring(0, 9);
refs = refs.substring(9);
w.write("edge [\n");
w.write("source \"" + doi1 + "\"\n");
w.write("target \"" + doi2 + "\"\n");
w.write("]\n");
}
}
w.write("]\n");
w.close();
}
private void log(Object obj) {
MemeUtils.log(logFile, obj);
}
private void logDetail(Object obj) {
if (verbose) log(obj);
}
// Data format: semicolon-delimited with the following columns:
//
// 0 t9 (9 last digits)
// 1 year
// 2 documentType (one letter)
// 3 doi (optional)
// 4 subject (two letters, optional)
// 5 iso journal (optional)
// 6 volume
// 7 issue
// 8 pages
// 9 title
// 10 noOfAuthors
// 11.. authors
// 11+noOfAuthors noOfJournals
// 12+noOfAuthors... journals (other journal labels)
// 12+noOfAuthors+noOfJournals summary (optional)
// 13+noOfAuthors+noOfJournals references (non-delimited t9)
// 14+noOfAuthors+noOfJournals citations (non-delimited t9)
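// Hypothetical example record (all values invented for illustration; the field count
// and the 9-digit reference/citation blocks are consistent with the WosEntry parser below):
//
// 000000001;2005;A;10.1000/xyz;PH;J EXAMPLE;12;3;100-110;An example title;2;Smith J;Doe A;1;J EXMPL;Example summary;000000002000000003;000000004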
private class WosEntry {
private boolean valid = false;
String id;
String title;
String year;
String ref;
String cit;
int refCount;
int citCount;
WosEntry(String line) {
String[] parts = line.split(";", -1);
if (parts.length < 15) {
logDetail("Invalid line: " + line);
return;
}
id = parts[0];
if (!id.matches("[0-9]{9}")) {
logDetail("Invalid ID: " + id);
return;
}
year = parts[1];
if (!year.matches("[0-9]{4}")) {
logDetail("Invalid year: " + year);
return;
}
title = parts[9];
if (title.isEmpty()) {
logDetail("Empty title for publication: " + id);
return;
}
ref = parts[parts.length-2];
if (!ref.matches("([0-9]{9})*")) {
logDetail("Invalid references: " + ref);
return;
}
refCount = ref.length() / 9;
cit = parts[parts.length-1];
if (!cit.matches("([0-9]{9})*")) {
logDetail("Invalid citations: " + cit);
return;
}
citCount = cit.length() / 9;
valid = true;
}
boolean isValid() {
return valid;
}
}
}
|
src/main/java/ch/tkuhn/memetools/PrepareWosData.java
|
package ch.tkuhn.memetools;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
public class PrepareWosData {
@Parameter(names = "-v", description = "Write detailed log")
private boolean verbose = false;
@Parameter(names = "-rth", description = "Threshold on references (discard publications with less references)")
private int rth = 0;
@Parameter(names = "-cth", description = "Threshold on citations (discard publications with less citations)")
private int cth = 0;
private File logFile;
public static final void main(String[] args) {
PrepareWosData obj = new PrepareWosData();
JCommander jc = new JCommander(obj);
try {
jc.parse(args);
} catch (ParameterException ex) {
jc.usage();
System.exit(1);
}
obj.run();
}
private static String wosFolder = "wos";
private Map<String,String> titles;
private Map<String,String> years;
private Map<String,String> references;
private Set<FileVisitOption> walkFileTreeOptions;
public PrepareWosData() {
}
public void run() {
init();
try {
readData();
writeDataFile();
writeGmlFile();
} catch (IOException ex) {
log(ex);
System.exit(1);
}
log("Finished");
}
private void init() {
logFile = new File(MemeUtils.getLogDir(), "prepare-wos.log");
log("==========");
log("Starting...");
titles = new HashMap<String,String>();
years = new HashMap<String,String>();
references = new HashMap<String,String>();
walkFileTreeOptions = new HashSet<FileVisitOption>();
walkFileTreeOptions.add(FileVisitOption.FOLLOW_LINKS);
}
private void readData() throws IOException {
log("Reading files...");
File dir = new File(MemeUtils.getRawDataDir(), wosFolder);
Files.walkFileTree(dir.toPath(), walkFileTreeOptions, Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path path, BasicFileAttributes attrs) throws IOException {
if (path.toString().endsWith(".txt")) {
readData(path);
}
return FileVisitResult.CONTINUE;
}
});
log("Number of documents: " + titles.size());
}
private void readData(Path path) throws IOException {
log("Reading file to collect IDs: " + path);
BufferedReader reader = new BufferedReader(new FileReader(path.toFile()), 64*1024);
int errors = 0;
String line;
while ((line = reader.readLine()) != null) {
WosEntry entry = new WosEntry(line);
if (!entry.isValid()) {
errors++;
continue;
}
if (entry.refCount < rth) continue;
if (entry.citCount < cth) continue;
titles.put(entry.id, entry.title);
years.put(entry.id, entry.year);
references.put(entry.id, entry.ref);
}
reader.close();
log("Number of errors: " + errors);
}
private void writeDataFile() throws IOException {
String filename = "wos-T";
if (cth > 0) filename += "-c" + cth;
if (rth > 0) filename += "-r" + rth;
File file = new File(MemeUtils.getPreparedDataDir(), filename + ".txt");
BufferedWriter wT = new BufferedWriter(new FileWriter(file));
for (String doi1 : titles.keySet()) {
String text = titles.get(doi1);
String year = years.get(doi1);
DataEntry e = new DataEntry(doi1, year, text);
String refs = references.get(doi1);
while (!refs.isEmpty()) {
String doi2 = refs.substring(0, 9);
refs = refs.substring(9);
e.addCitedText(titles.get(doi2));
}
wT.write(e.getLine() + "\n");
}
wT.close();
}
private void writeGmlFile() throws IOException {
String filename = "wos";
if (cth > 0) filename += "-c" + cth;
if (rth > 0) filename += "-r" + rth;
File file = new File(MemeUtils.getPreparedDataDir(), filename + ".gml");
BufferedWriter w = new BufferedWriter(new FileWriter(file));
w.write("graph [\n");
w.write("directed 1\n");
for (String doi : titles.keySet()) {
String year = years.get(doi);
String text = titles.get(doi);
text = " " + text + " ";
w.write("node [\n");
w.write("id \"" + doi + "\"\n");
w.write("year \"" + year + "\"\n");
// TODO Make this general:
if (text.contains(" quantum ")) w.write("memeQuantum \"y\"\n");
if (text.contains(" traffic ")) w.write("memeTraffic \"y\"\n");
if (text.contains(" black hole ")) w.write("memeBlackHole \"y\"\n");
if (text.contains(" graphene ")) w.write("memeGraphene \"y\"\n");
w.write("]\n");
}
for (String doi1 : references.keySet()) {
String refs = references.get(doi1);
while (!refs.isEmpty()) {
String doi2 = refs.substring(0, 9);
refs = refs.substring(9);
w.write("edge [\n");
w.write("source \"" + doi1 + "\"\n");
w.write("target \"" + doi2 + "\"\n");
w.write("]\n");
}
}
w.write("]\n");
w.close();
}
private void log(Object obj) {
MemeUtils.log(logFile, obj);
}
private void logDetail(Object obj) {
if (verbose) log(obj);
}
// Data format: semicolon-delimited with the following columns:
//
// 0 t9 (9 last digits)
// 1 year
// 2 documentType (one letter)
// 3 doi (optional)
// 4 subject (two letters, optional)
// 5 iso journal (optional)
// 6 volume
// 7 issue
// 8 pages
// 9 title
// 10 noOfAuthors
// 11.. authors
// 11+noOfAuthors noOfJournals
// 12+noOfAuthors... journals (other journal labels)
// 12+noOfAuthors+noOfJournals summary (optional)
// 13+noOfAuthors+noOfJournals references (non-delimited t9)
// 14+noOfAuthors+noOfJournals citations (non-delimited t9)
private class WosEntry {
private boolean valid = false;
String id;
String title;
String year;
String ref;
String cit;
int refCount;
int citCount;
WosEntry(String line) {
String[] parts = line.split(";", -1);
if (parts.length < 15) {
logDetail("Invalid line: " + line);
return;
}
id = parts[0];
if (!id.matches("[0-9]{9}")) {
logDetail("Invalid ID: " + id);
return;
}
year = parts[1];
if (!year.matches("[0-9]{4}")) {
logDetail("Invalid year: " + year);
return;
}
title = parts[9];
if (title.isEmpty()) {
logDetail("Empty title for publication: " + id);
return;
}
ref = parts[parts.length-2];
if (!ref.matches("([0-9]{9})*")) {
logDetail("Invalid references: " + ref);
return;
}
refCount = ref.length() / 9;
cit = parts[parts.length-1];
if (!cit.matches("([0-9]{9})*")) {
logDetail("Invalid citations: " + cit);
return;
}
citCount = cit.length() / 9;
valid = true;
}
boolean isValid() {
return valid;
}
}
}
|
Log raw data directory
|
src/main/java/ch/tkuhn/memetools/PrepareWosData.java
|
Log raw data directory
|
|
Java
|
mit
|
79c8273d6b351366773cbf3e1bd79cbbdc0858f8
| 0
|
wizzardo/http,wizzardo/http
|
package com.wizzardo.http.filter;
import com.wizzardo.http.request.Header;
import com.wizzardo.http.request.Request;
import com.wizzardo.http.response.Response;
import com.wizzardo.tools.io.BytesTools;
import com.wizzardo.tools.security.Base64;
import com.wizzardo.tools.security.MD5;
import java.util.HashMap;
import java.util.Map;
/**
* Created by wizzardo on 23.02.15.
*/
public class TokenFilter implements AuthFilter {
protected final long HOUR = 60L * 60 * 1000;
//secret = md5(user:password)
//key = md5(user)
//token = key + md5(timestamp + secret) + timestamp
//hashes - key:secret
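// resulting byte layout (40 bytes, Base64-encoded for transport), as can be read off
// generateToken() and filter() below:
//   bytes  0..15  key       = md5(user)
//   bytes 16..31  signature = md5(timestamp bytes + secret)
//   bytes 32..39  timestamp = expiry time in ms, written with BytesTools.toBytes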
protected Map<BytesHolder, BytesHolder> hashes = new HashMap<>();
protected AuthFilter authFilter;
public TokenFilter(AuthFilter authFilter) {
this.authFilter = authFilter;
}
@Override
public boolean filter(Request request, Response response) {
String token;
if ((token = request.param("token")) == null)
return authFilter.filter(request, response);
byte[] data = Base64.decodeFast(token, true);
BytesHolder secret = hashes.get(new BytesHolder(data, 0, 16));
if (secret == null)
return returnNotAuthorized(response);
MD5.create().update(data, 32, 8).update(secret.bytes).asBytes(data, 0);
if (!BytesHolder.equals(data, 0, 16, data, 16, 16))
return returnNotAuthorized(response);
long time = BytesTools.toLong(data, 32);
if (System.currentTimeMillis() > time)
return returnNotAuthorized(response);
return true;
}
@Override
public boolean returnNotAuthorized(Response response) {
return authFilter.returnNotAuthorized(response);
}
@Override
public TokenFilter allow(String user, String password) {
authFilter.allow(user, password);
hashes.put(new BytesHolder(MD5.create().update(user).asBytes()), new BytesHolder(MD5.create().update(user + ":" + password).asBytes()));
return this;
}
@Override
public String getUser(Request request) {
return authFilter.getUser(request);
}
public String generateToken(Request request) {
String auth = request.header(Header.KEY_AUTHORIZATION);
if (auth == null)
return "";
byte[] token = new byte[40]; // 16+16+8
MD5.create().update(getUser(request)).asBytes(token, 0); // key
BytesHolder secret = hashes.get(new BytesHolder(token, 0, 16));
long timestamp = System.currentTimeMillis() + HOUR * 12;
BytesTools.toBytes(timestamp, token, 32, 8);
MD5.create().update(token, 32, 8).update(secret.bytes).asBytes(token, 16);
return Base64.encodeToString(token, false, true);
}
private String sign(String timestamp, String secret) {
return MD5.create().update(timestamp + secret).asString();
}
private String sign(long timestamp, String secret) {
return sign(String.valueOf(timestamp), secret);
}
static class BytesHolder {
final byte[] bytes;
final int offset;
final int length;
int hash;
BytesHolder(byte[] bytes, int offset, int length) {
this.bytes = bytes;
this.offset = offset;
this.length = length;
}
BytesHolder(byte[] bytes) {
this(bytes, 0, bytes.length);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
BytesHolder that = (BytesHolder) o;
return equals(bytes, offset, length, that.bytes, that.offset, that.length);
}
static boolean equals(byte[] b1, int o1, int l1, byte[] b2, int o2, int l2) {
if (l1 != l2)
return false;
for (int i = 0; i < l1; i++) {
if (b1[o1 + i] != b2[o2 + i])
return false;
}
return true;
}
@Override
public int hashCode() {
if (hash != 0)
return hash;
int result = 1;
byte[] b = bytes;
for (int i = offset; i < offset + length; i++) {
byte element = b[i];
result = 31 * result + element;
}
hash = result;
return result;
}
}
}
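// Illustrative usage sketch (the wiring names below are hypothetical, not taken from this file):
//
//   AuthFilter basic = ...;                          // any AuthFilter implementation
//   TokenFilter filter = new TokenFilter(basic).allow("user", "password");
//   String token = filter.generateToken(request);    // requires an Authorization header
//   // later requests can pass ?token=<token>; filter(request, response) then verifies
//   // the signature and expiry instead of delegating to the wrapped AuthFilter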
|
src/main/java/com/wizzardo/http/filter/TokenFilter.java
|
package com.wizzardo.http.filter;
import com.wizzardo.http.request.Header;
import com.wizzardo.http.request.Request;
import com.wizzardo.http.response.Response;
import com.wizzardo.tools.security.MD5;
import java.util.HashMap;
import java.util.Map;
/**
* Created by wizzardo on 23.02.15.
*/
public class TokenFilter implements AuthFilter {
protected final long HOUR = 60L * 60 * 1000;
//secret = md5(user:password)
//key = md5(user)
//token = key + md5(timestamp + secret) + timestamp
//hashes - key:secret
protected Map<String, String> hashes = new HashMap<>();
protected AuthFilter authFilter;
public TokenFilter(AuthFilter authFilter) {
this.authFilter = authFilter;
}
@Override
public boolean filter(Request request, Response response) {
String token;
if ((token = request.param("token")) == null)
return authFilter.filter(request, response);
if (token.length() <= 64)
return returnNotAuthorized(response);
String secret = hashes.get(token.substring(0, 32));
if (secret == null)
return returnNotAuthorized(response);
String sign = token.substring(32, 64);
String timestamp = token.substring(64);
if (!sign.equals(sign(timestamp, secret)))
return returnNotAuthorized(response);
long time;
try {
time = Long.parseLong(timestamp);
} catch (NumberFormatException e) {
return returnNotAuthorized(response);
}
if (System.currentTimeMillis() > time)
return returnNotAuthorized(response);
return true;
}
@Override
public boolean returnNotAuthorized(Response response) {
return authFilter.returnNotAuthorized(response);
}
@Override
public TokenFilter allow(String user, String password) {
authFilter.allow(user, password);
hashes.put(MD5.create().update(user).asString(), MD5.create().update(user + ":" + password).asString());
return this;
}
@Override
public String getUser(Request request) {
return authFilter.getUser(request);
}
public String generateToken(Request request) {
String auth = request.header(Header.KEY_AUTHORIZATION);
if (auth == null)
return "";
String key = MD5.create().update(getUser(request)).asString();
String secret = hashes.get(key);
long timestamp = System.currentTimeMillis() + HOUR * 12;
return key + sign(timestamp, secret) + timestamp;
}
private String sign(String timestamp, String secret) {
return MD5.create().update(timestamp + secret).asString();
}
private String sign(long timestamp, String secret) {
return sign(String.valueOf(timestamp), secret);
}
}
|
optimise token
|
src/main/java/com/wizzardo/http/filter/TokenFilter.java
|
optimise token
|
|
Java
|
agpl-3.0
|
18b10aa4a3ba77f0e7b2465acc7019c71ab19de3
| 0
|
elki-project/elki,elki-project/elki,elki-project/elki
|
package de.lmu.ifi.dbs.utilities;
import de.lmu.ifi.dbs.distance.Distance;
/**
* QueryResult holds the id of a database object and its distance to a special
* query object.
*
* @author Elke Achtert (<a href="mailto:achtert@dbs.ifi.lmu.de">achtert@dbs.ifi.lmu.de</a>)
*/
public class QueryResult<D extends Distance<D>> implements Comparable<QueryResult<D>> {
/**
* The id of the underlying database object.
*/
private final int id;
/**
* The distance of the underlying database object to the query object.
*/
private final D distance;
/**
* Creates a new QueryResult object.
*
* @param id the id of the underlying database object
* @param distance the distance of the underlying database object to the query
* object
*/
public QueryResult(int id, D distance) {
this.id = id;
this.distance = distance;
}
/**
* Returns the id of the underlying database object.
*
* @return the id of the underlying database object
*/
public int getID() {
return id;
}
/**
* Returns the distance of the underlying database object to the query
* object.
*
* @return the distance of the underlying database object to the query
* object
*/
public D getDistance() {
return distance;
}
/**
* Compares this QueryResult with the given QueryResult with respect to
* the distances.
*
* @see java.lang.Comparable#compareTo(Object)
*/
public int compareTo(QueryResult<D> o) {
//noinspection unchecked
int compare = distance.compareTo(o.getDistance());
if (compare != 0)
{
return compare;
}
else
{
return this.getID() - o.getID();
}
}
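// Example: when two results have equal distances, ordering falls back to the id,
// so a result with id 3 sorts before one with id 7 at the same distance.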
/**
* Returns a string representation of this QueryResult object.
*
* @return a string representation of this QueryResult object.
*/
@Override
public String toString() {
return id + " (" + distance + ")";
}
/**
* Indicates whether some other object is "equal to" this one.
*
* @param o the reference object with which to compare.
* @return <code>true</code> if this object is the same as the o
* argument; <code>false</code> otherwise.
*/
@Override
public boolean equals(Object o) {
if (this == o)
{
return true;
}
if (o == null || getClass() != o.getClass())
{
return false;
}
final QueryResult<D> that = (QueryResult<D>) o;
if (id != that.id)
{
return false;
}
return distance.equals(that.distance);
}
/**
* Returns a hash code value for this object.
*
* @return a hash code value for this object
*/
@Override
public int hashCode() {
return id;
}
}
|
src/de/lmu/ifi/dbs/utilities/QueryResult.java
|
package de.lmu.ifi.dbs.utilities;
import de.lmu.ifi.dbs.distance.Distance;
/**
* QueryResult holds the id of a database object and its distance to a special
* query object.
*
* @author Elke Achtert (<a href="mailto:achtert@dbs.ifi.lmu.de">achtert@dbs.ifi.lmu.de</a>)
*/
public class QueryResult<D extends Distance> implements Comparable<QueryResult<D>> {
/**
* The id of the underlying database object.
*/
private final int id;
/**
* The distance of the underlying database object to the query object.
*/
private final D distance;
/**
* Creates a new QueryResult object.
*
* @param id the id of the underlying database object
* @param distance the distance of the underlying database object to the query
* object
*/
public QueryResult(int id, D distance) {
this.id = id;
this.distance = distance;
}
/**
* Returns the id of the underlying database object.
*
* @return the id of the underlying database object
*/
public int getID() {
return id;
}
/**
* Returns the distance of the underlying database object to the query
* object.
*
* @return the distance of the underlying database object to the query
* object
*/
public D getDistance() {
return distance;
}
/**
* Compares this QueryResult with the given QueryResult with respect to
* the distances.
*
* @see java.lang.Comparable#compareTo(Object)
*/
public int compareTo(QueryResult<D> o) {
//noinspection unchecked
int compare = distance.compareTo(o.getDistance());
if (compare != 0) return compare;
else
return this.getID() - o.getID();
}
/**
* Returns a string representation of this QueryResult object.
*
* @return a string representation of this QueryResult object.
*/
public String toString() {
return id + " (" + distance + ")";
}
/**
* Indicates whether some other object is "equal to" this one.
*
* @param o the reference object with which to compare.
* @return <code>true</code> if this object is the same as the o
* argument; <code>false</code> otherwise.
*/
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final QueryResult that = (QueryResult) o;
if (id != that.id) return false;
return distance.equals(that.distance);
}
/**
* Returns a hash code value for this object.
*
* @return a hash code value for this object
*/
public int hashCode() {
return id;
}
}
|
debugging
|
src/de/lmu/ifi/dbs/utilities/QueryResult.java
|
debugging
|
|
Java
|
agpl-3.0
|
27ae7b51a4961b69d17d02ecb12da20fd142cb31
| 0
|
imCodePartnerAB/imcms,imCodePartnerAB/imcms,imCodePartnerAB/imcms
|
package com.imcode.imcms.domain.service.api;
import com.imcode.imcms.WebAppSpringTestConfig;
import com.imcode.imcms.api.SourceFile;
import com.imcode.imcms.api.exception.FileAccessDeniedException;
import com.imcode.imcms.components.datainitializer.DocumentDataInitializer;
import com.imcode.imcms.components.datainitializer.TemplateDataInitializer;
import com.imcode.imcms.domain.dto.DocumentDTO;
import com.imcode.imcms.domain.dto.TextDocumentTemplateDTO;
import com.imcode.imcms.domain.exception.EmptyFileNameException;
import com.imcode.imcms.domain.service.FileService;
import com.imcode.imcms.domain.service.TextDocumentTemplateService;
import com.imcode.imcms.model.Template;
import com.imcode.imcms.model.TemplateGroup;
import com.imcode.imcms.persistence.entity.TemplateJPA;
import com.imcode.imcms.persistence.repository.TemplateRepository;
import org.apache.commons.io.FilenameUtils;
import org.apache.uima.util.FileUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.transaction.annotation.Transactional;
import java.io.IOException;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static com.imcode.imcms.api.SourceFile.FileType.DIRECTORY;
import static com.imcode.imcms.api.SourceFile.FileType.FILE;
import static org.junit.jupiter.api.Assertions.*;
@Transactional
public class FileServiceTest extends WebAppSpringTestConfig {
private final String testFileName = "fileName.jsp";
private final String testFileName2 = "fileName2.txt";
private final String testTemplateName = "templateTest";
private final String testDirectoryName = "dirName";
private final String testDirectoryName2 = testDirectoryName + "two";
private final String testDirectoryName3 = testDirectoryName + "three";
@Autowired
private FileService fileService;
@Autowired
private TemplateDataInitializer templateDataInitializer;
@Autowired
private DocumentDataInitializer documentDataInitializer;
@Autowired
private TextDocumentTemplateService documentTemplateService;
@Autowired
private TemplateRepository templateRepository;
@Value("#{'${FileAdminRootPaths}'.split(';')}")
private List<Path> testRootPaths;
@BeforeEach
@AfterEach
public void setUp() {
templateDataInitializer.cleanRepositories();
documentDataInitializer.cleanRepositories();
testRootPaths.stream().map(Path::toFile).forEach(FileUtils::deleteRecursive);
}
@Test
public void getDocumentsByTemplateName_When_TemplateHasDocuments_Expected_CorrectSize() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path template = firstRootPath.resolve(testTemplateName);
final String templateName = template.getFileName().toString();
DocumentDTO document = documentDataInitializer.createData();
Files.createDirectory(firstRootPath);
Files.createFile(template);
templateDataInitializer.createData(document.getId(), templateName, templateName);
List<DocumentDTO> documents = fileService.getDocumentsByTemplatePath(template);
assertEquals(1, documents.size());
}
@Test
public void getDocumentsByTemplateName_When_TemplateHasNotDocuments_Expected_EmptyResult() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path template = firstRootPath.resolve(testTemplateName);
final String templateName = template.getFileName().toString();
Files.createDirectory(firstRootPath);
Files.createFile(template);
templateDataInitializer.createData(templateName);
List<DocumentDTO> documents = fileService.getDocumentsByTemplatePath(template);
assertTrue(documents.isEmpty());
assertEquals(0, documents.size());
}
@Test
public void getDocumentsByTemplateName_When_TemplateNotFileButNameExists_Expected_CorrectSize() throws IOException {
final String testTemplateName = "test";
DocumentDTO document = documentDataInitializer.createData();
templateDataInitializer.createData(document.getId(), testTemplateName, testTemplateName);
List<DocumentDTO> documents = fileService.getDocumentsByTemplatePath(Paths.get(testTemplateName));
assertEquals(1, documents.size());
}
@Test
public void getDocumentsByTemplateName_When_TemplateNotFileButNameNotExists_Expected_CorrectException() {
final String testTemplateName = "fakeTest";
assertThrows(FileAccessDeniedException.class, () -> fileService.getDocumentsByTemplatePath(Paths.get(testTemplateName)));
}
@Test
public void getDocumentsByTemplateName_When_TemplateToOutSideRootDir_Expected_CorrectException() {
final Path pathOutSide = testRootPaths.get(0).resolve(testTemplateName);
final String templateName = pathOutSide.getFileName().toString();
DocumentDTO document = documentDataInitializer.createData();
templateDataInitializer.createData(document.getId(), templateName, templateName);
assertThrows(FileAccessDeniedException.class, () -> fileService.getDocumentsByTemplatePath(pathOutSide));
}
@Test
public void getDocumentsByTemplateName_When_GetImage_Expected_EmptyResult() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path imagePath = firstRootPath.resolve("image.png");
Files.createDirectory(firstRootPath);
Files.createFile(imagePath);
assertTrue(fileService.getDocumentsByTemplatePath(imagePath).isEmpty());
}
@Test
public void getRootPaths_When_PathsCorrect_Expected_CorrectSourceFiles() throws IOException {
final Path firstRootDir = testRootPaths.get(0);
final Path secondRootDir = testRootPaths.get(1);
Files.createDirectory(firstRootDir);
Files.createDirectory(secondRootDir);
final List<SourceFile> files = Arrays.asList(
new SourceFile(firstRootDir.getFileName().toString(), firstRootDir.toString(), DIRECTORY, null),
new SourceFile(secondRootDir.getFileName().toString(), secondRootDir.toString(), DIRECTORY, null)
);
assertEquals(files.size(), fileService.getRootFiles().size());
assertEquals(files, fileService.getRootFiles());
}
@Test
public void getFiles_When_FilesInDirectoryExist_Expected_CorrectFiles() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
Files.createDirectory(firstRootPath);
Files.createDirectory(pathDir);
Files.createFile(pathFile);
final List<SourceFile> files = Collections.singletonList(
new SourceFile(pathFile.getFileName().toString(), pathFile.toString(), FILE, Collections.EMPTY_LIST)
);
final List<SourceFile> foundFiles = fileService.getFiles(pathDir);
assertEquals(files.size(), foundFiles.size());
assertEquals(files, foundFiles);
}
@Test
public void getFiles_When_DirectoryHasFolderAndFile_Expected_CorrectSize() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
final Path pathDir2 = pathDir.resolve(testDirectoryName2);
Files.createDirectories(pathDir2);
Files.createFile(pathFile);
final List<SourceFile> expectedFiles = Arrays.asList(
new SourceFile(pathDir2.getFileName().toString(), pathDir2.toString(), DIRECTORY, null),
new SourceFile(pathFile.getFileName().toString(), pathFile.toString(), FILE, Collections.EMPTY_LIST)
);
final List<SourceFile> foundFiles = fileService.getFiles(pathDir);
assertEquals(expectedFiles.size(), foundFiles.size());
}
@Test
public void getFiles_When_FilesInDirectoryNotExist_Expected_EmptyResult() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
Files.createDirectory(firstRootPath);
Files.createDirectory(pathDir);
assertEquals(0, fileService.getFiles(pathDir).size());
}
@Test
public void createFile_WhenFileNameEmpty_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
Files.createDirectories(pathDir);
final Path pathNewFile = pathDir.resolve(" ");
final SourceFile newFile = new SourceFile(
pathNewFile.getFileName().toString(), pathNewFile.toString(), FILE, Collections.EMPTY_LIST
);
assertThrows(EmptyFileNameException.class, () -> fileService.createFile(newFile, false));
}
@Test
public void createFile_WhenFileNotExist_Expected_CreatedFile() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
Files.createDirectories(pathDir);
final Path pathNewFile = pathDir.resolve(testFileName);
final SourceFile newFile = new SourceFile(pathNewFile.getFileName().toString(), pathNewFile.toString(), FILE, Collections.EMPTY_LIST);
final SourceFile createdFile = fileService.createFile(newFile, false);
assertTrue(Files.exists(pathNewFile));
assertEquals(newFile, createdFile);
}
@Test
public void createFile_When_FileCreateToOutSideRootDir_Expected_CorrectException() throws IOException {
Files.createDirectory(testRootPaths.get(0));
final Path pathFile = Paths.get(testFileName);
final Path pathDir = Paths.get(testFileName);
assertFalse(Files.exists(pathFile));
final SourceFile newFile = new SourceFile(pathFile.getFileName().toString(), pathFile.toString(), FILE, Collections.EMPTY_LIST);
final SourceFile newDir = new SourceFile(pathDir.getFileName().toString(), pathDir.toString(), DIRECTORY, null);
assertThrows(FileAccessDeniedException.class, () -> fileService.createFile(newFile, false));
assertThrows(FileAccessDeniedException.class, () -> fileService.createFile(newDir, true));
assertFalse(Files.exists(pathFile));
assertFalse(Files.exists(pathDir));
}
@Test
public void saveFile_When_FileExistAndOverWrite_Expected_SavedFile() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createFile(pathFile);
final String testText = "bla-bla-bla";
final SourceFile saved = fileService.saveFile(pathFile, Collections.singletonList(testText), null);
assertNotNull(saved);
assertTrue(Files.exists(Paths.get(saved.getFullPath())));
List<String> line = saved.getContents();
assertEquals(1, line.size());
String savedContent = line.get(0);
assertEquals(testText, savedContent);
}
@Test
public void saveFile_When_FileExistAndNotOverWrite_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile2 = pathDir.resolve(testFileName);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createFile(pathFile2);
final String testText = "bla-bla-bla";
assertThrows(FileAlreadyExistsException.class, () -> fileService.saveFile(
pathFile2, Collections.singletonList(testText), StandardOpenOption.CREATE_NEW));
}
@Test
public void getFile_When_FileExists_Expected_CorrectFile() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createFile(pathFile);
assertEquals(pathFile.toString(), fileService.getFile(pathFile).toString());
}
@Test
public void getFile_When_PathFileContainsCommandCharacters_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path testPath = Paths.get("../");
final Path testPath2 = Paths.get("./");
final Path testPath3 = Paths.get("/~/");
final Path testPath4 = Paths.get(".././~/../.");
final Path testPath5 = firstRootPath.resolve("../");
final Path testPath6 = firstRootPath.resolve("./");
final Path testPath7 = firstRootPath.resolve("/~/");
final Path testPath8 = firstRootPath.resolve(".././~/../.");
Files.createDirectory(firstRootPath);
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(testPath));
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(testPath2));
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(testPath3));
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(testPath4));
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(testPath5));
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(testPath7));
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(testPath8));
assertTrue(Files.exists(fileService.getFile(testPath6)));
}
@Test
public void getFile_When_PathFileNotExist_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
assertFalse(Files.exists(pathFile));
assertThrows(NoSuchFileException.class, () -> fileService.getFile(pathFile));
}
@Test
public void getFile_When_PathFileFromToOutSideRootDir_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path outDirRoot = Paths.get(firstRootPath.getParent().toString());
Files.createDirectory(firstRootPath);
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(outDirRoot));
}
@Test
public void deleteFile_When_FileExists_Expected_Delete() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createFile(pathFile);
fileService.deleteFile(pathFile);
assertFalse(Files.exists(pathFile));
}
@Test
public void deleteDir_When_DirHasFiles_Expected_Delete() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
final Path pathFile2 = pathDir.resolve(testFileName2);
final Path pathDir2 = pathDir.resolve(testDirectoryName2);
final Path pathFile3 = pathDir2.resolve("test" + testFileName2);
Files.createDirectories(pathDir2);
Files.createFile(pathFile);
Files.createFile(pathFile2);
Files.createFile(pathFile3);
fileService.deleteFile(pathDir);
assertFalse(Files.exists(pathDir));
assertFalse(Files.exists(pathDir2));
}
@Test
public void deleteFile_When_FileNotExists_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
final String fakeName = "fake.txt";
final Path fakePathFile = pathDir.resolve(fakeName);
assertThrows(NoSuchFileException.class, () -> fileService.deleteFile(fakePathFile));
}
@Test
public void deleteDir_When_DirIsEmpty_Expected_Deleted() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
assertEquals(1, Files.list(firstRootPath).count());
fileService.deleteFile(pathDir);
assertEquals(0, Files.list(firstRootPath).count());
}
@Test
public void copyFile_When_SrcFileExists_Expected_CopyFile() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
final Path pathDir2 = firstRootPath.resolve(testDirectoryName2);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createDirectory(pathDir2);
Files.createFile(pathFile);
assertEquals(0, Files.list(pathDir2).count());
fileService.copyFile(Collections.singletonList(pathFile), pathDir2);
assertEquals(1, Files.list(pathDir).count());
assertEquals(1, Files.list(pathDir2).count());
}
@Test
public void copyFiles_When_FilesExist_Expected_CopyFiles() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFileInDir = pathDir.resolve(testFileName);
final Path pathFile2InDir = pathDir.resolve(testFileName2);
final Path pathDirTarget = firstRootPath.resolve(testDirectoryName2);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createDirectory(pathDirTarget);
List<Path> paths = new ArrayList<>();
paths.add(Files.createFile(pathFileInDir));
paths.add(Files.createFile(pathFile2InDir));
assertEquals(2, Files.list(pathDir).count());
assertEquals(0, Files.list(pathDirTarget).count());
fileService.copyFile(paths, pathDirTarget);
assertEquals(2, Files.list(pathDir).count());
assertEquals(2, Files.list(pathDirTarget).count());
}
@Test
public void copyFile_When_FileCopyToOutSideRootDir_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
final Path pathFakeFile2 = Paths.get("test");
assertFalse(Files.exists(pathDir));
Files.createDirectory(firstRootPath);
Files.createDirectory(pathDir);
Files.createFile(pathFile);
assertThrows(FileAccessDeniedException.class, () -> fileService.copyFile(
Collections.singletonList(pathFile), pathFakeFile2));
assertThrows(FileAccessDeniedException.class, () -> fileService.copyFile(Collections.singletonList(
pathFakeFile2), pathFile));
}
@Test
public void moveFiles_When_FilesExist_Expected_MoveFiles() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFileInDir = pathDir.resolve(testFileName);
final Path pathFile2InDir = pathDir.resolve(testFileName2);
final Path pathDirTarget = firstRootPath.resolve(testDirectoryName2);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createDirectory(pathDirTarget);
List<Path> paths = new ArrayList<>();
paths.add(Files.createFile(pathFileInDir));
paths.add(Files.createFile(pathFile2InDir));
assertEquals(2, Files.list(pathDir).count());
assertEquals(0, Files.list(pathDirTarget).count());
fileService.moveFile(paths, pathDirTarget);
assertEquals(0, Files.list(pathDir).count());
assertEquals(2, Files.list(pathDirTarget).count());
}
@Test
public void moveFile_When_FileExist_Expected_moveCorrectFile() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathDir2 = firstRootPath.resolve(testDirectoryName2);
final Path pathFileByDir = firstRootPath.resolve(testFileName);
assertFalse(Files.exists(pathDir));
assertFalse(Files.exists(pathDir2));
Files.createDirectories(pathDir2);
Files.createDirectory(pathDir);
List<Path> paths = Collections.singletonList(Files.createFile(pathFileByDir));
assertEquals(3, fileService.getFiles(firstRootPath).size());
assertEquals(0, fileService.getFiles(pathDir).size());
fileService.moveFile(paths, pathDir);
assertFalse(Files.exists(pathFileByDir));
assertEquals(2, fileService.getFiles(firstRootPath).size());
assertEquals(1, fileService.getFiles(pathDir).size());
}
@Test
public void moveFile_When_FileExist_Expected_RenameFile() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFileByDir = firstRootPath.resolve(testFileName);
final Path pathFile2ByDir = pathDir.resolve(testFileName2);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createFile(pathFileByDir);
assertEquals(2, fileService.getFiles(firstRootPath).size());
assertEquals(0, fileService.getFiles(pathDir).size());
SourceFile moved = fileService.moveFile(pathFileByDir, pathFile2ByDir);
assertFalse(Files.exists(pathFileByDir));
assertEquals(1, fileService.getFiles(firstRootPath).size());
assertEquals(1, fileService.getFiles(pathDir).size());
assertNotEquals(pathFileByDir.getFileName(), moved.getFileName());
}
@Test
public void moveFile_When_FilesMoveToOutSideRootDir_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
final Path pathFakeDir = Paths.get("outSideDir");
final Path pathFakeFile2 = pathFakeDir.resolve(testRootPaths.get(0).getFileName());
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
List<Path> paths = Collections.singletonList(Files.createFile(pathFile));
assertThrows(FileAccessDeniedException.class, () -> fileService.moveFile(paths, pathFakeFile2));
assertTrue(Files.exists(pathFile));
assertFalse(Files.exists(pathFakeFile2));
assertThrows(FileAccessDeniedException.class,
() -> fileService.moveFile(Collections.singletonList(pathFakeFile2), pathFile));
assertTrue(Files.exists(pathFile));
assertFalse(Files.exists(pathFakeFile2));
}
@Test
public void moveFile_When_FileMoveToOutSideRootDir_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
final Path pathFakeDir = Paths.get("outSideDir");
final Path pathFakeFile2 = pathFakeDir.resolve(testRootPaths.get(0).getFileName());
Files.createDirectories(pathDir);
Files.createFile(pathFile);
assertThrows(FileAccessDeniedException.class, () -> fileService.moveFile(pathFile, pathFakeFile2));
assertTrue(Files.exists(pathFile));
assertFalse(Files.exists(pathFakeFile2));
assertThrows(FileAccessDeniedException.class, () -> fileService.moveFile(pathFakeFile2, pathFile));
assertTrue(Files.exists(pathFile));
assertFalse(Files.exists(pathFakeFile2));
}
@Test
public void moveFile_When_FileRenameToOutSideRootDir_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
final Path pathFakeDir = Paths.get("outSideDir");
final Path pathFakeFile2 = pathFakeDir.resolve(testRootPaths.get(0).getFileName());
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createFile(pathFile);
assertThrows(FileAccessDeniedException.class, () -> fileService.moveFile(pathFile, pathFakeFile2));
assertTrue(Files.exists(pathFile));
assertFalse(Files.exists(pathFakeFile2));
assertThrows(FileAccessDeniedException.class,
() -> fileService.moveFile(pathFakeFile2, pathFile));
assertTrue(Files.exists(pathFile));
assertFalse(Files.exists(pathFakeFile2));
}
@Test
public void getFiles_WhenFilesHaveSubFiles_Expected_CorrectSize() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathDir2ByDir = pathDir.resolve(testDirectoryName2);
final Path pathFileByDir = pathDir.resolve(testFileName);
final Path pathFile2ByDir2 = pathDir2ByDir.resolve(testFileName2);
assertFalse(Files.exists(pathDir));
assertFalse(Files.exists(pathDir2ByDir));
Files.createDirectory(firstRootPath);
Files.createDirectory(pathDir);
Files.createDirectory(pathDir2ByDir);
Files.createFile(pathFileByDir);
Files.createFile(pathFile2ByDir2);
assertFalse(fileService.getFiles(pathDir).isEmpty());
assertEquals(2, fileService.getFiles(pathDir).size());
}
@Test
public void getFiles_When_OrderNotCorrect_Expected_CorrectOrder() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
Files.createDirectory(firstRootPath);
final Path file1 = firstRootPath.resolve(testFileName);
final Path file2 = firstRootPath.resolve(testFileName2);
final Path directory1 = firstRootPath.resolve(testDirectoryName);
final Path directory2 = firstRootPath.resolve(testDirectoryName2);
Files.createFile(file1);
Files.createDirectory(directory1);
Files.createFile(file2);
Files.createDirectory(directory2);
List<SourceFile> receivedFiles = fileService.getFiles(firstRootPath);
assertEquals(DIRECTORY, receivedFiles.get(0).getFileType());
assertEquals(DIRECTORY, receivedFiles.get(1).getFileType());
assertEquals(FILE, receivedFiles.get(2).getFileType());
assertEquals(FILE, receivedFiles.get(3).getFileType());
}
@Test
public void copyFiles_When_FilesExists_Expected_CopyFiles() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFileByDir = pathDir.resolve(testFileName);
final Path pathFile2ByDir = pathDir.resolve(testFileName2);
final Path targetDir = pathDir.resolve(testDirectoryName2);
Files.createDirectories(pathDir);
Files.createDirectory(targetDir);
List<Path> paths = new ArrayList<>();
paths.add(Files.createFile(pathFileByDir));
paths.add(Files.createFile(pathFile2ByDir));
assertEquals(3, Files.list(pathDir).count());
assertEquals(0, Files.list(targetDir).count());
fileService.copyFile(paths, targetDir);
assertEquals(3, Files.list(pathDir).count());
assertEquals(2, Files.list(targetDir).count());
}
@Test
public void copyDirectory_When_DirectoryExists_Expected_CopyDirectory() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathDir2ByDir = pathDir.resolve(testDirectoryName2);
final Path pathFileByDir = pathDir.resolve(testFileName);
final Path pathDir3 = firstRootPath.resolve(testDirectoryName3);
assertFalse(Files.exists(firstRootPath));
Files.createDirectories(pathDir2ByDir);
Files.createDirectory(pathDir3);
Files.createFile(pathFileByDir);
fileService.copyFile(Collections.singletonList(pathDir2ByDir), pathDir3);
assertTrue(Files.exists(pathDir2ByDir));
assertTrue(Files.exists(pathDir3));
assertEquals(1, Files.list(pathDir3).count());
}
@Test
public void moveDirectory_When_DirectoryNotEmpty_Expected_moveCorrectDirectory() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathDir2ByDir = pathDir.resolve(testDirectoryName3);
final Path pathFileByDir = pathDir.resolve(testFileName);
final Path pathDir3 = firstRootPath.resolve(testDirectoryName3);
assertFalse(Files.exists(firstRootPath));
Files.createDirectories(pathDir2ByDir);
Files.createDirectory(pathDir3);
Files.createFile(pathFileByDir);
fileService.moveFile(Collections.singletonList(pathDir2ByDir), pathDir3);
assertFalse(Files.exists(pathDir2ByDir));
assertTrue(Files.exists(pathDir3));
}
@Test
public void moveDirectory_When_SelectedTwoDirectories_Expected_moveDirectories() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathDir2 = pathDir.resolve(testDirectoryName2);
final Path pathDirTarget = firstRootPath.resolve(testDirectoryName3);
final Path pathFileByDir = pathDir.resolve(testFileName);
Files.createDirectory(firstRootPath);
Files.createDirectory(pathDir);
Files.createDirectory(pathDir2);
Files.createDirectory(pathDirTarget);
Files.createFile(pathFileByDir);
List<Path> testPaths = new ArrayList<>();
testPaths.add(pathDir2);
testPaths.add(pathDir);
assertEquals(2, Files.list(firstRootPath).count());
assertEquals(0, Files.list(pathDirTarget).count());
fileService.moveFile(testPaths, pathDirTarget);
assertFalse(Files.exists(pathDir));
assertFalse(Files.exists(pathDir2));
assertEquals(1, Files.list(firstRootPath).count());
assertEquals(2, Files.list(pathDirTarget).count());
}
@Test
public void moveFiles_When_FilesExist_Expected_moveFiles() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathDir2 = firstRootPath.resolve(testDirectoryName2);
final Path pathFileByDir = pathDir.resolve(testFileName);
final Path pathFile3ByDir = pathDir.resolve("bla" + testFileName2);
Files.createDirectory(firstRootPath);
Files.createDirectory(pathDir);
Files.createDirectory(pathDir2);
List<Path> src = new ArrayList<>();
src.add(Files.createFile(pathFileByDir));
src.add(Files.createFile(pathFile3ByDir));
assertEquals(0, Files.list(pathDir2).count());
fileService.moveFile(src, pathDir2);
assertFalse(Files.exists(pathFileByDir));
assertFalse(Files.exists(pathFile3ByDir));
assertEquals(2, Files.list(firstRootPath).count());
assertEquals(2, Files.list(pathDir2).count());
}
@Test
public void renameFile_When_FileNameEmpty_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathTest = firstRootPath.resolve(testDirectoryName);
final Path pathTarget = firstRootPath.resolve(" ");
Files.createDirectories(pathTest);
assertThrows(EmptyFileNameException.class, () -> fileService.moveFile(pathTest, pathTarget));
assertFalse(Files.exists(pathTarget));
}
@Test
public void renameTemplateFile_When_TemplateUseDocuments_Expected_Rename() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathTest = firstRootPath.resolve(testDirectoryName);
final Path pathTarget = firstRootPath.resolve(testDirectoryName2);
final String namePathTest = pathTest.getFileName().toString();
final String namePathTarget = pathTarget.getFileName().toString();
final DocumentDTO document = documentDataInitializer.createData();
final Template template = templateDataInitializer.createData(namePathTest);
templateDataInitializer.createData(document.getId(), template.getName(), template.getName());
Files.createDirectories(pathTest);
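// Renaming the template file should re-link its documents from the old template name to the new one.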
fileService.moveFile(pathTest, pathTarget);
final List<TextDocumentTemplateDTO> docsByTemplNameTarget = documentTemplateService.getByTemplateName(namePathTarget);
assertTrue(documentTemplateService.getByTemplateName(namePathTest).isEmpty());
assertFalse(docsByTemplNameTarget.isEmpty());
assertEquals(1, docsByTemplNameTarget.size());
assertFalse(Files.exists(pathTest));
}
@Test
public void saveTemplateFileInGroup_When_templateFileNotExistsInGroup_Expected_ChangeTemplateGroup() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathFileByRoot = firstRootPath.resolve(testFileName);
Files.createDirectory(firstRootPath);
Files.createFile(pathFileByRoot);
final String originalFileName = FilenameUtils.removeExtension(pathFileByRoot.getFileName().toString());
Template template = templateDataInitializer.createData(originalFileName);
TemplateGroup testGroup = templateDataInitializer.createData("testGroup", 2, false);
assertNull(template.getTemplateGroup());
Template savedTemplate = fileService.saveTemplateInGroup(pathFileByRoot, testGroup.getName());
assertNotNull(savedTemplate.getTemplateGroup());
assertEquals(testGroup.getName(), savedTemplate.getTemplateGroup().getName());
}
@Test
public void saveTemplateFileInGroup_When_templateFileExistsInGroup_Expected_ChangeTemplateGroup() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathFileByRoot = firstRootPath.resolve(testFileName);
Files.createDirectory(firstRootPath);
Files.createFile(pathFileByRoot);
final String originalFileName = FilenameUtils.removeExtension(pathFileByRoot.getFileName().toString());
TemplateJPA template = new TemplateJPA(templateDataInitializer.createData(originalFileName));
List<TemplateGroup> testGroups = templateDataInitializer.createTemplateGroups(2);
TemplateGroup testTemplateGroup = testGroups.get(0);
template.setTemplateGroup(testTemplateGroup);
TemplateJPA saved = templateRepository.save(template);
assertNotNull(saved.getTemplateGroup());
assertEquals(testTemplateGroup.getName(), saved.getTemplateGroup().getName());
final TemplateGroup expectedTemplateGroup = testGroups.get(1);
final Template changedTemplate = fileService.saveTemplateInGroup(pathFileByRoot, expectedTemplateGroup.getName());
assertNotNull(changedTemplate.getTemplateGroup());
assertNotEquals(testTemplateGroup.getName(), changedTemplate.getTemplateGroup().getName());
}
@Test
public void saveTemplateFileInGroup_When_templateFileNameEmpty_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path emptyFileName = firstRootPath.resolve(" ");
Files.createDirectory(firstRootPath);
final TemplateGroup testGroup = templateDataInitializer.createData(
"testGroup", 2, false);
assertThrows(EmptyFileNameException.class, () -> fileService.saveTemplateInGroup(emptyFileName, testGroup.getName()));
assertEquals(2, testGroup.getTemplates().size());
}
@Test
public void saveTemplateFileInGroup_When_templateFileInOutSideRoot_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathTemplateFile = firstRootPath.getParent().resolve(testFileName);
Files.createDirectory(firstRootPath);
Files.createFile(pathTemplateFile);
final TemplateGroup testGroup = templateDataInitializer.createData(
"testGroup", 2, false);
assertThrows(FileAccessDeniedException.class, () -> fileService.saveTemplateInGroup(pathTemplateFile, testGroup.getName()));
Files.delete(pathTemplateFile);
}
}
|
src/test/java/com/imcode/imcms/domain/service/api/FileServiceTest.java
|
package com.imcode.imcms.domain.service.api;
import com.imcode.imcms.WebAppSpringTestConfig;
import com.imcode.imcms.api.SourceFile;
import com.imcode.imcms.api.exception.FileAccessDeniedException;
import com.imcode.imcms.components.datainitializer.DocumentDataInitializer;
import com.imcode.imcms.components.datainitializer.TemplateDataInitializer;
import com.imcode.imcms.domain.dto.DocumentDTO;
import com.imcode.imcms.domain.dto.TextDocumentTemplateDTO;
import com.imcode.imcms.domain.exception.EmptyFileNameException;
import com.imcode.imcms.domain.service.FileService;
import com.imcode.imcms.domain.service.TextDocumentTemplateService;
import com.imcode.imcms.model.Template;
import com.imcode.imcms.model.TemplateGroup;
import com.imcode.imcms.persistence.entity.TemplateJPA;
import com.imcode.imcms.persistence.repository.TemplateRepository;
import org.apache.commons.io.FilenameUtils;
import org.apache.uima.util.FileUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.transaction.annotation.Transactional;
import java.io.IOException;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static com.imcode.imcms.api.SourceFile.FileType.DIRECTORY;
import static com.imcode.imcms.api.SourceFile.FileType.FILE;
import static org.junit.jupiter.api.Assertions.*;
@Transactional
public class FileServiceTest extends WebAppSpringTestConfig {
private final String testFileName = "fileName.jsp";
private final String testFileName2 = "fileName2.txt";
private final String testTemplateName = "templateTest";
private final String testDirectoryName = "dirName";
private final String testDirectoryName2 = testDirectoryName + "two";
private final String testDirectoryName3 = testDirectoryName + "three";
@Autowired
private FileService fileService;
@Autowired
private TemplateDataInitializer templateDataInitializer;
@Autowired
private DocumentDataInitializer documentDataInitializer;
@Autowired
private TextDocumentTemplateService documentTemplateService;
@Autowired
private TemplateRepository templateRepository;
@Value("#{'${FileAdminRootPaths}'.split(';')}")
private List<Path> testRootPaths;
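// Runs before and after every test: clears template/document test data and deletes the test root directories from disk.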
@BeforeEach
@AfterEach
public void setUp() {
templateDataInitializer.cleanRepositories();
documentDataInitializer.cleanRepositories();
testRootPaths.stream().map(Path::toFile).forEach(FileUtils::deleteRecursive);
}
@Test
public void getDocumentsByTemplateName_When_TemplateHasDocuments_Expected_CorrectSize() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path template = firstRootPath.resolve(testTemplateName);
final String templateName = template.getFileName().toString();
DocumentDTO document = documentDataInitializer.createData();
Files.createDirectory(firstRootPath);
Files.createFile(template);
templateDataInitializer.createData(document.getId(), templateName, templateName);
List<DocumentDTO> documents = fileService.getDocumentsByTemplatePath(template);
assertEquals(1, documents.size());
}
@Test
public void getDocumentsByTemplateName_When_TemplateHasNotDocuments_Expected_EmptyResult() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path template = firstRootPath.resolve(testTemplateName);
final String templateName = template.getFileName().toString();
Files.createDirectory(firstRootPath);
Files.createFile(template);
templateDataInitializer.createData(templateName);
List<DocumentDTO> documents = fileService.getDocumentsByTemplatePath(template);
assertTrue(documents.isEmpty());
assertEquals(0, documents.size());
}
@Test
public void getDocumentsByTemplateName_When_TemplateNotFileButNameExists_Expected_CorrectSize() throws IOException {
final String testTemplateName = "test";
DocumentDTO document = documentDataInitializer.createData();
templateDataInitializer.createData(document.getId(), testTemplateName, testTemplateName);
List<DocumentDTO> documents = fileService.getDocumentsByTemplatePath(Paths.get(testTemplateName));
assertEquals(1, documents.size());
}
@Test
public void getDocumentsByTemplateName_When_TemplateNotFileButNameNotExists_Expected_CorrectException() {
final String testTemplateName = "fakeTest";
assertThrows(FileAccessDeniedException.class, () -> fileService.getDocumentsByTemplatePath(Paths.get(testTemplateName)));
}
@Test
public void getDocumentsByTemplateName_When_TemplateToOutSideRootDir_Expected_CorrectException() {
final Path pathOutSide = Paths.get(testTemplateName);
final String templateName = pathOutSide.getFileName().toString();
DocumentDTO document = documentDataInitializer.createData();
templateDataInitializer.createData(document.getId(), templateName, templateName);
assertThrows(FileAccessDeniedException.class, () -> fileService.getDocumentsByTemplatePath(pathOutSide));
}
@Test
public void getDocumentsByTemplateName_When_GetImage_Expected_EmptyResult() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path imagePath = firstRootPath.resolve("image.png");
Files.createDirectory(firstRootPath);
Files.createFile(imagePath);
assertTrue(fileService.getDocumentsByTemplatePath(imagePath).isEmpty());
}
@Test
public void getRootPaths_When_PathsCorrect_Expected_CorrectSourceFiles() throws IOException {
final Path firstRootDir = testRootPaths.get(0);
final Path secondRootDir = testRootPaths.get(1);
Files.createDirectory(firstRootDir);
Files.createDirectory(secondRootDir);
final List<SourceFile> files = Arrays.asList(
new SourceFile(firstRootDir.getFileName().toString(), firstRootDir.toString(), DIRECTORY, null),
new SourceFile(secondRootDir.getFileName().toString(), secondRootDir.toString(), DIRECTORY, null)
);
assertEquals(files.size(), fileService.getRootFiles().size());
assertEquals(files, fileService.getRootFiles());
}
@Test
public void getFiles_When_FilesInDirectoryExist_Expected_CorrectFiles() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
Files.createDirectory(firstRootPath);
Files.createDirectory(pathDir);
Files.createFile(pathFile);
final List<SourceFile> files = Collections.singletonList(
new SourceFile(pathFile.getFileName().toString(), pathFile.toString(), FILE, Collections.EMPTY_LIST)
);
final List<SourceFile> foundFiles = fileService.getFiles(pathDir);
assertEquals(files.size(), foundFiles.size());
assertEquals(files, foundFiles);
}
@Test
public void getFiles_When_DirectoryHasFolderAndFile_Expected_CorrectSize() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
final Path pathDir2 = pathDir.resolve(testDirectoryName2);
Files.createDirectories(pathDir2);
Files.createFile(pathFile);
final List<SourceFile> expectedFiles = Arrays.asList(
new SourceFile(pathDir2.getFileName().toString(), pathDir2.toString(), DIRECTORY, null),
new SourceFile(pathFile.getFileName().toString(), pathFile.toString(), FILE, Collections.EMPTY_LIST)
);
final List<SourceFile> foundFiles = fileService.getFiles(pathDir);
assertEquals(expectedFiles.size(), foundFiles.size());
}
@Test
public void getFiles_When_FilesInDirectoryNotExist_Expected_EmptyResult() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
Files.createDirectory(firstRootPath);
Files.createDirectory(pathDir);
assertEquals(0, fileService.getFiles(pathDir).size());
}
@Test
public void createFile_WhenFileNameEmpty_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
Files.createDirectories(pathDir);
final Path pathNewFile = pathDir.resolve(" ");
final SourceFile newFile = new SourceFile(
pathNewFile.getFileName().toString(), pathNewFile.toString(), FILE, Collections.EMPTY_LIST
);
assertThrows(EmptyFileNameException.class, () -> fileService.createFile(newFile, false));
}
@Test
public void createFile_WhenFileNotExist_Expected_CreatedFile() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
Files.createDirectories(pathDir);
final Path pathNewFile = pathDir.resolve(testFileName);
final SourceFile newFile = new SourceFile(pathNewFile.getFileName().toString(), pathNewFile.toString(), FILE, Collections.EMPTY_LIST);
final SourceFile createdFile = fileService.createFile(newFile, false);
assertTrue(Files.exists(pathNewFile));
assertEquals(newFile, createdFile);
}
@Test
public void createFile_When_FileCreateToOutSideRootDir_Expected_CorrectException() throws IOException {
Files.createDirectory(testRootPaths.get(0));
final Path pathFile = Paths.get(testFileName);
final Path pathDir = Paths.get(testFileName);
assertFalse(Files.exists(pathFile));
final SourceFile newFile = new SourceFile(pathFile.getFileName().toString(), pathFile.toString(), FILE, Collections.EMPTY_LIST);
final SourceFile newDir = new SourceFile(pathDir.getFileName().toString(), pathDir.toString(), DIRECTORY, null);
assertThrows(FileAccessDeniedException.class, () -> fileService.createFile(newFile, false));
assertThrows(FileAccessDeniedException.class, () -> fileService.createFile(newDir, true));
assertFalse(Files.exists(pathFile));
assertFalse(Files.exists(pathDir));
}
@Test
public void saveFile_When_FileExistAndOverWrite_Expected_SavedFile() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createFile(pathFile);
final String testText = "bla-bla-bla";
final SourceFile saved = fileService.saveFile(pathFile, Collections.singletonList(testText), null);
assertNotNull(saved);
assertTrue(Files.exists(Paths.get(saved.getFullPath())));
List<String> line = saved.getContents();
assertEquals(1, line.size());
String savedContent = line.get(0);
assertEquals(testText, savedContent);
}
@Test
public void saveFile_When_FileExistAndNotOverWrite_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile2 = pathDir.resolve(testFileName);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createFile(pathFile2);
final String testText = "bla-bla-bla";
assertThrows(FileAlreadyExistsException.class, () -> fileService.saveFile(
pathFile2, Collections.singletonList(testText), StandardOpenOption.CREATE_NEW));
}
@Test
public void getFile_When_FileExists_Expected_CorrectFile() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createFile(pathFile);
assertEquals(pathFile.toString(), fileService.getFile(pathFile).toString());
}
@Test
public void getFile_When_PathFileContainsCommandCharacters_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path testPath = Paths.get("../");
final Path testPath2 = Paths.get("./");
final Path testPath3 = Paths.get("/~/");
final Path testPath4 = Paths.get(".././~/../.");
final Path testPath5 = firstRootPath.resolve("../");
final Path testPath6 = firstRootPath.resolve("./");
final Path testPath7 = firstRootPath.resolve("/~/");
final Path testPath8 = firstRootPath.resolve(".././~/../.");
Files.createDirectory(firstRootPath);
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(testPath));
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(testPath2));
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(testPath3));
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(testPath4));
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(testPath5));
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(testPath7));
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(testPath8));
assertTrue(Files.exists(fileService.getFile(testPath6)));
}
@Test
public void getFile_When_PathFileNotExist_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
assertFalse(Files.exists(pathFile));
assertThrows(NoSuchFileException.class, () -> fileService.getFile(pathFile));
}
@Test
public void getFile_When_PathFileFromToOutSideRootDir_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path outDirRoot = Paths.get(firstRootPath.getParent().toString());
Files.createDirectory(firstRootPath);
assertThrows(FileAccessDeniedException.class, () -> fileService.getFile(outDirRoot));
}
@Test
public void deleteFile_When_FileExists_Expected_Delete() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createFile(pathFile);
fileService.deleteFile(pathFile);
assertFalse(Files.exists(pathFile));
}
@Test
public void deleteDir_When_DirHasFiles_Expected_Delete() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
final Path pathFile2 = pathDir.resolve(testFileName2);
final Path pathDir2 = pathDir.resolve(testDirectoryName2);
final Path pathFile3 = pathDir2.resolve("test" + testFileName2);
Files.createDirectories(pathDir2);
Files.createFile(pathFile);
Files.createFile(pathFile2);
Files.createFile(pathFile3);
fileService.deleteFile(pathDir);
assertFalse(Files.exists(pathDir));
assertFalse(Files.exists(pathDir2));
}
@Test
public void deleteFile_When_FileNotExists_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
final String fakeName = "fake.txt";
final Path fakePathFile = pathDir.resolve(fakeName);
assertThrows(NoSuchFileException.class, () -> fileService.deleteFile(fakePathFile));
}
@Test
public void deleteDir_When_DirIsEmpty_Expected_Deleted() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
assertEquals(1, Files.list(firstRootPath).count());
fileService.deleteFile(pathDir);
assertEquals(0, Files.list(firstRootPath).count());
}
@Test
public void copyFile_When_SrcFileExists_Expected_CopyFile() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
final Path pathDir2 = firstRootPath.resolve(testDirectoryName2);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createDirectory(pathDir2);
Files.createFile(pathFile);
assertEquals(0, Files.list(pathDir2).count());
fileService.copyFile(Collections.singletonList(pathFile), pathDir2);
assertEquals(1, Files.list(pathDir).count());
assertEquals(1, Files.list(pathDir2).count());
}
@Test
public void copyFiles_When_FilesExist_Expected_CopyFiles() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFileInDir = pathDir.resolve(testFileName);
final Path pathFile2InDir = pathDir.resolve(testFileName2);
final Path pathDirTarget = firstRootPath.resolve(testDirectoryName2);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createDirectory(pathDirTarget);
List<Path> paths = new ArrayList<>();
paths.add(Files.createFile(pathFileInDir));
paths.add(Files.createFile(pathFile2InDir));
assertEquals(2, Files.list(pathDir).count());
assertEquals(0, Files.list(pathDirTarget).count());
fileService.copyFile(paths, pathDirTarget);
assertEquals(2, Files.list(pathDir).count());
assertEquals(2, Files.list(pathDirTarget).count());
}
@Test
public void copyFile_When_FileCopyToOutSideRootDir_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
final Path pathFakeFile2 = Paths.get("test");
assertFalse(Files.exists(pathDir));
Files.createDirectory(firstRootPath);
Files.createDirectory(pathDir);
Files.createFile(pathFile);
assertThrows(FileAccessDeniedException.class, () -> fileService.copyFile(
Collections.singletonList(pathFile), pathFakeFile2));
assertThrows(FileAccessDeniedException.class, () -> fileService.copyFile(Collections.singletonList(
pathFakeFile2), pathFile));
}
@Test
public void moveFiles_When_FilesExist_Expected_MoveFiles() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFileInDir = pathDir.resolve(testFileName);
final Path pathFile2InDir = pathDir.resolve(testFileName2);
final Path pathDirTarget = firstRootPath.resolve(testDirectoryName2);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createDirectory(pathDirTarget);
List<Path> paths = new ArrayList<>();
paths.add(Files.createFile(pathFileInDir));
paths.add(Files.createFile(pathFile2InDir));
assertEquals(2, Files.list(pathDir).count());
assertEquals(0, Files.list(pathDirTarget).count());
fileService.moveFile(paths, pathDirTarget);
assertEquals(0, Files.list(pathDir).count());
assertEquals(2, Files.list(pathDirTarget).count());
}
@Test
public void moveFile_When_FileExist_Expected_moveCorrectFile() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathDir2 = firstRootPath.resolve(testDirectoryName2);
final Path pathFileByDir = firstRootPath.resolve(testFileName);
assertFalse(Files.exists(pathDir));
assertFalse(Files.exists(pathDir2));
Files.createDirectories(pathDir2);
Files.createDirectory(pathDir);
List<Path> paths = Collections.singletonList(Files.createFile(pathFileByDir));
assertEquals(3, fileService.getFiles(firstRootPath).size());
assertEquals(0, fileService.getFiles(pathDir).size());
fileService.moveFile(paths, pathDir);
assertFalse(Files.exists(pathFileByDir));
assertEquals(2, fileService.getFiles(firstRootPath).size());
assertEquals(1, fileService.getFiles(pathDir).size());
}
@Test
public void moveFile_When_FileExist_Expected_RenameFile() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFileByDir = firstRootPath.resolve(testFileName);
final Path pathFile2ByDir = pathDir.resolve(testFileName2);
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createFile(pathFileByDir);
assertEquals(2, fileService.getFiles(firstRootPath).size());
assertEquals(0, fileService.getFiles(pathDir).size());
SourceFile moved = fileService.moveFile(pathFileByDir, pathFile2ByDir);
assertFalse(Files.exists(pathFileByDir));
assertEquals(1, fileService.getFiles(firstRootPath).size());
assertEquals(1, fileService.getFiles(pathDir).size());
assertNotEquals(pathFileByDir.getFileName(), moved.getFileName());
}
@Test
public void moveFile_When_FilesMoveToOutSideRootDir_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
final Path pathFakeDir = Paths.get("outSideDir");
final Path pathFakeFile2 = pathFakeDir.resolve(testRootPaths.get(0).getFileName());
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
List<Path> paths = Collections.singletonList(Files.createFile(pathFile));
assertThrows(FileAccessDeniedException.class, () -> fileService.moveFile(paths, pathFakeFile2));
assertTrue(Files.exists(pathFile));
assertFalse(Files.exists(pathFakeFile2));
assertThrows(FileAccessDeniedException.class,
() -> fileService.moveFile(Collections.singletonList(pathFakeFile2), pathFile));
assertTrue(Files.exists(pathFile));
assertFalse(Files.exists(pathFakeFile2));
}
@Test
public void moveFile_When_FileMoveToOutSideRootDir_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
final Path pathFakeDir = Paths.get("outSideDir");
final Path pathFakeFile2 = pathFakeDir.resolve(testRootPaths.get(0).getFileName());
Files.createDirectories(pathDir);
Files.createFile(pathFile);
assertThrows(FileAccessDeniedException.class, () -> fileService.moveFile(pathFile, pathFakeFile2));
assertTrue(Files.exists(pathFile));
assertFalse(Files.exists(pathFakeFile2));
assertThrows(FileAccessDeniedException.class, () -> fileService.moveFile(pathFakeFile2, pathFile));
assertTrue(Files.exists(pathFile));
assertFalse(Files.exists(pathFakeFile2));
}
@Test
public void moveFile_When_FileRenameToOutSideRootDir_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFile = pathDir.resolve(testFileName);
final Path pathFakeDir = Paths.get("outSideDir");
final Path pathFakeFile2 = pathFakeDir.resolve(testRootPaths.get(0).getFileName());
assertFalse(Files.exists(pathDir));
Files.createDirectories(pathDir);
Files.createFile(pathFile);
assertThrows(FileAccessDeniedException.class, () -> fileService.moveFile(pathFile, pathFakeFile2));
assertTrue(Files.exists(pathFile));
assertFalse(Files.exists(pathFakeFile2));
assertThrows(FileAccessDeniedException.class,
() -> fileService.moveFile(pathFakeFile2, pathFile));
assertTrue(Files.exists(pathFile));
assertFalse(Files.exists(pathFakeFile2));
}
@Test
public void getFiles_WhenFilesHaveSubFiles_Expected_CorrectSize() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathDir2ByDir = pathDir.resolve(testDirectoryName2);
final Path pathFileByDir = pathDir.resolve(testFileName);
final Path pathFile2ByDir2 = pathDir2ByDir.resolve(testFileName2);
assertFalse(Files.exists(pathDir));
assertFalse(Files.exists(pathDir2ByDir));
Files.createDirectory(firstRootPath);
Files.createDirectory(pathDir);
Files.createDirectory(pathDir2ByDir);
Files.createFile(pathFileByDir);
Files.createFile(pathFile2ByDir2);
assertFalse(fileService.getFiles(pathDir).isEmpty());
assertEquals(2, fileService.getFiles(pathDir).size());
}
@Test
public void getFiles_When_OrderNotCorrect_Expected_CorrectOrder() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
Files.createDirectory(firstRootPath);
final Path file1 = firstRootPath.resolve(testFileName);
final Path file2 = firstRootPath.resolve(testFileName2);
final Path directory1 = firstRootPath.resolve(testDirectoryName);
final Path directory2 = firstRootPath.resolve(testDirectoryName2);
Files.createFile(file1);
Files.createDirectory(directory1);
Files.createFile(file2);
Files.createDirectory(directory2);
List<SourceFile> receivedFiles = fileService.getFiles(firstRootPath);
assertEquals(DIRECTORY, receivedFiles.get(0).getFileType());
assertEquals(DIRECTORY, receivedFiles.get(1).getFileType());
assertEquals(FILE, receivedFiles.get(2).getFileType());
assertEquals(FILE, receivedFiles.get(3).getFileType());
}
@Test
public void copyFiles_When_FilesExists_Expected_CopyFiles() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathFileByDir = pathDir.resolve(testFileName);
final Path pathFile2ByDir = pathDir.resolve(testFileName2);
final Path targetDir = pathDir.resolve(testDirectoryName2);
Files.createDirectories(pathDir);
Files.createDirectory(targetDir);
List<Path> paths = new ArrayList<>();
paths.add(Files.createFile(pathFileByDir));
paths.add(Files.createFile(pathFile2ByDir));
assertEquals(3, Files.list(pathDir).count());
assertEquals(0, Files.list(targetDir).count());
fileService.copyFile(paths, targetDir);
assertEquals(3, Files.list(pathDir).count());
assertEquals(2, Files.list(targetDir).count());
}
@Test
public void copyDirectory_When_DirectoryExists_Expected_CopyDirectory() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathDir2ByDir = pathDir.resolve(testDirectoryName2);
final Path pathFileByDir = pathDir.resolve(testFileName);
final Path pathDir3 = firstRootPath.resolve(testDirectoryName3);
assertFalse(Files.exists(firstRootPath));
Files.createDirectories(pathDir2ByDir);
Files.createDirectory(pathDir3);
Files.createFile(pathFileByDir);
fileService.copyFile(Collections.singletonList(pathDir2ByDir), pathDir3);
assertTrue(Files.exists(pathDir2ByDir));
assertTrue(Files.exists(pathDir3));
assertEquals(1, Files.list(pathDir3).count());
}
@Test
public void moveDirectory_When_DirectoryNotEmpty_Expected_moveCorrectDirectory() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathDir2ByDir = pathDir.resolve(testDirectoryName3);
final Path pathFileByDir = pathDir.resolve(testFileName);
final Path pathDir3 = firstRootPath.resolve(testDirectoryName3);
assertFalse(Files.exists(firstRootPath));
Files.createDirectories(pathDir2ByDir);
Files.createDirectory(pathDir3);
Files.createFile(pathFileByDir);
fileService.moveFile(Collections.singletonList(pathDir2ByDir), pathDir3);
assertFalse(Files.exists(pathDir2ByDir));
assertTrue(Files.exists(pathDir3));
}
@Test
public void moveDirectory_When_SelectedTwoDirectories_Expected_moveDirectories() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathDir2 = pathDir.resolve(testDirectoryName2);
final Path pathDirTarget = firstRootPath.resolve(testDirectoryName3);
final Path pathFileByDir = pathDir.resolve(testFileName);
Files.createDirectory(firstRootPath);
Files.createDirectory(pathDir);
Files.createDirectory(pathDir2);
Files.createDirectory(pathDirTarget);
Files.createFile(pathFileByDir);
List<Path> testPaths = new ArrayList<>();
testPaths.add(pathDir2);
testPaths.add(pathDir);
assertEquals(2, Files.list(firstRootPath).count());
assertEquals(0, Files.list(pathDirTarget).count());
fileService.moveFile(testPaths, pathDirTarget);
assertFalse(Files.exists(pathDir));
assertFalse(Files.exists(pathDir2));
assertEquals(1, Files.list(firstRootPath).count());
assertEquals(2, Files.list(pathDirTarget).count());
}
@Test
public void moveFiles_When_FilesExist_Expected_moveFiles() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathDir = firstRootPath.resolve(testDirectoryName);
final Path pathDir2 = firstRootPath.resolve(testDirectoryName2);
final Path pathFileByDir = pathDir.resolve(testFileName);
final Path pathFile3ByDir = pathDir.resolve("bla" + testFileName2);
Files.createDirectory(firstRootPath);
Files.createDirectory(pathDir);
Files.createDirectory(pathDir2);
List<Path> src = new ArrayList<>();
src.add(Files.createFile(pathFileByDir));
src.add(Files.createFile(pathFile3ByDir));
assertEquals(0, Files.list(pathDir2).count());
fileService.moveFile(src, pathDir2);
assertFalse(Files.exists(pathFileByDir));
assertFalse(Files.exists(pathFile3ByDir));
assertEquals(2, Files.list(firstRootPath).count());
assertEquals(2, Files.list(pathDir2).count());
}
@Test
public void renameFile_When_FileNameEmpty_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathTest = firstRootPath.resolve(testDirectoryName);
final Path pathTarget = firstRootPath.resolve(" ");
Files.createDirectories(pathTest);
assertThrows(EmptyFileNameException.class, () -> fileService.moveFile(pathTest, pathTarget));
assertFalse(Files.exists(pathTarget));
}
@Test
public void renameTemplateFile_When_TemplateUseDocuments_Expected_Rename() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathTest = firstRootPath.resolve(testDirectoryName);
final Path pathTarget = firstRootPath.resolve(testDirectoryName2);
final String namePathTest = pathTest.getFileName().toString();
final String namePathTarget = pathTarget.getFileName().toString();
final DocumentDTO document = documentDataInitializer.createData();
final Template template = templateDataInitializer.createData(namePathTest);
templateDataInitializer.createData(document.getId(), template.getName(), template.getName());
Files.createDirectories(pathTest);
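// Renaming the template file should re-link its documents from the old template name to the new one.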
fileService.moveFile(pathTest, pathTarget);
final List<TextDocumentTemplateDTO> docsByTemplNameTarget = documentTemplateService.getByTemplateName(namePathTarget);
assertTrue(documentTemplateService.getByTemplateName(namePathTest).isEmpty());
assertFalse(docsByTemplNameTarget.isEmpty());
assertEquals(1, docsByTemplNameTarget.size());
assertFalse(Files.exists(pathTest));
}
@Test
public void saveTemplateFileInGroup_When_templateFileNotExistsInGroup_Expected_ChangeTemplateGroup() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathFileByRoot = firstRootPath.resolve(testFileName);
Files.createDirectory(firstRootPath);
Files.createFile(pathFileByRoot);
final String originalFileName = FilenameUtils.removeExtension(pathFileByRoot.getFileName().toString());
Template template = templateDataInitializer.createData(originalFileName);
TemplateGroup testGroup = templateDataInitializer.createData("testGroup", 2, false);
assertNull(template.getTemplateGroup());
Template savedTemplate = fileService.saveTemplateInGroup(pathFileByRoot, testGroup.getName());
assertNotNull(savedTemplate.getTemplateGroup());
assertEquals(testGroup.getName(), savedTemplate.getTemplateGroup().getName());
}
@Test
public void saveTemplateFileInGroup_When_templateFileExistsInGroup_Expected_ChangeTemplateGroup() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathFileByRoot = firstRootPath.resolve(testFileName);
Files.createDirectory(firstRootPath);
Files.createFile(pathFileByRoot);
final String originalFileName = FilenameUtils.removeExtension(pathFileByRoot.getFileName().toString());
TemplateJPA template = new TemplateJPA(templateDataInitializer.createData(originalFileName));
List<TemplateGroup> testGroups = templateDataInitializer.createTemplateGroups(2);
TemplateGroup testTemplateGroup = testGroups.get(0);
template.setTemplateGroup(testTemplateGroup);
TemplateJPA saved = templateRepository.save(template);
assertNotNull(saved.getTemplateGroup());
assertEquals(testTemplateGroup.getName(), saved.getTemplateGroup().getName());
final TemplateGroup expectedTemplateGroup = testGroups.get(1);
final Template changedTemplate = fileService.saveTemplateInGroup(pathFileByRoot, expectedTemplateGroup.getName());
assertNotNull(changedTemplate.getTemplateGroup());
assertNotEquals(testTemplateGroup.getName(), changedTemplate.getTemplateGroup().getName());
}
@Test
public void saveTemplateFileInGroup_When_templateFileNameEmpty_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path emptyFileName = firstRootPath.resolve(" ");
Files.createDirectory(firstRootPath);
final TemplateGroup testGroup = templateDataInitializer.createData(
"testGroup", 2, false);
assertThrows(EmptyFileNameException.class, () -> fileService.saveTemplateInGroup(emptyFileName, testGroup.getName()));
assertEquals(2, testGroup.getTemplates().size());
}
@Test
public void saveTemplateFileInGroup_When_templateFileInOutSideRoot_Expected_CorrectException() throws IOException {
final Path firstRootPath = testRootPaths.get(0);
final Path pathTemplateFile = firstRootPath.getParent().resolve(testFileName);
Files.createDirectory(firstRootPath);
Files.createFile(pathTemplateFile);
final TemplateGroup testGroup = templateDataInitializer.createData(
"testGroup", 2, false);
assertThrows(FileAccessDeniedException.class, () -> fileService.saveTemplateInGroup(pathTemplateFile, testGroup.getName()));
Files.delete(pathTemplateFile);
}
}
|
Issue IMCMS-332: New design to super admin page: files tab
- Change path
|
src/test/java/com/imcode/imcms/domain/service/api/FileServiceTest.java
|
Issue IMCMS-332: New design to super admin page: files tab - Change path
|
|
Java
|
agpl-3.0
|
00cf5b2543e9ef411b76b84b4a1a003bb7450198
| 0
|
buremba/rakam,rakam-io/rakam,buremba/rakam,rakam-io/rakam,rakam-io/rakam,buremba/rakam,buremba/rakam,buremba/rakam
|
package org.rakam.analysis;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.rakam.EventBuilder;
import org.rakam.analysis.FunnelQueryExecutor.FunnelStep;
import org.rakam.analysis.FunnelQueryExecutor.FunnelWindow;
import org.rakam.analysis.metadata.Metastore;
import org.rakam.collection.Event;
import org.rakam.plugin.EventStore;
import org.rakam.report.QueryResult;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.time.Instant;
import java.time.LocalDate;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static com.google.common.collect.ImmutableList.of;
import static java.time.ZoneOffset.UTC;
import static org.rakam.analysis.FunnelQueryExecutor.FunnelType.APPROXIMATE;
import static org.rakam.analysis.FunnelQueryExecutor.FunnelType.NORMAL;
import static org.rakam.analysis.FunnelQueryExecutor.FunnelType.ORDERED;
import static org.rakam.analysis.FunnelQueryExecutor.WindowType.DAY;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
public abstract class TestFunnelQueryExecutor {
private static final int SCALE_FACTOR = 10;
private static final String PROJECT_NAME = TestFunnelQueryExecutor.class.getName().replace(".", "_").toLowerCase();
@BeforeSuite
public void setup() throws Exception {
EventBuilder builder = new EventBuilder(PROJECT_NAME, getMetastore());
getMetastore().createProject(PROJECT_NAME);
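// Create four collections (test0..test3); each receives SCALE_FACTOR events whose teststr alternates between test0/test1, whose _user cycles through three ids, and whose _time values are spaced 100 seconds apart.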
for (int cIdx = 0; cIdx < 4; cIdx ++) {
final int finalCIdx = cIdx;
List<Event> events = IntStream.range(0, SCALE_FACTOR).mapToObj(i -> builder.createEvent("test" + finalCIdx,
ImmutableMap.<String, Object>builder()
.put("teststr", "test" + (i % 2))
.put("_user", "test" + (i % 3))
.put("_time", Instant.ofEpochSecond((i * 100) + finalCIdx)).build())).collect(Collectors.toList());
getEventStore().storeBatch(events);
}
}
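// Each parameterized test runs with both the ORDERED and NORMAL funnel types.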
@DataProvider(name = "types")
public static Object[][] hashEnabledValuesProvider()
{
return new Object[][] {
{ORDERED},
{NORMAL}
};
}
@AfterSuite
public void destroy()
throws InterruptedException
{
getMetastore().deleteProject(PROJECT_NAME);
}
public abstract EventStore getEventStore();
public abstract Metastore getMetastore();
public abstract FunnelQueryExecutor getFunnelQueryExecutor();
@Test(dataProvider = "types")
public void testSingleStep(FunnelQueryExecutor.FunnelType funnelType) throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME, of(new FunnelStep("test0", null)),
Optional.empty(),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.of(new FunnelWindow(30, DAY)), UTC,
Optional.empty(), funnelType).getResult().join();
assertFalse(query.isFailed());
assertEquals(query.getResult(), of(of("Step 1", 3L)));
}
@Test
public void testSingleStepApproximate() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME, of(new FunnelStep("test0", null)),
Optional.empty(),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.empty(), UTC,
Optional.empty(), FunnelQueryExecutor.FunnelType.APPROXIMATE).getResult().join();
assertFalse(query.isFailed());
}
@Test(dataProvider = "types")
public void testMultipleSteps(FunnelQueryExecutor.FunnelType funnelType) throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", null), new FunnelStep("test1", null), new FunnelStep("test2", null)),
Optional.empty(),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.of(new FunnelWindow(30, DAY)), UTC,
Optional.empty(), funnelType).getResult().join();
assertFalse(query.isFailed());
assertEquals(query.getResult(), of(of("Step 1", 3L), of("Step 2", 3L), of("Step 3", 3L)));
}
@Test
public void testMultipleStepsApproximate() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", null), new FunnelStep("test1", null), new FunnelStep("test2", null)),
Optional.empty(),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.empty(), UTC,
Optional.empty(), APPROXIMATE).getResult().join();
assertFalse(query.isFailed());
}
@Test(dataProvider = "types")
public void testMultipleStepsGrouping(FunnelQueryExecutor.FunnelType funnelType) throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", null), new FunnelStep("test1", null), new FunnelStep("test2", null)),
Optional.of("teststr"),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.of(new FunnelWindow(30, DAY)), UTC,
Optional.empty(), funnelType).getResult().join();
assertFalse(query.isFailed());
assertEquals(ImmutableSet.copyOf(query.getResult()), ImmutableSet.of(
of("Step 1", "test0", 3L), of("Step 1", "test1", 3L),
of("Step 2", "test0", 3L), of("Step 2", "test1", 3L),
of("Step 3", "test0", 3L), of("Step 3", "test1", 3L)));
}
@Test
public void testMultipleStepsGroupingApproximate() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", null), new FunnelStep("test1", null), new FunnelStep("test2", null)),
Optional.of("teststr"),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.empty(), UTC,
Optional.empty(), APPROXIMATE).getResult().join();
assertFalse(query.isFailed());
}
@Test(dataProvider = "types")
public void testDimension(FunnelQueryExecutor.FunnelType funnelType) throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", null), new FunnelStep("test1", null)),
Optional.of("teststr"),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.of(new FunnelWindow(30, DAY)), UTC,
Optional.empty(), funnelType).getResult().join();
assertFalse(query.isFailed());
assertEquals(ImmutableSet.copyOf(query.getResult()),
ImmutableSet.of(
of("Step 1", "test0", 3L),
of("Step 1", "test1", 3L),
of("Step 2", "test0", 3L),
of("Step 2", "test1", 3L)));
}
@Test
public void testDimensionApproximate() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", null), new FunnelStep("test1", null)),
Optional.of("teststr"),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.of(new FunnelWindow(30, DAY)), UTC,
Optional.empty(), NORMAL).getResult().join();
assertFalse(query.isFailed());
}
@Test(dataProvider = "types")
public void testFilter(FunnelQueryExecutor.FunnelType funnelType) throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", Optional.of("teststr = 'test1'")), new FunnelStep("test1", Optional.of("teststr = 'test1'"))),
Optional.of("teststr"),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.of(new FunnelWindow(30, DAY)), UTC,
Optional.empty(), funnelType).getResult().join();
assertFalse(query.isFailed());
assertEquals(query.getResult(), of(of("Step 1", "test1", 3L), of("Step 2", "test1", 3L)));
}
@Test
public void testFilterApproximate() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", Optional.of("teststr = 'test1'")), new FunnelStep("test1", Optional.of("teststr = 'test1'"))),
Optional.of("teststr"),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.empty(), UTC,
Optional.empty(), APPROXIMATE).getResult().join();
assertFalse(query.isFailed());
}
@Test
public void testSegment() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", Optional.of("teststr = 'test1'")), new FunnelStep("test1", Optional.of("teststr = 'test1'"))),
Optional.of("_time"),
Optional.of(FunnelQueryExecutor.FunnelTimestampSegments.DAY_OF_MONTH.value()),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.empty(), UTC,
Optional.empty(), NORMAL).getResult().join();
assertFalse(query.isFailed());
assertEquals(query.getResult(), of(of("Step 1", "1th day", 3L), of("Step 2", "1th day", 3L)));
}
@Test
public void testSegmentOrdered() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", Optional.of("teststr = 'test1'")), new FunnelStep("test1", Optional.of("teststr = 'test1'"))),
Optional.of("_time"),
Optional.of(FunnelQueryExecutor.FunnelTimestampSegments.DAY_OF_MONTH.value()),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.empty(), UTC,
Optional.empty(), ORDERED).getResult().join();
assertFalse(query.isFailed());
assertEquals(query.getResult(), of(of("Step 1", "1th day", 3L), of("Step 2", "1th day", 3L)));
}
@Test
public void testSegmentApproximate() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", Optional.of("teststr = 'test1'")), new FunnelStep("test1", Optional.of("teststr = 'test1'"))),
Optional.of("_time"),
Optional.of(FunnelQueryExecutor.FunnelTimestampSegments.DAY_OF_MONTH.value()),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.empty(), UTC,
Optional.empty(), APPROXIMATE).getResult().join();
assertFalse(query.isFailed());
assertTrue(query.getResult().equals(of(of("Step 0", "1th day", 3L), of("Step 1", "1th day", 3L))) ||
        query.getResult().equals(of(of("Step 1", "1th day", 3L), of("Step 2", "1th day", 3L))));
}
@Test
public void testSameConnectorAndDimension() throws Exception {
}
@Test
public void testLongConnector() throws Exception {
}
}
|
rakam-spi/src/test/java/org/rakam/analysis/TestFunnelQueryExecutor.java
|
package org.rakam.analysis;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.rakam.EventBuilder;
import org.rakam.analysis.FunnelQueryExecutor.FunnelStep;
import org.rakam.analysis.FunnelQueryExecutor.FunnelWindow;
import org.rakam.analysis.metadata.Metastore;
import org.rakam.collection.Event;
import org.rakam.plugin.EventStore;
import org.rakam.report.QueryResult;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.time.Instant;
import java.time.LocalDate;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static com.google.common.collect.ImmutableList.of;
import static java.time.ZoneOffset.UTC;
import static org.rakam.analysis.FunnelQueryExecutor.FunnelType.APPROXIMATE;
import static org.rakam.analysis.FunnelQueryExecutor.FunnelType.NORMAL;
import static org.rakam.analysis.FunnelQueryExecutor.FunnelType.ORDERED;
import static org.rakam.analysis.FunnelQueryExecutor.WindowType.DAY;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
public abstract class TestFunnelQueryExecutor {
private static final int SCALE_FACTOR = 10;
private static final String PROJECT_NAME = TestFunnelQueryExecutor.class.getName().replace(".", "_").toLowerCase();
@BeforeSuite
public void setup() throws Exception {
EventBuilder builder = new EventBuilder(PROJECT_NAME, getMetastore());
getMetastore().createProject(PROJECT_NAME);
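// Create four collections (test0..test3); each receives SCALE_FACTOR events whose teststr alternates between test0/test1, whose _user cycles through three ids, and whose _time values are spaced 100 seconds apart.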
for (int cIdx = 0; cIdx < 4; cIdx ++) {
final int finalCIdx = cIdx;
List<Event> events = IntStream.range(0, SCALE_FACTOR).mapToObj(i -> builder.createEvent("test" + finalCIdx,
ImmutableMap.<String, Object>builder()
.put("teststr", "test" + (i % 2))
.put("_user", "test" + (i % 3))
.put("_time", Instant.ofEpochSecond((i * 100) + finalCIdx)).build())).collect(Collectors.toList());
getEventStore().storeBatch(events);
}
}
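// Each parameterized test runs with both the ORDERED and NORMAL funnel types.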
@DataProvider(name = "types")
public static Object[][] hashEnabledValuesProvider()
{
return new Object[][] {
{ORDERED},
{NORMAL}
};
}
@AfterSuite
public void destroy()
throws InterruptedException
{
getMetastore().deleteProject(PROJECT_NAME);
}
public abstract EventStore getEventStore();
public abstract Metastore getMetastore();
public abstract FunnelQueryExecutor getFunnelQueryExecutor();
@Test(dataProvider = "types")
public void testSingleStep(FunnelQueryExecutor.FunnelType funnelType) throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME, of(new FunnelStep("test0", null)),
Optional.empty(),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.of(new FunnelWindow(30, DAY)), UTC,
Optional.empty(), funnelType).getResult().join();
assertFalse(query.isFailed());
assertEquals(query.getResult(), of(of("Step 1", 3L)));
}
@Test
public void testSingleStepApproximate() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME, of(new FunnelStep("test0", null)),
Optional.empty(),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.empty(), UTC,
Optional.empty(), FunnelQueryExecutor.FunnelType.APPROXIMATE).getResult().join();
assertFalse(query.isFailed());
}
@Test(dataProvider = "types")
public void testMultipleSteps(FunnelQueryExecutor.FunnelType funnelType) throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", null), new FunnelStep("test1", null), new FunnelStep("test2", null)),
Optional.empty(),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.of(new FunnelWindow(30, DAY)), UTC,
Optional.empty(), funnelType).getResult().join();
assertFalse(query.isFailed());
assertEquals(query.getResult(), of(of("Step 1", 3L), of("Step 2", 3L), of("Step 3", 3L)));
}
@Test
public void testMultipleStepsApproximate() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", null), new FunnelStep("test1", null), new FunnelStep("test2", null)),
Optional.empty(),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.empty(), UTC,
Optional.empty(), APPROXIMATE).getResult().join();
assertFalse(query.isFailed());
}
@Test(dataProvider = "types")
public void testMultipleStepsGrouping(FunnelQueryExecutor.FunnelType funnelType) throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", null), new FunnelStep("test1", null), new FunnelStep("test2", null)),
Optional.of("teststr"),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.of(new FunnelWindow(30, DAY)), UTC,
Optional.empty(), funnelType).getResult().join();
assertFalse(query.isFailed());
assertEquals(ImmutableSet.copyOf(query.getResult()), ImmutableSet.of(
of("Step 1", "test0", 3L), of("Step 1", "test1", 3L),
of("Step 2", "test0", 3L), of("Step 2", "test1", 3L),
of("Step 3", "test0", 3L), of("Step 3", "test1", 3L)));
}
@Test
public void testMultipleStepsGroupingApproximate() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", null), new FunnelStep("test1", null), new FunnelStep("test2", null)),
Optional.of("teststr"),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.empty(), UTC,
Optional.empty(), APPROXIMATE).getResult().join();
assertFalse(query.isFailed());
}
@Test(dataProvider = "types")
public void testDimension(FunnelQueryExecutor.FunnelType funnelType) throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", null), new FunnelStep("test1", null)),
Optional.of("teststr"),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.of(new FunnelWindow(30, DAY)), UTC,
Optional.empty(), funnelType).getResult().join();
assertFalse(query.isFailed());
assertEquals(ImmutableSet.copyOf(query.getResult()),
ImmutableSet.of(
of("Step 1", "test0", 3L),
of("Step 1", "test1", 3L),
of("Step 2", "test0", 3L),
of("Step 2", "test1", 3L)));
}
@Test
public void testDimensionApproximate() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", null), new FunnelStep("test1", null)),
Optional.of("teststr"),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.of(new FunnelWindow(30, DAY)), UTC,
Optional.empty(), NORMAL).getResult().join();
assertFalse(query.isFailed());
}
@Test(dataProvider = "types")
public void testFilter(FunnelQueryExecutor.FunnelType funnelType) throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", Optional.of("teststr = 'test1'")), new FunnelStep("test1", Optional.of("teststr = 'test1'"))),
Optional.of("teststr"),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.of(new FunnelWindow(30, DAY)), UTC,
Optional.empty(), funnelType).getResult().join();
assertFalse(query.isFailed());
assertEquals(query.getResult(), of(of("Step 1", "test1", 3L), of("Step 2", "test1", 3L)));
}
@Test
public void testFilterApproximate() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", Optional.of("teststr = 'test1'")), new FunnelStep("test1", Optional.of("teststr = 'test1'"))),
Optional.of("teststr"),
Optional.empty(),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.empty(), UTC,
Optional.empty(), APPROXIMATE).getResult().join();
assertFalse(query.isFailed());
}
@Test
public void testSegment() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", Optional.of("teststr = 'test1'")), new FunnelStep("test1", Optional.of("teststr = 'test1'"))),
Optional.of("_time"),
Optional.of(FunnelQueryExecutor.FunnelTimestampSegments.DAY_OF_MONTH.value()),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.empty(), UTC,
Optional.empty(), NORMAL).getResult().join();
assertFalse(query.isFailed());
assertEquals(query.getResult(), of(of("Step 1", "1th day", 3L), of("Step 2", "1th day", 3L)));
}
@Test
public void testSegmentOrdered() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", Optional.of("teststr = 'test1'")), new FunnelStep("test1", Optional.of("teststr = 'test1'"))),
Optional.of("_time"),
Optional.of(FunnelQueryExecutor.FunnelTimestampSegments.DAY_OF_MONTH.value()),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.empty(), UTC,
Optional.empty(), ORDERED).getResult().join();
assertFalse(query.isFailed());
assertEquals(query.getResult(), of(of("Step 1", "1th day", 3L), of("Step 2", "1th day", 3L)));
}
@Test
public void testSegmentApproximate() throws Exception {
QueryResult query = getFunnelQueryExecutor().query(PROJECT_NAME,
of(new FunnelStep("test0", Optional.of("teststr = 'test1'")), new FunnelStep("test1", Optional.of("teststr = 'test1'"))),
Optional.of("_time"),
Optional.of(FunnelQueryExecutor.FunnelTimestampSegments.DAY_OF_MONTH.value()),
LocalDate.ofEpochDay(0),
LocalDate.ofEpochDay(SCALE_FACTOR), Optional.empty(), UTC,
Optional.empty(), APPROXIMATE).getResult().join();
assertFalse(query.isFailed());
assertEquals(query.getResult(), of(of("Step 1", "1th day", 3L), of("Step 2", "1th day", 3L)));
}
@Test
public void testSameConnectorAndDimension() throws Exception {
}
@Test
public void testLongConnector() throws Exception {
}
}
|
PrestoApproximate test failed due to different implementation
|
rakam-spi/src/test/java/org/rakam/analysis/TestFunnelQueryExecutor.java
|
PrestoApproximate test failed due to different implementation
|
|
Java
|
lgpl-2.1
|
71f45f765962b76c1a15e8764e9d724e72f28b88
| 0
|
spotbugs/spotbugs,johnscancella/spotbugs,spotbugs/spotbugs,KengoTODA/spotbugs,sewe/spotbugs,sewe/spotbugs,johnscancella/spotbugs,spotbugs/spotbugs,KengoTODA/spotbugs,sewe/spotbugs,KengoTODA/spotbugs,spotbugs/spotbugs,johnscancella/spotbugs,sewe/spotbugs,johnscancella/spotbugs,spotbugs/spotbugs,KengoTODA/spotbugs
|
/*
* FindBugs - Find bugs in Java programs
* Copyright (C) 2004-2006 University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package edu.umd.cs.findbugs.detect;
import org.apache.bcel.classfile.Code;
import org.apache.bcel.classfile.LineNumberTable;
import edu.umd.cs.findbugs.BugAccumulator;
import edu.umd.cs.findbugs.BugInstance;
import edu.umd.cs.findbugs.BugReporter;
import edu.umd.cs.findbugs.LocalVariableAnnotation;
import edu.umd.cs.findbugs.OpcodeStack;
import edu.umd.cs.findbugs.ba.SignatureParser;
import edu.umd.cs.findbugs.ba.XField;
import edu.umd.cs.findbugs.bcel.OpcodeStackDetector;
public class FindSelfComparison extends OpcodeStackDetector {
final BugAccumulator bugAccumulator;
public FindSelfComparison(BugReporter bugReporter) {
this.bugAccumulator = new BugAccumulator(bugReporter);
}
String f;
String className;
int state;
int putFieldRegister;
int putFieldPC = Integer.MIN_VALUE;
OpcodeStack.Item putFieldObj;
OpcodeStack.Item putFieldValue;
XField putFieldXField;
@Override
public void visit(Code obj) {
// System.out.println(getFullyQualifiedMethodName());
whichRegister = -1;
registerLoadCount = 0;
state = 0;
resetDoubleAssignmentState();
super.visit(obj);
resetDoubleAssignmentState();
bugAccumulator.reportAccumulatedBugs();
}
/**
* Clears the saved PUTFIELD state used to detect consecutive assignments to the same field.
*/
private void resetDoubleAssignmentState() {
putFieldPC = Integer.MIN_VALUE;
putFieldXField = null;
putFieldValue = null;
putFieldObj = null;
}
@Override
public void sawBranchTo(int target) {
resetDoubleAssignmentState();
}
@Override
public void sawOpcode(int seen) {
// System.out.println(getPC() + " " + OPCODE_NAMES[seen] + " " + whichRegister + " " + registerLoadCount);
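// Track consecutive PUTFIELDs to the same field of the same object: report SA_FIELD_DOUBLE_ASSIGNMENT
// when the second store follows within 10 bytecodes and (when line numbers are available) lands on the
// same or an adjacent source line; the priority is higher when the two assigned values differ.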
checkPUTFIELD: if (seen == PUTFIELD) {
OpcodeStack.Item obj = stack.getStackItem(1);
OpcodeStack.Item value = stack.getStackItem(0);
XField f = getXFieldOperand();
if (putFieldPC + 10 > getPC()
&& f.equals(putFieldXField)
&& obj.equals(putFieldObj)) {
LineNumberTable table = getCode().getLineNumberTable();
if (table != null) {
int first = table.getSourceLine(putFieldPC);
int second = table.getSourceLine(getPC());
if (first+1 < second)
break checkPUTFIELD;
} else if (putFieldPC + 4 < getPC())
break checkPUTFIELD;
int priority = value.equals(putFieldValue) ? NORMAL_PRIORITY : HIGH_PRIORITY;
bugAccumulator.accumulateBug(new BugInstance(this, "SA_FIELD_DOUBLE_ASSIGNMENT", priority)
.addClassAndMethod(this)
.addReferencedField(this), this);
}
putFieldPC = getPC();
putFieldXField = f;
putFieldObj = obj;
putFieldValue = value;
} else if (isReturn(seen))
resetDoubleAssignmentState();
if (false) switch (state) {
case 0:
if (seen == DUP_X1) state = 4;
break;
case 4:
if (seen == PUTFIELD) {
f = getRefConstantOperand();
className = getClassConstantOperand();
OpcodeStack.Item item1 = stack.getStackItem(1);
putFieldRegister = item1.getRegisterNumber();
if (putFieldRegister >= 0)
state = 5;
else state = 0;
} else
state = 0;
break;
case 5:
if (seen == PUTFIELD && getRefConstantOperand().equals(f) && getClassConstantOperand().equals(className)) {
OpcodeStack.Item item1 = stack.getStackItem(1);
if (putFieldRegister == item1.getRegisterNumber())
bugAccumulator.accumulateBug(new BugInstance(this, "SA_FIELD_DOUBLE_ASSIGNMENT", NORMAL_PRIORITY)
.addClassAndMethod(this)
.addReferencedField(this), this);
}
state = 0;
break;
}
switch (seen) {
case INVOKEVIRTUAL:
case INVOKEINTERFACE:
if (getClassName().toLowerCase().indexOf("test") >= 0)
break;
if (getMethodName().toLowerCase().indexOf("test") >= 0)
break;
if (getSuperclassName().toLowerCase().indexOf("test") >= 0)
break;
if (getNextOpcode() == POP)
break;
String name = getNameConstantOperand();
if (name.equals("equals") || name.equals("compareTo")) {
String sig = getSigConstantOperand();
SignatureParser parser = new SignatureParser(sig);
if (parser.getNumParameters() == 1
&& (name.equals("equals") && sig.endsWith(";)Z") || name.equals("compareTo") && sig.endsWith(";)I")))
checkForSelfOperation(seen, "COMPARISON");
}
break;
case LOR:
case LAND:
case LXOR:
case LSUB:
case IOR:
case IAND:
case IXOR:
case ISUB:
checkForSelfOperation(seen, "COMPUTATION");
break;
case FCMPG:
case DCMPG:
case DCMPL:
case FCMPL:
break;
case LCMP:
case IF_ACMPEQ:
case IF_ACMPNE:
case IF_ICMPNE:
case IF_ICMPEQ:
case IF_ICMPGT:
case IF_ICMPLE:
case IF_ICMPLT:
case IF_ICMPGE:
checkForSelfOperation(seen, "COMPARISON");
}
if (isRegisterLoad() && seen != IINC) {
if (getRegisterOperand() == whichRegister) registerLoadCount++;
else {
whichRegister = getRegisterOperand();
registerLoadCount = 1;
}
} else {
whichRegister = -1;
registerLoadCount = 0;
}
}
int whichRegister;
int registerLoadCount;
private void checkForSelfOperation(int opCode, String op) {
{
OpcodeStack.Item item0 = stack.getStackItem(0);
OpcodeStack.Item item1 = stack.getStackItem(1);
if (item0.getSignature().equals("D")
|| item0.getSignature().equals("F"))
return;
if (item1.getSignature().equals("D")
|| item1.getSignature().equals("F"))
return;
XField field0 = item0.getXField();
XField field1 = item1.getXField();
int fr0 = item0.getFieldLoadedFromRegister();
int fr1 = item1.getFieldLoadedFromRegister();
if (field0 != null && field0.equals(field1) && fr0 != -1 && fr0 == fr1)
bugAccumulator.accumulateBug(new BugInstance(this,
"SA_FIELD_SELF_" + op, NORMAL_PRIORITY)
.addClassAndMethod(this).addField(field0), this);
else if (opCode == IXOR && item0.equals(item1)) {
LocalVariableAnnotation localVariableAnnotation = LocalVariableAnnotation
.getLocalVariableAnnotation(this, item0);
if (localVariableAnnotation != null)
bugAccumulator.accumulateBug(new BugInstance(this,
"SA_LOCAL_SELF_" + op, HIGH_PRIORITY)
.addClassAndMethod(this).add(
localVariableAnnotation),this);
} else if (opCode == ISUB && registerLoadCount >= 2) { // let FindSelfComparison2 report this; more accurate
bugAccumulator.accumulateBug(new BugInstance(this,
"SA_LOCAL_SELF_" + op, (opCode == ISUB || opCode == LSUB || opCode == INVOKEINTERFACE || opCode == INVOKEVIRTUAL) ? NORMAL_PRIORITY : HIGH_PRIORITY)
.addClassAndMethod(this).add(
LocalVariableAnnotation
.getLocalVariableAnnotation(
getMethod(), whichRegister, getPC(),
getPC() - 1)),this);
}
}
}
}
|
findbugs/src/java/edu/umd/cs/findbugs/detect/FindSelfComparison.java
|
/*
* FindBugs - Find bugs in Java programs
* Copyright (C) 2004-2006 University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package edu.umd.cs.findbugs.detect;
import org.apache.bcel.classfile.Code;
import edu.umd.cs.findbugs.BugAccumulator;
import edu.umd.cs.findbugs.BugInstance;
import edu.umd.cs.findbugs.BugReporter;
import edu.umd.cs.findbugs.LocalVariableAnnotation;
import edu.umd.cs.findbugs.OpcodeStack;
import edu.umd.cs.findbugs.ba.SignatureParser;
import edu.umd.cs.findbugs.ba.XField;
import edu.umd.cs.findbugs.bcel.OpcodeStackDetector;
public class FindSelfComparison extends OpcodeStackDetector {
final BugAccumulator bugAccumulator;
public FindSelfComparison(BugReporter bugReporter) {
this.bugAccumulator = new BugAccumulator(bugReporter);
}
String f;
String className;
int state;
int putFieldRegister;
int putFieldPC = Integer.MIN_VALUE;
OpcodeStack.Item putFieldObj;
OpcodeStack.Item putFieldValue;
XField putFieldXField;
@Override
public void visit(Code obj) {
whichRegister = -1;
registerLoadCount = 0;
state = 0;
resetDoubleAssignmentState();
super.visit(obj);
resetDoubleAssignmentState();
bugAccumulator.reportAccumulatedBugs();
}
/**
* Clears the saved PUTFIELD state used to detect consecutive assignments to the same field.
*/
private void resetDoubleAssignmentState() {
putFieldPC = Integer.MIN_VALUE;
putFieldXField = null;
putFieldValue = null;
putFieldObj = null;
}
@Override
public void sawBranchTo(int target) {
resetDoubleAssignmentState();
}
@Override
public void sawOpcode(int seen) {
// System.out.println(getPC() + " " + OPCODE_NAMES[seen] + " " + whichRegister + " " + registerLoadCount);
if (seen == PUTFIELD) {
OpcodeStack.Item obj = stack.getStackItem(1);
OpcodeStack.Item value = stack.getStackItem(0);
XField f = getXFieldOperand();
if (putFieldPC + 10 > getPC()
&& f.equals(putFieldXField)
&& obj.equals(putFieldObj)) {
int priority = value.equals(putFieldValue) ? NORMAL_PRIORITY : HIGH_PRIORITY;
bugAccumulator.accumulateBug(new BugInstance(this, "SA_FIELD_DOUBLE_ASSIGNMENT", priority)
.addClassAndMethod(this)
.addReferencedField(this), this);
}
putFieldPC = getPC();
putFieldXField = f;
putFieldObj = obj;
putFieldValue = value;
} else if (isReturn(seen))
resetDoubleAssignmentState();
if (false) switch (state) {
case 0:
if (seen == DUP_X1) state = 4;
break;
case 4:
if (seen == PUTFIELD) {
f = getRefConstantOperand();
className = getClassConstantOperand();
OpcodeStack.Item item1 = stack.getStackItem(1);
putFieldRegister = item1.getRegisterNumber();
if (putFieldRegister >= 0)
state = 5;
else state = 0;
} else
state = 0;
break;
case 5:
if (seen == PUTFIELD && getRefConstantOperand().equals(f) && getClassConstantOperand().equals(className)) {
OpcodeStack.Item item1 = stack.getStackItem(1);
if (putFieldRegister == item1.getRegisterNumber())
bugAccumulator.accumulateBug(new BugInstance(this, "SA_FIELD_DOUBLE_ASSIGNMENT", NORMAL_PRIORITY)
.addClassAndMethod(this)
.addReferencedField(this), this);
}
state = 0;
break;
}
switch (seen) {
case INVOKEVIRTUAL:
case INVOKEINTERFACE:
if (getClassName().toLowerCase().indexOf("test") >= 0)
break;
if (getMethodName().toLowerCase().indexOf("test") >= 0)
break;
if (getSuperclassName().toLowerCase().indexOf("test") >= 0)
break;
if (getNextOpcode() == POP)
break;
String name = getNameConstantOperand();
if (name.equals("equals") || name.equals("compareTo")) {
String sig = getSigConstantOperand();
SignatureParser parser = new SignatureParser(sig);
if (parser.getNumParameters() == 1
&& (name.equals("equals") && sig.endsWith(";)Z") || name.equals("compareTo") && sig.endsWith(";)I")))
checkForSelfOperation(seen, "COMPARISON");
}
break;
case LOR:
case LAND:
case LXOR:
case LSUB:
case IOR:
case IAND:
case IXOR:
case ISUB:
checkForSelfOperation(seen, "COMPUTATION");
break;
case FCMPG:
case DCMPG:
case DCMPL:
case FCMPL:
break;
case LCMP:
case IF_ACMPEQ:
case IF_ACMPNE:
case IF_ICMPNE:
case IF_ICMPEQ:
case IF_ICMPGT:
case IF_ICMPLE:
case IF_ICMPLT:
case IF_ICMPGE:
checkForSelfOperation(seen, "COMPARISON");
}
if (isRegisterLoad() && seen != IINC) {
if (getRegisterOperand() == whichRegister) registerLoadCount++;
else {
whichRegister = getRegisterOperand();
registerLoadCount = 1;
}
} else {
whichRegister = -1;
registerLoadCount = 0;
}
}
int whichRegister;
int registerLoadCount;
private void checkForSelfOperation(int opCode, String op) {
{
OpcodeStack.Item item0 = stack.getStackItem(0);
OpcodeStack.Item item1 = stack.getStackItem(1);
if (item0.getSignature().equals("D")
|| item0.getSignature().equals("F"))
return;
if (item1.getSignature().equals("D")
|| item1.getSignature().equals("F"))
return;
XField field0 = item0.getXField();
XField field1 = item1.getXField();
int fr0 = item0.getFieldLoadedFromRegister();
int fr1 = item1.getFieldLoadedFromRegister();
if (field0 != null && field0.equals(field1) && fr0 != -1 && fr0 == fr1)
bugAccumulator.accumulateBug(new BugInstance(this,
"SA_FIELD_SELF_" + op, NORMAL_PRIORITY)
.addClassAndMethod(this).addField(field0), this);
else if (opCode == IXOR && item0.equals(item1)) {
LocalVariableAnnotation localVariableAnnotation = LocalVariableAnnotation
.getLocalVariableAnnotation(this, item0);
if (localVariableAnnotation != null)
bugAccumulator.accumulateBug(new BugInstance(this,
"SA_LOCAL_SELF_" + op, HIGH_PRIORITY)
.addClassAndMethod(this).add(
localVariableAnnotation),this);
} else if (opCode == ISUB && registerLoadCount >= 2) { // let FindSelfComparison2 report this; more accurate
bugAccumulator.accumulateBug(new BugInstance(this,
"SA_LOCAL_SELF_" + op, (opCode == ISUB || opCode == LSUB || opCode == INVOKEINTERFACE || opCode == INVOKEVIRTUAL) ? NORMAL_PRIORITY : HIGH_PRIORITY)
.addClassAndMethod(this).add(
LocalVariableAnnotation
.getLocalVariableAnnotation(
getMethod(), whichRegister, getPC(),
getPC() - 1)),this);
}
}
}
}
|
Fix for the false positives?
git-svn-id: e7d6bde23f017c9ff4efd468d79d66def666766b@11339 eae3c2d3-9b19-0410-a86e-396b6ccb6ab3
|
findbugs/src/java/edu/umd/cs/findbugs/detect/FindSelfComparison.java
|
Fix for the false positives?
|
|
Java
|
lgpl-2.1
|
a36ac91b27461b5cd0f1c8c4f39228e637e4a6a0
| 0
|
gallandarakhneorg/checkstyle,autermann/checkstyle,gallandarakhneorg/checkstyle,baratali/checkstyle,zofuthan/checkstyle-1,ivanov-alex/checkstyle,vboerchers/checkstyle,WilliamRen/checkstyle,sirdis/checkstyle,sharang108/checkstyle,rnveach/checkstyle,attatrol/checkstyle,baratali/checkstyle,Bhavik3/checkstyle,llocc/checkstyle,AkshitaKukreja30/checkstyle,attatrol/checkstyle,checkstyle/checkstyle,liscju/checkstyle,jasonchaffee/checkstyle,sharang108/checkstyle,HubSpot/checkstyle,baratali/checkstyle,sabaka/checkstyle,FeodorFitsner/checkstyle,beckerhd/checkstyle,AkshitaKukreja30/checkstyle,checkstyle/checkstyle,Bhavik3/checkstyle,bansalayush/checkstyle,jonmbake/checkstyle,romani/checkstyle,izishared/checkstyle,attatrol/checkstyle,izishared/checkstyle,sharang108/checkstyle,FeodorFitsner/checkstyle,designreuse/checkstyle,jochenvdv/checkstyle,ilanKeshet/checkstyle,zofuthan/checkstyle-1,autermann/checkstyle,llocc/checkstyle,vboerchers/checkstyle,ilanKeshet/checkstyle,sabaka/checkstyle,ivanov-alex/checkstyle,jasonchaffee/checkstyle,ivanov-alex/checkstyle,checkstyle/checkstyle,sirdis/checkstyle,ilanKeshet/checkstyle,bansalayush/checkstyle,llocc/checkstyle,HubSpot/checkstyle,MEZk/checkstyle,bansalayush/checkstyle,AkshitaKukreja30/checkstyle,rnveach/checkstyle,beckerhd/checkstyle,autermann/checkstyle,gallandarakhneorg/checkstyle,liscju/checkstyle,romani/checkstyle,rnveach/checkstyle,HubSpot/checkstyle,checkstyle/checkstyle,nikhilgupta23/checkstyle,philwebb/checkstyle,checkstyle/checkstyle,romani/checkstyle,designreuse/checkstyle,izishared/checkstyle,WilliamRen/checkstyle,WilliamRen/checkstyle,jonmbake/checkstyle,rnveach/checkstyle,MEZk/checkstyle,jochenvdv/checkstyle,romani/checkstyle,philwebb/checkstyle,liscju/checkstyle,designreuse/checkstyle,sabaka/checkstyle,zofuthan/checkstyle-1,vboerchers/checkstyle,FeodorFitsner/checkstyle,jonmbake/checkstyle,jochenvdv/checkstyle,rnveach/checkstyle,romani/checkstyle,rnveach/checkstyle,MEZk/checkstyle,jasonchaffee/checkstyle,sirdis/checkstyle,nikhilgupta23/checkstyle,checkstyle/checkstyle,nikhilgupta23/checkstyle,beckerhd/checkstyle,romani/checkstyle,philwebb/checkstyle,Bhavik3/checkstyle
|
////////////////////////////////////////////////////////////////////////////////
// checkstyle: Checks Java source code for adherence to a set of rules.
// Copyright (C) 2001-2015 the original author or authors.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
////////////////////////////////////////////////////////////////////////////////
package com.puppycrawl.tools.checkstyle;
import java.util.Properties;
/**
* Resolves external properties from an
* underlying {@code Properties} object.
*
* @author lkuehne
*/
public final class PropertiesExpander
implements PropertyResolver {
/** The underlying Properties object. */
private final Properties properties = new Properties();
/**
* Creates a new PropertiesExpander.
* @param properties the underlying properties to use for
* property resolution.
* @throws IllegalArgumentException indicates null was passed
* @noinspection CollectionDeclaredAsConcreteClass
*/
public PropertiesExpander(Properties properties) {
if (properties == null) {
throw new IllegalArgumentException("cannot pass null");
}
this.properties.putAll(properties);
}
@Override
public String resolve(String name) {
return properties.getProperty(name);
}
}
|
src/main/java/com/puppycrawl/tools/checkstyle/PropertiesExpander.java
|
////////////////////////////////////////////////////////////////////////////////
// checkstyle: Checks Java source code for adherence to a set of rules.
// Copyright (C) 2001-2015 the original author or authors.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
////////////////////////////////////////////////////////////////////////////////
package com.puppycrawl.tools.checkstyle;
import java.util.Properties;
/**
* Resolves external properties from an
* underlying {@code Properties} object.
*
* @author lkuehne
*/
public final class PropertiesExpander
implements PropertyResolver {
/** The underlying Properties object. */
private final Properties properties = new Properties();
/**
* Creates a new PropertiesExpander.
* @param properties the underlying properties to use for
* property resolution.
* @throws IllegalArgumentException indicates null was passed
*/
public PropertiesExpander(Properties properties) {
if (properties == null) {
throw new IllegalArgumentException("cannot pass null");
}
this.properties.putAll(properties);
}
@Override
public String resolve(String name) {
return properties.getProperty(name);
}
}
|
Revert "Issue #2065: additional attempt to resolve violation on TeamCity"
This reverts commit 652848ae065112ae0b7417a0d0109afd3ef0f04f.
|
src/main/java/com/puppycrawl/tools/checkstyle/PropertiesExpander.java
|
Revert "Issue #2065: additional attempt to resolve violation on TeamCity"
|
|
Java
|
lgpl-2.1
|
9313f0af2ef120fc3262721dcf6ae38d8dfa5eb9
| 0
|
EnFlexIT/AgentWorkbench,EnFlexIT/AgentWorkbench,EnFlexIT/AgentWorkbench,EnFlexIT/AgentWorkbench
|
package mas.projects.contmas.agents;
import jade.content.AgentAction;
import jade.content.Concept;
import jade.content.lang.Codec.CodecException;
import jade.content.onto.OntologyException;
import jade.content.onto.UngroundedException;
import jade.core.Agent;
import jade.domain.FIPANames;
import jade.lang.acl.ACLMessage;
import jade.lang.acl.MessageTemplate;
import jade.proto.AchieveREResponder;
import java.util.Random;
import mas.projects.contmas.ontology.*;
public class RandomGeneratorAgent extends ContainerAgent{
public RandomGeneratorAgent() {
super("random-generation");
}
protected void setup(){
super.setup();
//create filter for incoming messages
MessageTemplate mt = AchieveREResponder.createMessageTemplate(FIPANames.InteractionProtocol.FIPA_REQUEST);
addBehaviour(new createRandomBayMap (this,mt));
mt = AchieveREResponder.createMessageTemplate(FIPANames.InteractionProtocol.FIPA_REQUEST);
addBehaviour(new populateBayMap (this,mt));
}
public class createRandomBayMap extends AchieveREResponder{
public createRandomBayMap(Agent a, MessageTemplate mt) {
super(a, mt);
}
protected ACLMessage prepareResponse(ACLMessage request) {
ACLMessage reply = request.createReply();
Concept content;
try {
content = ((AgentAction) getContentManager().extractContent(request));
if(content instanceof RequestRandomBayMap) {
RequestRandomBayMap input=(RequestRandomBayMap) content;
reply.setPerformative(ACLMessage.INFORM);
Integer width, length, height;
Random RandomGenerator=new Random();
width=RandomGenerator.nextInt(input.getX_dimension());
length=RandomGenerator.nextInt(input.getY_dimension());
height=RandomGenerator.nextInt(input.getZ_dimension());
BayMap LoadBay=new BayMap();
LoadBay.setX_dimension(width);
LoadBay.setY_dimension(length);
LoadBay.setZ_dimension(height);
ProvideBayMap act=new ProvideBayMap();
act.setProvides(LoadBay);
getContentManager().fillContent(reply, act);
} else {
reply.setPerformative(ACLMessage.NOT_UNDERSTOOD);
reply.setContent("Fehler");
}
return reply;
} catch (UngroundedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (CodecException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (OntologyException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
} // end prepareResponse()
}
public class populateBayMap extends AchieveREResponder{
public populateBayMap(Agent a, MessageTemplate mt) {
super(a, mt);
}
protected ACLMessage prepareResponse(ACLMessage request) {
ACLMessage reply = request.createReply();
AgentAction content;
try {
content = ((AgentAction) getContentManager().extractContent(request));
if(content instanceof RequestPopulatedBayMap) {
BayMap LoadBay=((RequestPopulatedBayMap) content).getPopulate_on();
reply.setPerformative(ACLMessage.INFORM);
Integer width, length, height;
Random RandomGenerator=new Random();
String containerName;
Container c;
BlockAddress ba;
//old
width=LoadBay.getX_dimension();
length=LoadBay.getY_dimension();
height=LoadBay.getZ_dimension();
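// Randomly occupy roughly half of the ground-level (z == 0) slots; higher levels are skipped for now
// (see the TODO below about checking for a supporting container underneath).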
for(int z=0;z<height;z++){
for(int y=0;y<length;y++){
for(int x=0;x<width;x++){
if(RandomGenerator.nextInt(2)==1 && (z==0 || 1==2)){ //TODO check whether the container below already exists (do not place containers "in mid-air")
containerName="Container-ID: #"+RandomGenerator.nextInt(65000);
c=new Container();
ba=new BlockAddress();
ba.setAddresses_within(LoadBay);
ba.setX_dimension(x);
ba.setY_dimension(y);
ba.setZ_dimension(z);
c.setOccupies(ba);
c.setId(containerName);
LoadBay.addIs_filled_with(ba);
}
}
}
}
//end old
ProvidePopulatedBayMap act=new ProvidePopulatedBayMap();
act.setProvides(LoadBay);
getContentManager().fillContent(reply, act);
} else {
reply.setPerformative(ACLMessage.NOT_UNDERSTOOD);
reply.setContent("Fehler");
}
return reply;
} catch (UngroundedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (CodecException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (OntologyException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
}
}
|
src/mas/projects/contmas/agents/RandomGeneratorAgent.java
|
package mas.projects.contmas.agents;
import jade.content.AgentAction;
import jade.content.Concept;
import jade.content.lang.Codec.CodecException;
import jade.content.onto.OntologyException;
import jade.content.onto.UngroundedException;
import jade.core.Agent;
import jade.domain.FIPANames;
import jade.lang.acl.ACLMessage;
import jade.lang.acl.MessageTemplate;
import jade.proto.AchieveREResponder;
import java.util.Random;
import mas.projects.contmas.ontology.*;
public class RandomGeneratorAgent extends ContainerAgent{
public RandomGeneratorAgent() {
super("random-generation");
}
protected void setup(){
super.setup();
//create filter for incoming messages
MessageTemplate mt = AchieveREResponder.createMessageTemplate(FIPANames.InteractionProtocol.FIPA_REQUEST);
addBehaviour(new createRandomBayMap (this,mt));
mt = AchieveREResponder.createMessageTemplate(FIPANames.InteractionProtocol.FIPA_REQUEST);
addBehaviour(new populateBayMap (this,mt));
}
public class createRandomBayMap extends AchieveREResponder{
public createRandomBayMap(Agent a, MessageTemplate mt) {
super(a, mt);
}
protected ACLMessage prepareResponse(ACLMessage request) {
ACLMessage reply = request.createReply();
Concept content;
try {
content = ((AgentAction) getContentManager().extractContent(request));
if(content instanceof RequestRandomBayMap) {
RequestRandomBayMap input=(RequestRandomBayMap) content;
reply.setPerformative(ACLMessage.INFORM);
Integer width, length, height;
Random RandomGenerator=new Random();
width=RandomGenerator.nextInt(input.getX_dimension());
length=RandomGenerator.nextInt(input.getY_dimension());
height=RandomGenerator.nextInt(input.getZ_dimension());
BayMap LoadBay=new BayMap();
LoadBay.setX_dimension(width);
LoadBay.setY_dimension(length);
LoadBay.setZ_dimension(height);
ProvideBayMap act=new ProvideBayMap();
act.setProvides(LoadBay);
getContentManager().fillContent(reply, act);
} else {
reply.setPerformative(ACLMessage.NOT_UNDERSTOOD);
reply.setContent("Fehler");
}
return reply;
} catch (UngroundedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (CodecException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (OntologyException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
} // end prepareResponse()
}
public class populateBayMap extends AchieveREResponder{
public populateBayMap(Agent a, MessageTemplate mt) {
super(a, mt);
}
protected ACLMessage prepareResponse(ACLMessage request) {
ACLMessage reply = request.createReply();
AgentAction content;
try {
content = ((AgentAction) getContentManager().extractContent(request));
if(content instanceof RequestPopulatedBayMap) {
BayMap LoadBay=((RequestPopulatedBayMap) content).getPopulate_on();
reply.setPerformative(ACLMessage.INFORM);
Integer width, length, height;
Random RandomGenerator=new Random();
String containerName;
Container c;
BlockAddress ba;
//old
width=LoadBay.getX_dimension();
length=LoadBay.getY_dimension();
height=LoadBay.getZ_dimension();
for(int z=0;z<height;z++){
for(int y=0;y<length;y++){
for(int x=0;x<width;x++){
if(RandomGenerator.nextInt(2)==1 && (z==0 || 1==2)){ //TODO check whether the container below already exists (do not place containers "in mid-air")
containerName="Container-ID: #"+RandomGenerator.nextInt(65000);
c=new Container();
ba=new BlockAddress();
ba.setAddresses_within(LoadBay);
ba.setX_dimension(x);
ba.setY_dimension(y);
ba.setZ_dimension(z);
c.setOccupies(ba);
c.setId(containerName);
}
}
}
}
//end old
ProvidePopulatedBayMap act=new ProvidePopulatedBayMap();
act.setProvides(LoadBay);
getContentManager().fillContent(reply, act);
} else {
reply.setPerformative(ACLMessage.NOT_UNDERSTOOD);
reply.setContent("Fehler");
}
return reply;
} catch (UngroundedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (CodecException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (OntologyException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
}
}
|
git-svn-id: svn://project.agent-hygrid.net/agentgui/trunk/AgentGUI@33 014e83ad-c670-0410-b1a0-9290c87bb784
|
src/mas/projects/contmas/agents/RandomGeneratorAgent.java
| ||
Java
|
lgpl-2.1
|
80840ff9aa4a7fab111a1c8c31d55fb014f20328
| 0
|
ethaneldridge/vassal,ethaneldridge/vassal,ethaneldridge/vassal
|
/*
*
* Copyright (c) 2008 by Joel Uckelman
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License (LGPL) as published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, copies are available
* at http://www.opensource.org.
*/
package VASSAL.tools;
import java.io.PrintWriter;
import java.io.StringWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Joel Uckelman
* @since 3.1.0
*/
public class ThrowableUtils {
private ThrowableUtils() {}
private static final Logger logger =
LoggerFactory.getLogger(ThrowableUtils.class);
/**
* Returns the most recent {@link Throwable} of class <code>T</code> in
* the proper causal history of the given <code>Throwable</code>, if one
* exists.
*
* @param cl the {@link Class} to search for
* @param t the <code>Throwable</code> to check
* @return the proper ancestor of class <code>T</code>, or <code>null</code>
* if none exists
*/
public static <T extends Throwable> T getAncestor(Class<T> cl, Throwable t) {
// traverse the causal history of t until a cause of type cl is found
for (Throwable c = t.getCause(); c != null; c = c.getCause()) {
if (cl.isInstance(c)) return cl.cast(c);
}
return null;
}
/**
* Returns the most recent {@link Throwable} of class <code>T</code> in
* the (not necessarily proper) causal history of the given
* <code>Throwable</code>, if one exists. If the given
* <code>Throwable</code> is of class <code>T</code>, it will be returned.
*
* @param cl the {@link Class} to search for
* @param t the <code>Throwable</code> to check
* @return the ancestor of class <code>T</code>, or <code>null</code>
* if none exists
*/
public static <T extends Throwable> T getRecent(Class<T> cl, Throwable t) {
if (cl.isInstance(t)) return cl.cast(t);
return getAncestor(cl, t);
}
/**
* Throws the most recent {@link Throwable} of class <code>T</code> in
* the proper causal history of the given <code>Throwable</code>, if one
* exists.
*
* @param cl the <code>Class</code> to search for
* @param t the <code>Throwable</code> to check
* @throws T if an ancestor of that class is found
*/
public static <T extends Throwable> void throwAncestor(
Class<T> cl, Throwable t) throws T {
final T ancestor = getAncestor(cl, t);
if (ancestor != null) throwMe(cl, ancestor);
}
/**
* Throws the most recent {@link Throwable} of class <code>T</code> in
* the (not necessarily proper) causal history of the given
* <code>Throwable</code>, if one exists.
*
* @param cl the <code>Class</code> to search for
* @param t the <code>Throwable</code> to check
* @throws T if an ancestor of that class is found
*/
public static <T extends Throwable> void throwRecent(Class<T> cl,
Throwable t) throws T {
if (cl.isInstance(t)) throwMe(cl, t);
else throwAncestor(cl, t);
}
private static <T extends Throwable> void throwMe(Class<T> cl, Throwable t)
throws T {
T toThrow = null;
try {
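// Instantiate the target exception type via its no-arg constructor and attach t as the cause,
// so T does not need to declare a (Throwable) constructor.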
toThrow = cl.cast(cl.getConstructor().newInstance().initCause(t));
}
catch (Throwable ignore) {
// If anything happens here, we're screwed anyway, as we're already
// calling this during error handling. Just log it and soldier on.
logger.warn("ignored", ignore); //NON-NLS
}
if (toThrow != null) throw toThrow;
}
/**
* Converts a {@link Throwable}'s stack trace to a {@link String}.
*
* @param thrown the <code>Throwable</code> with the stack trace to convert
* @return the stack trace as a <code>String</code>
*/
public static String getStackTrace(Throwable thrown) {
final StringWriter sw = new StringWriter();
final PrintWriter pw = new PrintWriter(sw);
thrown.printStackTrace(pw);
pw.flush();
return sw.toString();
}
}
|
vassal-app/src/main/java/VASSAL/tools/ThrowableUtils.java
|
/*
*
* Copyright (c) 2008 by Joel Uckelman
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License (LGPL) as published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, copies are available
* at http://www.opensource.org.
*/
package VASSAL.tools;
import java.io.PrintWriter;
import java.io.StringWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Joel Uckelman
* @since 3.1.0
*/
public class ThrowableUtils {
private ThrowableUtils() {}
private static final Logger logger =
LoggerFactory.getLogger(ThrowableUtils.class);
/**
* Returns the most recent {@link Throwable} of class <code>T</code> in
* the proper causal history of the given <code>Throwable</code>, if one
* exists.
*
* @param cl the {@link Class} to search for
* @param t the <code>Throwable</code> to check
* @return the proper ancestor of class <code>T</code>, or <code>null</code>
* if none exists
*/
public static <T extends Throwable> T getAncestor(Class<T> cl, Throwable t) {
// traverse the causal history of t until a cause of type cl is found
for (Throwable c = t.getCause(); c != null; c = c.getCause()) {
if (cl.isInstance(c)) return cl.cast(c);
}
return null;
}
/**
* Returns the most recent {@link Throwable} of class <code>T</code> in
* the (not necessarily proper) causal history of the given
* <code>Throwable</code>, if one exists. If the given
* <code>Throwable</code> is of class <code>T</code>, it will be returned.
*
* @param cl the {@link Class} to search for
* @param t the <code>Throwable</code> to check
* @return the ancestor of class <code>T</code>, or <code>null</code>
* if none exists
*/
public static <T extends Throwable> T getRecent(Class<T> cl, Throwable t) {
if (cl.isInstance(t)) return cl.cast(t);
return getAncestor(cl, t);
}
/**
* Throws the most recent {@link Throwable} of class <code>T</code> in
* the proper causal history of the given <code>Throwable</code>, if one
* exists.
*
* @param cl the <code>Class</code> to search for
* @param t the <code>Throwable</code> to check
* @throws T if an ancestor of that class is found
*/
public static <T extends Throwable> void throwAncestor(
Class<T> cl, Throwable t) throws T {
final T ancestor = getAncestor(cl, t);
if (ancestor != null) throwMe(cl, t);
}
/**
* Throws the most recent {@link Throwable} of class <code>T</code> in
* the (not necessarily proper) causal history of the given
* <code>Throwable</code>, if one exists.
*
* @param cl the <code>Class</code> to search for
* @param t the <code>Throwable</code> to check
* @throws T if an ancestor of that class is found
*/
public static <T extends Throwable> void throwRecent(Class<T> cl,
Throwable t) throws T {
if (cl.isInstance(t)) throwMe(cl, t);
else throwAncestor(cl, t);
}
private static <T extends Throwable> void throwMe(Class<T> cl, Throwable t)
throws T {
T toThrow = null;
try {
toThrow = cl.cast(cl.getConstructor().newInstance(t));
}
catch (Throwable ignore) {
// If anything happens here, we're screwed anyway, as we're already
// calling this during error handling. Just log it and soldier on.
logger.warn("ignored", ignore); //NON-NLS
}
if (toThrow != null) throw toThrow;
}
/**
* Converts a {@link Throwable}'s stack trace to a {@link String}.
*
* @param thrown the <code>Throwable</code> with the stack trace to convert
* @return the stack trace as a <code>String</code>
*/
public static String getStackTrace(Throwable thrown) {
final StringWriter sw = new StringWriter();
final PrintWriter pw = new PrintWriter(sw);
thrown.printStackTrace(pw);
pw.flush();
return sw.toString();
}
}
|
Fixed two bugs in ThrowableUtils.
|
vassal-app/src/main/java/VASSAL/tools/ThrowableUtils.java
|
Fixed two bugs in ThrowableUtils.
|
|
Java
|
apache-2.0
|
6bff6229b6d7a7b1426522e60824edb4ba38f74c
| 0
|
jaamsim/jaamsim,jaamsim/jaamsim,jaamsim/jaamsim,jaamsim/jaamsim
|
/*
* JaamSim Discrete Event Simulation
* Copyright (C) 2011 Ausenco Engineering Canada Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
package com.jaamsim.ui;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.ArrayList;
import java.util.Enumeration;
import javax.swing.JFrame;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JTree;
import javax.swing.event.TreeModelEvent;
import javax.swing.event.TreeModelListener;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
import com.jaamsim.controllers.RenderManager;
import com.jaamsim.input.Input;
import com.jaamsim.input.InputAgent;
import com.jaamsim.input.KeywordIndex;
import com.jaamsim.math.Vec3d;
import com.sandwell.JavaSimulation.Entity;
import com.sandwell.JavaSimulation.ObjectType;
import com.sandwell.JavaSimulation.Simulation;
import com.sandwell.JavaSimulation3D.DisplayEntity;
import com.sandwell.JavaSimulation3D.GUIFrame;
import com.sandwell.JavaSimulation3D.Text;
public class ObjectSelector extends FrameBox {
private static ObjectSelector myInstance;
// Tree view properties
private DefaultMutableTreeNode top;
private final DefaultTreeModel treeModel;
private final JTree tree;
private final JScrollPane treeView;
public static Entity currentEntity;
private long entSequence;
public ObjectSelector() {
super( "Object Selector" );
setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
addWindowListener(FrameBox.getCloseListener("ShowObjectSelector"));
top = new DefaultMutableTreeNode( "Defined Objects");
treeModel = new DefaultTreeModel(top);
tree = new JTree();
tree.setModel(treeModel);
tree.getSelectionModel().setSelectionMode( TreeSelectionModel.SINGLE_TREE_SELECTION );
updateTree();
treeView = new JScrollPane(tree);
getContentPane().add(treeView);
entSequence = 0;
setLocation(GUIFrame.COL1_START, GUIFrame.BOTTOM_START);
setSize(GUIFrame.COL1_WIDTH, GUIFrame.HALF_BOTTOM);
tree.addTreeSelectionListener( new MyTreeSelectionListener() );
treeModel.addTreeModelListener( new MyTreeModelListener(tree) );
tree.addMouseListener(new MyMouseListener());
tree.addKeyListener(new MyKeyListener());
}
@Override
public void setEntity(Entity ent) {
if (ent == currentEntity)
return;
currentEntity = ent;
long curSequence = Entity.getEntitySequence();
if (entSequence != curSequence) {
entSequence = curSequence;
updateTree();
}
if (currentEntity == null) {
tree.setSelectionPath(null);
return;
}
tree.setEditable(true);
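// Walk the tree depth-first to find the node whose user object is the selected entity,
// then scroll it into view and select it.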
DefaultMutableTreeNode root = (DefaultMutableTreeNode)tree.getModel().getRoot();
Enumeration<?> e = root.depthFirstEnumeration();
while (e.hasMoreElements()) {
DefaultMutableTreeNode aNode = (DefaultMutableTreeNode)e.nextElement();
if (aNode.getUserObject() == currentEntity) {
TreePath path = new TreePath(aNode.getPath());
tree.scrollPathToVisible(path);
tree.setSelectionPath(path);
return;
}
}
}
@Override
public void updateValues(double simTime) {
if (!this.isVisible())
return;
long curSequence = Entity.getEntitySequence();
if (entSequence != curSequence) {
entSequence = curSequence;
updateTree();
}
}
/**
* Returns the only instance of the Object Selector
*/
public static synchronized ObjectSelector getInstance() {
if (myInstance == null)
myInstance = new ObjectSelector();
return myInstance;
}
private synchronized static void killInstance() {
myInstance = null;
}
@Override
public void dispose() {
killInstance();
currentEntity = null;
super.dispose();
}
private void updateTree() {
// Make a best-effort attempt to find all used classes...can race with
// object creation/deletion, but that's ok
ArrayList<Class<? extends Entity>> used = new ArrayList<Class<? extends Entity>>();
for (int i = 0; i < Entity.getAll().size(); i++) {
try {
Class<? extends Entity> klass = Entity.getAll().get(i).getClass();
if (!used.contains(klass))
used.add(klass);
}
catch (IndexOutOfBoundsException e) {}
}
ArrayList<String> palettes = new ArrayList<String>();
for (int j = 0; j < ObjectType.getAll().size(); j++) {
ObjectType type = null;
try {
type = ObjectType.getAll().get(j);
}
catch (IndexOutOfBoundsException e) {
break;
}
if (!palettes.contains(type.getPaletteName()))
palettes.add(type.getPaletteName());
}
for (int k = 0; k < palettes.size(); k++) {
String palName = palettes.get(k);
DefaultMutableTreeNode palNode = getNodeFor_In(palName, top);
for (int j = 0; j < ObjectType.getAll().size(); j++) {
ObjectType type = null;
try {
type = ObjectType.getAll().get(j);
}
catch (IndexOutOfBoundsException e) {
break;
}
if(!palName.equals( type.getPaletteName()))
continue;
Class<? extends Entity> proto = type.getJavaClass();
// skip unused classes
DefaultMutableTreeNode classNode = getNodeFor_In(proto.getSimpleName(), palNode);
if (!used.contains(proto)) {
if( classNode != null ) {
classNode.removeAllChildren();
classNode.removeFromParent();
}
continue;
}
for (int i = 0; i < Entity.getAll().size(); i++) {
try {
Entity each = Entity.getAll().get(i);
// Skip all that do not match the current class
if (each.getClass() != proto)
continue;
// skip locked Entities
if (each.testFlag(Entity.FLAG_LOCKED))
continue;
DefaultMutableTreeNode eachNode = getNodeFor_In(each, classNode);
if(classNode.getIndex(eachNode) < 0)
classNode.add(eachNode);
}
catch (IndexOutOfBoundsException e) {
continue;
}
}
// Remove the killed entities from the class node
Enumeration<?> enumeration = classNode.children();
while (enumeration.hasMoreElements ()) {
DefaultMutableTreeNode each = (DefaultMutableTreeNode) enumeration.nextElement();
if (!Entity.getAll().contains(each.getUserObject())) {
classNode.remove(each);
}
}
if(!classNode.isLeaf()) {
// Class node does not exist in the package node
if(palNode.getIndex(classNode) < 0) {
palNode.add(classNode);
}
}
else if( palNode.getIndex(classNode) >= 0) {
palNode.remove(classNode);
}
}
// Palette node is not empty
if(!palNode.isLeaf()) {
if(top.getIndex(palNode) < 0)
top.add(palNode);
}
else if(top.getIndex(palNode) >= 0) {
top.remove(palNode);
}
}
// Store all the expanded paths
Enumeration<TreePath> expandedPaths = tree.getExpandedDescendants(new TreePath(top));
TreePath selectedPath = tree.getSelectionPath();
treeModel.reload(top); // refresh tree
// Restore all expanded paths and the selected path
tree.setSelectionPath(selectedPath);
while (expandedPaths != null && expandedPaths.hasMoreElements())
{
TreePath path = expandedPaths.nextElement();
tree.expandPath(path);
}
}
/**
* Return a node of userObject in parent
*/
private static DefaultMutableTreeNode getNodeFor_In(Object userObject, DefaultMutableTreeNode parent) {
// obtain all the children in parent
Enumeration<?> enumeration = parent.children();
while (enumeration.hasMoreElements ()) {
DefaultMutableTreeNode eachNode = (DefaultMutableTreeNode) enumeration.nextElement();
if( eachNode.getUserObject() == userObject ||
userObject instanceof String && ((String) userObject).equals(eachNode.getUserObject()) ) {
// This child already exists in parent
return eachNode;
}
}
// Child does not exist in parent; create it
return new DefaultMutableTreeNode(userObject, true);
}
static class MyTreeSelectionListener implements TreeSelectionListener {
@Override
public void valueChanged( TreeSelectionEvent e ) {
JTree tree = (JTree) e.getSource();
if(tree.getLastSelectedPathComponent() == null) {
// This occurs when we set no selected entity (null) and then
// force the tree to have a null selected node
return;
}
DefaultMutableTreeNode node = (DefaultMutableTreeNode)tree.getLastSelectedPathComponent();
if (node.getUserObject() instanceof Entity) {
Entity entity = (Entity)node.getUserObject();
FrameBox.setSelectedEntity(entity);
}
else {
FrameBox.setSelectedEntity(null);
}
}
}
static class MyTreeModelListener implements TreeModelListener {
private final JTree tree;
public MyTreeModelListener(JTree tree) {
this.tree = tree;
}
@Override
public void treeNodesChanged( TreeModelEvent e ) {
DefaultMutableTreeNode node = (DefaultMutableTreeNode)tree.getLastSelectedPathComponent();
String newName = ((String)node.getUserObject()).trim();
// Check that the entity was defined AFTER the RecordEdits command
if (!currentEntity.testFlag(Entity.FLAG_ADDED)) {
JOptionPane.showMessageDialog(null, "Cannot rename an entity that was defined before the RecordEdits command.",
"Input Error", JOptionPane.ERROR_MESSAGE);
node.setUserObject(currentEntity);
return;
}
// Check that the new name is valid
if (newName.contains(" ") || newName.contains("\t") || newName.contains("{") || newName.contains("}")) {
JOptionPane.showMessageDialog(null, "Entity names cannot contain spaces, tabs, or braces ({}).",
"Input Error", JOptionPane.ERROR_MESSAGE);
node.setUserObject(currentEntity);
return;
}
// Check that the name has not been used already
Entity existingEnt = Input.tryParseEntity(newName, Entity.class);
if (existingEnt != null) {
JOptionPane.showMessageDialog(null, String.format("Entity name: %s is already in use.", newName),
"Input Error", JOptionPane.ERROR_MESSAGE);
node.setUserObject(currentEntity);
return;
}
// Rename the entity
currentEntity.setInputName(newName);
node.setUserObject(currentEntity);
FrameBox.setSelectedEntity(currentEntity);
}
@Override
public void treeNodesInserted(TreeModelEvent e) {}
@Override
public void treeNodesRemoved(TreeModelEvent e) {}
@Override
public void treeStructureChanged(TreeModelEvent e) {}
}
static class InputMenuItem extends MenuItem {
private final Entity ent;
public InputMenuItem(Entity ent) {
super("Input Editor");
this.ent = ent;
}
@Override
public void action() {
InputAgent.processEntity_Keyword_Value(Simulation.getInstance(), "ShowInputEditor", "TRUE");
FrameBox.setSelectedEntity(ent);
}
}
static class PropertyMenuItem extends MenuItem {
private final Entity ent;
public PropertyMenuItem(Entity ent) {
super("Property Viewer");
this.ent = ent;
}
@Override
public void action() {
InputAgent.processEntity_Keyword_Value(Simulation.getInstance(), "ShowPropertyViewer", "TRUE");
FrameBox.setSelectedEntity(ent);
}
}
static class OutputMenuItem extends MenuItem {
private final Entity ent;
public OutputMenuItem(Entity ent) {
super("Output Viewer");
this.ent = ent;
}
@Override
public void action() {
InputAgent.processEntity_Keyword_Value(Simulation.getInstance(), "ShowOutputViewer", "TRUE");
FrameBox.setSelectedEntity(ent);
}
}
static class DuplicateMenuItem extends MenuItem {
private final Entity ent;
public DuplicateMenuItem(Entity ent) {
super("Duplicate");
this.ent = ent;
}
@Override
public void action() {
Entity copiedEntity = InputAgent.defineEntityWithUniqueName(ent.getClass(),
ent.getInputName(), "_Copy", true);
// Match all the inputs
copiedEntity.copyInputs(ent);
// Position the duplicated entity next to the original
if (copiedEntity instanceof DisplayEntity) {
DisplayEntity dEnt = (DisplayEntity)copiedEntity;
Vec3d pos = dEnt.getPosition();
pos.x += 0.5d * dEnt.getSize().x;
pos.y -= 0.5d * dEnt.getSize().y;
dEnt.setPosition(pos);
// Set the input for the "Position" keyword to the new value
KeywordIndex kw = InputAgent.formatPointInputs("Position", pos, "m");
InputAgent.apply(dEnt, kw);
}
// Show the duplicated entity in the editors and viewers
FrameBox.setSelectedEntity(copiedEntity);
}
}
static class DeleteMenuItem extends MenuItem {
private final Entity ent;
public DeleteMenuItem(Entity ent) {
super("Delete");
this.ent = ent;
}
@Override
public void action() {
ent.kill();
FrameBox.setSelectedEntity(null);
}
}
static class GraphicsMenuItem extends MenuItem {
private final DisplayEntity ent;
private final int x;
private final int y;
public GraphicsMenuItem(DisplayEntity ent, int x, int y) {
super("Change Graphics");
this.ent = ent;
this.x = x;
this.y = y;
}
@Override
public void action() {
// More than one DisplayModel(LOD) or No DisplayModel
if(ent.getDisplayModelList() == null)
return;
GraphicBox graphicBox = GraphicBox.getInstance(ent, x, y);
graphicBox.setVisible( true );
}
}
static class LabelMenuItem extends MenuItem {
private final DisplayEntity ent;
public LabelMenuItem(DisplayEntity ent) {
super("Add Label");
this.ent = ent;
}
@Override
public void action() {
Text label = InputAgent.defineEntityWithUniqueName(Text.class, "Text", "", true);
InputAgent.processEntity_Keyword_Value(label, "RelativeEntity", ent.getInputName() );
if (ent.getCurrentRegion() != null)
InputAgent.processEntity_Keyword_Value(label, "Region", ent.getCurrentRegion().getInputName());
InputAgent.processEntity_Keyword_Value(label, "Position", "0.0 -0.65 0.0 m" );
InputAgent.processEntity_Keyword_Value(label, "TextHeight", "0.15 m" );
InputAgent.processEntity_Keyword_Value(label, "Format", "%s");
InputAgent.processEntity_Keyword_Value(label, "OutputName", String.format("%s Name", ent.getInputName()) );
FrameBox.setSelectedEntity(label);
}
}
static class CenterInViewMenuItem extends MenuItem {
private final DisplayEntity ent;
private final View v;
public CenterInViewMenuItem(DisplayEntity ent, View v) {
super("Center in View");
this.ent = ent;
this.v = v;
}
@Override
public void action() {
// Move the camera position so that the entity is in the centre of the screen
Vec3d viewPos = new Vec3d(v.getGlobalPosition());
viewPos.sub3(v.getGlobalCenter());
viewPos.add3(ent.getPosition());
v.setCenter(ent.getPosition());
v.setPosition(viewPos);
}
}
private static class JActionMenuItem extends JMenuItem
implements ActionListener {
private final MenuItem de;
public JActionMenuItem(MenuItem item) {
super(item.menuName);
de = item;
this.addActionListener(this);
}
@Override
public void actionPerformed(ActionEvent e) {
de.action();
}
}
/**
 * A miscellaneous utility to populate a JPopupMenu with a list of DisplayEntity menu items (for the right-click menu)
 * @param menu
 * @param ent
 * @param x
 * @param y
*/
public static void populateMenu(JPopupMenu menu, Entity ent, int x, int y) {
ArrayList<MenuItem> menuItems = getMenuItems(ent, x, y);
for (MenuItem item : menuItems) {
menu.add(new JActionMenuItem(item));
}
}
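/*
 * A minimal usage sketch (not part of the original source; the MouseEvent "evt"
 * and selected Entity "ent" are hypothetical). MyMouseListener below follows
 * the same pattern when building the right-click menu:
 *
 *   JPopupMenu menu = new JPopupMenu();
 *   ObjectSelector.populateMenu(menu, ent, evt.getX(), evt.getY());
 *   menu.show(evt.getComponent(), evt.getX(), evt.getY());
 */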
private static ArrayList<MenuItem> getMenuItems(Entity ent, int x, int y) {
ArrayList<MenuItem> list = new ArrayList<MenuItem>();
list.add(new InputMenuItem(ent));
list.add(new OutputMenuItem(ent));
list.add(new PropertyMenuItem(ent));
if (!ent.testFlag(Entity.FLAG_GENERATED))
list.add(new DuplicateMenuItem(ent));
list.add(new DeleteMenuItem(ent));
if (ent instanceof DisplayEntity) {
DisplayEntity dEnt = (DisplayEntity)ent;
if (RenderManager.isGood())
list.add(new GraphicsMenuItem(dEnt, x, y));
if (RenderManager.isGood()) {
View v = RenderManager.inst().getActiveView();
if (v != null) {
list.add(new LabelMenuItem(dEnt));
list.add(new CenterInViewMenuItem(dEnt, v));
}
}
}
if (ent instanceof MenuItemEntity)
((MenuItemEntity)ent).gatherMenuItems(list, x, y);
return list;
}
static class MyMouseListener implements MouseListener {
private final JPopupMenu menu = new JPopupMenu();
@Override
public void mouseClicked(MouseEvent e) {
if(e.getButton() != MouseEvent.BUTTON3)
return;
if(currentEntity == null)
return;
// Right mouse click on a movable DisplayEntity
menu.removeAll();
ObjectSelector.populateMenu(menu, currentEntity, e.getX(), e.getY());
menu.show(e.getComponent(), e.getX(), e.getY());
}
@Override
public void mouseEntered(MouseEvent e) {}
@Override
public void mouseExited(MouseEvent e) {}
@Override
public void mousePressed(MouseEvent e) {}
@Override
public void mouseReleased(MouseEvent e) {}
}
static class MyKeyListener implements KeyListener {
@Override
public void keyReleased(KeyEvent e) {
if (e.getKeyCode() != KeyEvent.VK_DELETE)
return;
if(currentEntity instanceof DisplayEntity ) {
DisplayEntity disp = (DisplayEntity)currentEntity;
if(! disp.isMovable())
return;
// Delete key was released on a movable DisplayEntity
disp.kill();
FrameBox.setSelectedEntity(null);
}
}
@Override
public void keyPressed(KeyEvent e) {}
@Override
public void keyTyped(KeyEvent e) {}
}
}
|
src/main/java/com/jaamsim/ui/ObjectSelector.java
|
/*
* JaamSim Discrete Event Simulation
* Copyright (C) 2011 Ausenco Engineering Canada Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
package com.jaamsim.ui;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.ArrayList;
import java.util.Enumeration;
import javax.swing.JFrame;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JTree;
import javax.swing.event.TreeModelEvent;
import javax.swing.event.TreeModelListener;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreePath;
import javax.swing.tree.TreeSelectionModel;
import com.jaamsim.controllers.RenderManager;
import com.jaamsim.input.Input;
import com.jaamsim.input.InputAgent;
import com.jaamsim.input.KeywordIndex;
import com.jaamsim.math.Vec3d;
import com.sandwell.JavaSimulation.Entity;
import com.sandwell.JavaSimulation.ObjectType;
import com.sandwell.JavaSimulation.Simulation;
import com.sandwell.JavaSimulation3D.DisplayEntity;
import com.sandwell.JavaSimulation3D.GUIFrame;
import com.sandwell.JavaSimulation3D.Text;
public class ObjectSelector extends FrameBox {
private static ObjectSelector myInstance;
// Tree view properties
private DefaultMutableTreeNode top;
private final DefaultTreeModel treeModel;
private final JTree tree;
private final JScrollPane treeView;
public static Entity currentEntity;
private long entSequence;
public ObjectSelector() {
super( "Object Selector" );
setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
addWindowListener(FrameBox.getCloseListener("ShowObjectSelector"));
top = new DefaultMutableTreeNode( "Defined Objects");
treeModel = new DefaultTreeModel(top);
tree = new JTree();
tree.setModel(treeModel);
tree.getSelectionModel().setSelectionMode( TreeSelectionModel.SINGLE_TREE_SELECTION );
updateTree();
treeView = new JScrollPane(tree);
getContentPane().add(treeView);
entSequence = 0;
setLocation(GUIFrame.COL1_START, GUIFrame.BOTTOM_START);
setSize(GUIFrame.COL1_WIDTH, GUIFrame.HALF_BOTTOM);
tree.addTreeSelectionListener( new MyTreeSelectionListener() );
treeModel.addTreeModelListener( new MyTreeModelListener(tree) );
tree.addMouseListener(new MyMouseListener());
tree.addKeyListener(new MyKeyListener());
}
@Override
public void setEntity(Entity ent) {
if (ent == currentEntity)
return;
currentEntity = ent;
long curSequence = Entity.getEntitySequence();
if (entSequence != curSequence) {
entSequence = curSequence;
updateTree();
}
if (currentEntity == null) {
tree.setSelectionPath(null);
return;
}
// if the entity is an added entity, allow renaming. otherwise, do not.
if (currentEntity.testFlag(Entity.FLAG_ADDED))
tree.setEditable(true);
else
tree.setEditable(false);
DefaultMutableTreeNode root = (DefaultMutableTreeNode)tree.getModel().getRoot();
Enumeration<?> e = root.depthFirstEnumeration();
while (e.hasMoreElements()) {
DefaultMutableTreeNode aNode = (DefaultMutableTreeNode)e.nextElement();
if (aNode.getUserObject() == currentEntity) {
TreePath path = new TreePath(aNode.getPath());
tree.scrollPathToVisible(path);
tree.setSelectionPath(path);
return;
}
}
}
@Override
public void updateValues(double simTime) {
if (!this.isVisible())
return;
long curSequence = Entity.getEntitySequence();
if (entSequence != curSequence) {
entSequence = curSequence;
updateTree();
}
}
/**
* Returns the only instance of the Object Selector
*/
public static synchronized ObjectSelector getInstance() {
if (myInstance == null)
myInstance = new ObjectSelector();
return myInstance;
}
private synchronized static void killInstance() {
myInstance = null;
}
@Override
public void dispose() {
killInstance();
currentEntity = null;
super.dispose();
}
private void updateTree() {
// Make a best-effort attempt to find all used classes...can race with
// object creation/deletion, but that's ok
ArrayList<Class<? extends Entity>> used = new ArrayList<Class<? extends Entity>>();
for (int i = 0; i < Entity.getAll().size(); i++) {
try {
Class<? extends Entity> klass = Entity.getAll().get(i).getClass();
if (!used.contains(klass))
used.add(klass);
}
catch (IndexOutOfBoundsException e) {}
}
ArrayList<String> palettes = new ArrayList<String>();
for (int j = 0; j < ObjectType.getAll().size(); j++) {
ObjectType type = null;
try {
type = ObjectType.getAll().get(j);
}
catch (IndexOutOfBoundsException e) {
break;
}
if (!palettes.contains(type.getPaletteName()))
palettes.add(type.getPaletteName());
}
for (int k = 0; k < palettes.size(); k++) {
String palName = palettes.get(k);
DefaultMutableTreeNode palNode = getNodeFor_In(palName, top);
for (int j = 0; j < ObjectType.getAll().size(); j++) {
ObjectType type = null;
try {
type = ObjectType.getAll().get(j);
}
catch (IndexOutOfBoundsException e) {
break;
}
if(!palName.equals( type.getPaletteName()))
continue;
Class<? extends Entity> proto = type.getJavaClass();
// skip unused classes
DefaultMutableTreeNode classNode = getNodeFor_In(proto.getSimpleName(), palNode);
if (!used.contains(proto)) {
if( classNode != null ) {
classNode.removeAllChildren();
classNode.removeFromParent();
}
continue;
}
for (int i = 0; i < Entity.getAll().size(); i++) {
try {
Entity each = Entity.getAll().get(i);
// Skip all that do not match the current class
if (each.getClass() != proto)
continue;
// skip locked Entities
if (each.testFlag(Entity.FLAG_LOCKED))
continue;
DefaultMutableTreeNode eachNode = getNodeFor_In(each, classNode);
if(classNode.getIndex(eachNode) < 0)
classNode.add(eachNode);
}
catch (IndexOutOfBoundsException e) {
continue;
}
}
// Remove the killed entities from the class node
Enumeration<?> enumeration = classNode.children();
while (enumeration.hasMoreElements ()) {
DefaultMutableTreeNode each = (DefaultMutableTreeNode) enumeration.nextElement();
if (!Entity.getAll().contains(each.getUserObject())) {
classNode.remove(each);
}
}
if(!classNode.isLeaf()) {
// Class node does not exist in the package node
if(palNode.getIndex(classNode) < 0) {
palNode.add(classNode);
}
}
else if( palNode.getIndex(classNode) >= 0) {
palNode.remove(classNode);
}
}
// Palette node is not empty
if(!palNode.isLeaf()) {
if(top.getIndex(palNode) < 0)
top.add(palNode);
}
else if(top.getIndex(palNode) >= 0) {
top.remove(palNode);
}
}
// Store all the expanded paths
Enumeration<TreePath> expandedPaths = tree.getExpandedDescendants(new TreePath(top));
TreePath selectedPath = tree.getSelectionPath();
treeModel.reload(top); // refresh tree
// Restore all expanded paths and the selected path
tree.setSelectionPath(selectedPath);
while (expandedPaths != null && expandedPaths.hasMoreElements())
{
TreePath path = expandedPaths.nextElement();
tree.expandPath(path);
}
}
/**
* Return a node of userObject in parent
*/
private static DefaultMutableTreeNode getNodeFor_In(Object userObject, DefaultMutableTreeNode parent) {
// obtain all the children in parent
Enumeration<?> enumeration = parent.children();
while (enumeration.hasMoreElements ()) {
DefaultMutableTreeNode eachNode = (DefaultMutableTreeNode) enumeration.nextElement();
if( eachNode.getUserObject() == userObject ||
userObject instanceof String && ((String) userObject).equals(eachNode.getUserObject()) ) {
// This child already exists in parent
return eachNode;
}
}
// Child does not exist in parent; create it
return new DefaultMutableTreeNode(userObject, true);
}
static class MyTreeSelectionListener implements TreeSelectionListener {
@Override
public void valueChanged( TreeSelectionEvent e ) {
JTree tree = (JTree) e.getSource();
if(tree.getLastSelectedPathComponent() == null) {
// This occurs when we set no selected entity (null) and then
// force the tree to have a null selected node
return;
}
DefaultMutableTreeNode node = (DefaultMutableTreeNode)tree.getLastSelectedPathComponent();
if (node.getUserObject() instanceof Entity) {
Entity entity = (Entity)node.getUserObject();
FrameBox.setSelectedEntity(entity);
}
else {
FrameBox.setSelectedEntity(null);
}
}
}
static class MyTreeModelListener implements TreeModelListener {
private final JTree tree;
public MyTreeModelListener(JTree tree) {
this.tree = tree;
}
@Override
public void treeNodesChanged( TreeModelEvent e ) {
DefaultMutableTreeNode node = (DefaultMutableTreeNode)tree.getLastSelectedPathComponent();
String newName = ((String)node.getUserObject()).trim();
// Check that the new name is valid
if (newName.contains(" ") || newName.contains("\t") || newName.contains("{") || newName.contains("}")) {
JOptionPane.showMessageDialog(null, "Entity names cannot contain spaces, tabs, or braces ({}).",
"Input Error", JOptionPane.ERROR_MESSAGE);
node.setUserObject(currentEntity);
return;
}
// Check that the name has not been used already
Entity existingEnt = Input.tryParseEntity(newName, Entity.class);
if (existingEnt != null) {
JOptionPane.showMessageDialog(null, String.format("Entity name: %s is already in use.", newName),
"Input Error", JOptionPane.ERROR_MESSAGE);
node.setUserObject(currentEntity);
return;
}
// Rename the entity
currentEntity.setInputName(newName);
node.setUserObject(currentEntity);
FrameBox.setSelectedEntity(currentEntity);
}
@Override
public void treeNodesInserted(TreeModelEvent e) {}
@Override
public void treeNodesRemoved(TreeModelEvent e) {}
@Override
public void treeStructureChanged(TreeModelEvent e) {}
}
static class InputMenuItem extends MenuItem {
private final Entity ent;
public InputMenuItem(Entity ent) {
super("Input Editor");
this.ent = ent;
}
@Override
public void action() {
InputAgent.processEntity_Keyword_Value(Simulation.getInstance(), "ShowInputEditor", "TRUE");
FrameBox.setSelectedEntity(ent);
}
}
static class PropertyMenuItem extends MenuItem {
private final Entity ent;
public PropertyMenuItem(Entity ent) {
super("Property Viewer");
this.ent = ent;
}
@Override
public void action() {
InputAgent.processEntity_Keyword_Value(Simulation.getInstance(), "ShowPropertyViewer", "TRUE");
FrameBox.setSelectedEntity(ent);
}
}
static class OutputMenuItem extends MenuItem {
private final Entity ent;
public OutputMenuItem(Entity ent) {
super("Output Viewer");
this.ent = ent;
}
@Override
public void action() {
InputAgent.processEntity_Keyword_Value(Simulation.getInstance(), "ShowOutputViewer", "TRUE");
FrameBox.setSelectedEntity(ent);
}
}
static class DuplicateMenuItem extends MenuItem {
private final Entity ent;
public DuplicateMenuItem(Entity ent) {
super("Duplicate");
this.ent = ent;
}
@Override
public void action() {
Entity copiedEntity = InputAgent.defineEntityWithUniqueName(ent.getClass(),
ent.getInputName(), "_Copy", true);
// Match all the inputs
copiedEntity.copyInputs(ent);
// Position the duplicated entity next to the original
if (copiedEntity instanceof DisplayEntity) {
DisplayEntity dEnt = (DisplayEntity)copiedEntity;
Vec3d pos = dEnt.getPosition();
pos.x += 0.5d * dEnt.getSize().x;
pos.y -= 0.5d * dEnt.getSize().y;
dEnt.setPosition(pos);
// Set the input for the "Position" keyword to the new value
KeywordIndex kw = InputAgent.formatPointInputs("Position", pos, "m");
InputAgent.apply(dEnt, kw);
}
// Show the duplicated entity in the editors and viewers
FrameBox.setSelectedEntity(copiedEntity);
}
}
static class DeleteMenuItem extends MenuItem {
private final Entity ent;
public DeleteMenuItem(Entity ent) {
super("Delete");
this.ent = ent;
}
@Override
public void action() {
ent.kill();
FrameBox.setSelectedEntity(null);
}
}
static class GraphicsMenuItem extends MenuItem {
private final DisplayEntity ent;
private final int x;
private final int y;
public GraphicsMenuItem(DisplayEntity ent, int x, int y) {
super("Change Graphics");
this.ent = ent;
this.x = x;
this.y = y;
}
@Override
public void action() {
// More than one DisplayModel(LOD) or No DisplayModel
if(ent.getDisplayModelList() == null)
return;
GraphicBox graphicBox = GraphicBox.getInstance(ent, x, y);
graphicBox.setVisible( true );
}
}
static class LabelMenuItem extends MenuItem {
private final DisplayEntity ent;
public LabelMenuItem(DisplayEntity ent) {
super("Add Label");
this.ent = ent;
}
@Override
public void action() {
Text label = InputAgent.defineEntityWithUniqueName(Text.class, "Text", "", true);
InputAgent.processEntity_Keyword_Value(label, "RelativeEntity", ent.getInputName() );
if (ent.getCurrentRegion() != null)
InputAgent.processEntity_Keyword_Value(label, "Region", ent.getCurrentRegion().getInputName());
InputAgent.processEntity_Keyword_Value(label, "Position", "0.0 -0.65 0.0 m" );
InputAgent.processEntity_Keyword_Value(label, "TextHeight", "0.15 m" );
InputAgent.processEntity_Keyword_Value(label, "Format", "%s");
InputAgent.processEntity_Keyword_Value(label, "OutputName", String.format("%s Name", ent.getInputName()) );
FrameBox.setSelectedEntity(label);
}
}
static class CenterInViewMenuItem extends MenuItem {
private final DisplayEntity ent;
private final View v;
public CenterInViewMenuItem(DisplayEntity ent, View v) {
super("Center in View");
this.ent = ent;
this.v = v;
}
@Override
public void action() {
// Move the camera position so that the entity is in the centre of the screen
Vec3d viewPos = new Vec3d(v.getGlobalPosition());
viewPos.sub3(v.getGlobalCenter());
viewPos.add3(ent.getPosition());
v.setCenter(ent.getPosition());
v.setPosition(viewPos);
}
}
private static class JActionMenuItem extends JMenuItem
implements ActionListener {
private final MenuItem de;
public JActionMenuItem(MenuItem item) {
super(item.menuName);
de = item;
this.addActionListener(this);
}
@Override
public void actionPerformed(ActionEvent e) {
de.action();
}
}
/**
 * A miscellaneous utility to populate a JPopupMenu with a list of DisplayEntity menu items (for the right-click menu)
 * @param menu
 * @param ent
 * @param x
 * @param y
*/
public static void populateMenu(JPopupMenu menu, Entity ent, int x, int y) {
ArrayList<MenuItem> menuItems = getMenuItems(ent, x, y);
for (MenuItem item : menuItems) {
menu.add(new JActionMenuItem(item));
}
}
private static ArrayList<MenuItem> getMenuItems(Entity ent, int x, int y) {
ArrayList<MenuItem> list = new ArrayList<MenuItem>();
list.add(new InputMenuItem(ent));
list.add(new OutputMenuItem(ent));
list.add(new PropertyMenuItem(ent));
if (!ent.testFlag(Entity.FLAG_GENERATED))
list.add(new DuplicateMenuItem(ent));
list.add(new DeleteMenuItem(ent));
if (ent instanceof DisplayEntity) {
DisplayEntity dEnt = (DisplayEntity)ent;
if (RenderManager.isGood())
list.add(new GraphicsMenuItem(dEnt, x, y));
if (RenderManager.isGood()) {
View v = RenderManager.inst().getActiveView();
if (v != null) {
list.add(new LabelMenuItem(dEnt));
list.add(new CenterInViewMenuItem(dEnt, v));
}
}
}
if (ent instanceof MenuItemEntity)
((MenuItemEntity)ent).gatherMenuItems(list, x, y);
return list;
}
static class MyMouseListener implements MouseListener {
private final JPopupMenu menu = new JPopupMenu();
@Override
public void mouseClicked(MouseEvent e) {
if(e.getButton() != MouseEvent.BUTTON3)
return;
if(currentEntity == null)
return;
// Right mouse click on a movable DisplayEntity
menu.removeAll();
ObjectSelector.populateMenu(menu, currentEntity, e.getX(), e.getY());
menu.show(e.getComponent(), e.getX(), e.getY());
}
@Override
public void mouseEntered(MouseEvent e) {}
@Override
public void mouseExited(MouseEvent e) {}
@Override
public void mousePressed(MouseEvent e) {}
@Override
public void mouseReleased(MouseEvent e) {}
}
static class MyKeyListener implements KeyListener {
@Override
public void keyReleased(KeyEvent e) {
if (e.getKeyCode() != KeyEvent.VK_DELETE)
return;
if(currentEntity instanceof DisplayEntity ) {
DisplayEntity disp = (DisplayEntity)currentEntity;
if(! disp.isMovable())
return;
// Delete key was released on a movable DisplayEntity
disp.kill();
FrameBox.setSelectedEntity(null);
}
}
@Override
public void keyPressed(KeyEvent e) {}
@Override
public void keyTyped(KeyEvent e) {}
}
}
|
JS: show an error message when an entity cannot be renamed instead of failing without explanation
Signed-off-by: Harry King <4297d713059dfdc9fd310cd85bf547ea21a2d624@ausenco.com>
Signed-off-by: Harvey Harrison <eadbd6b462bf3c97df0300a934c12bc2e5d1fe51@ausenco.com>
|
src/main/java/com/jaamsim/ui/ObjectSelector.java
|
JS: show an error message when an entity cannot be renamed instead of failing without explanation
|
|
Java
|
apache-2.0
|
b4ee845c5a4da275c757e34685b674848e781a69
| 0
|
industrial-data-space/trusted-connector,industrial-data-space/trusted-connector,industrial-data-space/trusted-connector,industrial-data-space/trusted-connector,industrial-data-space/trusted-connector,industrial-data-space/trusted-connector
|
package de.fhg.aisec.dfpolicy;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashMap;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.management.InstrumentationProcessor;
import org.apache.camel.processor.LogProcessor;
import org.apache.camel.processor.SendProcessor;
import org.apache.camel.AsyncCallback;
import org.apache.camel.AsyncProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MyProcessor implements AsyncProcessor {
private static final Logger LOG = LoggerFactory.getLogger(MyProcessor.class);
private Processor target;
//private ArrayList<LabelRule> label_rules;
//private ArrayList<AllowRule> allow_rules;
private HashMap<String, String> label_rules;
private HashMap<String, String> allow_rules;
public MyProcessor(Processor target) {
this.target = target;
//label_rules = new ArrayList<LabelRule>();
//allow_rules = new ArrayList<AllowRule>();
label_rules = new HashMap<String, String>();
allow_rules = new HashMap<String, String>();
loadRules("deploy/rules");
}
private void loadRules(String rulefile) {
FileInputStream fileinputstream;
// Get the object of DataInputStream
DataInputStream datainputstream;
BufferedReader bufferedreader;
final String LABEL_KEYWORD1 = "LABEL";
final String LABEL_KEYWORD2 = "AS";
final String ALLOW_KEYWORD1 = "ALLOW";
final String ALLOW_KEYWORD2 = "TO";
String line;
String uri;
String label;
String existing_label;
try {
fileinputstream = new FileInputStream(rulefile);
datainputstream = new DataInputStream(fileinputstream);
bufferedreader = new BufferedReader(new InputStreamReader(datainputstream));
while ((line = bufferedreader.readLine()) != null) {
//Remove unneeded spaces
line = line.replaceAll(" ", "");
//Check if it is a LABEL-rule that contains LABEL and AS, and both only once
if (check_rule_syntax(line, LABEL_KEYWORD1, LABEL_KEYWORD2)) {
// source = the string between the first and the second keyword
uri = line.substring(line.indexOf(LABEL_KEYWORD1) + LABEL_KEYWORD1.length(), line.indexOf(LABEL_KEYWORD2));
// label = the string after the second keyword
label = line.substring(line.indexOf(LABEL_KEYWORD2) + LABEL_KEYWORD2.length());
existing_label = label_rules.get(uri);
if (existing_label == null) {
label_rules.put(uri, label);
} else {
label_rules.put(uri, existing_label + "," + label);
}
//label_rules.add(new LabelRule(uri,label));
// Check for an ALLOW-rule
} else if (check_rule_syntax(line, ALLOW_KEYWORD1, ALLOW_KEYWORD2)) {
// source = the string between the first and the second keyword
label = line.substring(line.indexOf(ALLOW_KEYWORD1) + ALLOW_KEYWORD1.length(), line.indexOf(ALLOW_KEYWORD2));
// label = the string after the second keyword
uri = line.substring(line.indexOf(ALLOW_KEYWORD2) + ALLOW_KEYWORD2.length());
//allow_rules.add(new AllowRule(uri, label));
existing_label = allow_rules.get(uri);
if (existing_label == null) {
allow_rules.put(uri, label);
} else {
allow_rules.put(uri, existing_label + "," + label);
}
// If it's also no comment, throw an error
} else if (!line.startsWith("#")) {
LOG.error("Error: Could not parse line " +line + " from rules file");
}
}
datainputstream.close();
} catch (IOException e) {
LOG.error("Caught IOException: " + e.getMessage());
e.printStackTrace();
}
LOG.info("Loaded LABEL rules: " + label_rules.toString());
LOG.info("Loaded ALLOW rules: " + allow_rules.toString());
}
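/*
 * A hypothetical example of the rule file parsed above (illustrative only, not
 * part of the original source). Spaces are stripped before parsing, and lines
 * starting with '#' are treated as comments:
 *
 *   # label everything arriving from this endpoint
 *   LABEL file://incoming AS internal
 *   # only messages carrying the 'internal' label may reach this endpoint
 *   ALLOW internal TO mqtt://broker/topic
 */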
// Checks that, for a line in the rule file, each keyword appears exactly once and keyword1 comes before keyword2
public boolean check_rule_syntax(String line, String keyword1, String keyword2){
//keyword1 in the beginning?
if (line.startsWith(keyword1)
//keyword 2 exists?
&& line.contains(keyword2)
//no second keyword1?
&& line.lastIndexOf(keyword1) == 0
//no second keyword2?
&& line.indexOf(keyword2) == line.lastIndexOf(keyword2)) {
return true;
} else {
return false;
}
}
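/*
 * Illustrative results for check_rule_syntax (hypothetical inputs, not part of
 * the original source), remembering that spaces have already been stripped:
 *
 *   check_rule_syntax("LABELfile://inASinternal", "LABEL", "AS")  // true
 *   check_rule_syntax("ALLOWinternalTOa.com", "LABEL", "AS")      // false (does not start with LABEL)
 *   check_rule_syntax("LABELxASyASz", "LABEL", "AS")              // false (AS appears twice)
 */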
public void process(Exchange exchange) throws Exception {
InstrumentationProcessor instrumentationprocessor;
SendProcessor sendprocessor;
String destination;
String exchange_labels;
String[] rule_labels;
//label the new message if needed
exchange = LabelingProcess(exchange);
if (exchange.getProperty("labels") != null ) {
exchange_labels = exchange.getProperty("labels").toString();
} else {
exchange_labels = "";
}
//figuring out where the message should go to
if (target instanceof InstrumentationProcessor) {
instrumentationprocessor = (InstrumentationProcessor) target;
if (instrumentationprocessor.getProcessor() instanceof SendProcessor) {
sendprocessor = (SendProcessor) instrumentationprocessor.getProcessor();
destination = sendprocessor.getEndpoint().getEndpointUri();
//if it's also no LogProcessor, throw an Error
} else if (instrumentationprocessor.getProcessor() instanceof LogProcessor) {
//nothing to do yet, maybe some logging later
return;
} else {
LOG.error("target is neither an instance of Send- nor Log-Processor: " + target.toString());
return;
}
} else {
LOG.error("target is not an instance of InstrumentactionProcessor");
return;
}
// Look up the allowed labels before splitting, so a missing destination does not trigger a NullPointerException
String allowed_labels = allow_rules.get(destination);
if (allowed_labels == null) {
System.out.println("No rules found for destination: " + destination + ", message will be dropped...");
return;
}
rule_labels = allowed_labels.split(",");
//Check if the message has _ALL_ the required labels. If we miss one, stop
for (String rule : rule_labels) {
if (!check_if_label_exists (rule, exchange_labels)) {
System.out.println("Required label " + rule + " not found, message will be dropped...");
return;
}
}
System.out.println("Message with labels '" + exchange_labels +"' has all required labels ('" + allow_rules.get(destination) + "') for destination '" + destination + "', forwarding...");
target.process(exchange);
}
//check if a label exists in a list of labels
public boolean check_if_label_exists(String label, String labels){
//if there are no requirements we have to fulfill, we return true
if (labels == null) {
return true;
}
//if label is null, but labels isn't, we return false
if (label == null) {
return false;
}
//check for each label if it's contained in the requirements. If not, return false;
if (!labels.equals(label)
&& !labels.contains(label + ",")
&& !labels.contains("," + label)) {
return false;
}
return true;
}
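/*
 * Illustrative checks with hypothetical values (not part of the original source):
 *
 *   check_if_label_exists("internal", "internal,confidential")  // true
 *   check_if_label_exists("public", "internal,confidential")    // false
 *   check_if_label_exists("internal", null)                     // true (no requirements to fulfill)
 */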
public Exchange LabelingProcess(Exchange exchange) {
String from = exchange.getFromEndpoint().getEndpointUri();
String exchange_labels;
String[] rule_labels;
if (exchange.getProperty("labels") != null ) {
exchange_labels = exchange.getProperty("labels").toString();
} else {
exchange_labels = "";
}
//Check if we have a labeling rule for this source
String source_labels = label_rules.get(from);
if (source_labels != null) {
rule_labels = source_labels.split(",");
for (String label : rule_labels) {
//If the label already exists, we don't have to do anything; otherwise, we append it
if (!check_if_label_exists(label, exchange_labels)) {
System.out.println("Got a message from " + from + ", will label it with '" + label + "'");
if (exchange_labels.isEmpty()) {
exchange_labels = label;
} else {
exchange_labels = exchange_labels + "," + label;
}
}
}
exchange.setProperty("labels", exchange_labels);
}
return exchange;
}
@Override
public String toString() {
return "MyProcessor[" + "]";
}
@Override
public boolean process(Exchange exchange, AsyncCallback ac) {
try {
process(exchange);
}catch (Exception e) {
e.printStackTrace();
}
return true;
}
}
|
data-flow-policy/src/main/java/de/fhg/aisec/dfpolicy/MyProcessor.java
|
package de.fhg.aisec.dfpolicy;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashMap;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.management.InstrumentationProcessor;
import org.apache.camel.processor.LogProcessor;
import org.apache.camel.processor.SendProcessor;
import org.apache.camel.AsyncCallback;
import org.apache.camel.AsyncProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MyProcessor implements AsyncProcessor {
private static final Logger LOG = LoggerFactory.getLogger(MyProcessor.class);
private Processor target;
//private ArrayList<LabelRule> label_rules;
//private ArrayList<AllowRule> allow_rules;
private HashMap<String, String> label_rules;
private HashMap<String, String> allow_rules;
public MyProcessor(Processor target) {
this.target = target;
//label_rules = new ArrayList<LabelRule>();
//allow_rules = new ArrayList<AllowRule>();
label_rules = new HashMap<String, String>();
allow_rules = new HashMap<String, String>();
loadRules("deploy/rules");
}
private void loadRules(String rulefile) {
FileInputStream fileinputstream;
// Get the object of DataInputStream
DataInputStream datainputstream;
BufferedReader bufferedreader;
final String LABEL_KEYWORD1 = "LABEL";
final String LABEL_KEYWORD2 = "AS";
final String ALLOW_KEYWORD1 = "ALLOW";
final String ALLOW_KEYWORD2 = "TO";
String line;
String uri;
String label;
String existing_label;
try {
fileinputstream = new FileInputStream(rulefile);
datainputstream = new DataInputStream(fileinputstream);
bufferedreader = new BufferedReader(new InputStreamReader(datainputstream));
while ((line = bufferedreader.readLine()) != null) {
//Remove unneeded spaces
line = line.replaceAll(" ", "");
//Check if it is a LABEL-rule that contains LABEL and AS, and both only once
if (check_rule_syntax(line, LABEL_KEYWORD1, LABEL_KEYWORD2)) {
// source = the string between the first and the second keyword
uri = line.substring(line.indexOf(LABEL_KEYWORD1) + LABEL_KEYWORD1.length(), line.indexOf(LABEL_KEYWORD2));
// label = the string after the second keyword
label = line.substring(line.indexOf(LABEL_KEYWORD2) + LABEL_KEYWORD2.length());
existing_label = label_rules.get(uri);
if (existing_label == null) {
label_rules.put(uri, label);
} else {
label_rules.put(uri, existing_label + "," + label);
}
//label_rules.add(new LabelRule(uri,label));
// Check for an ALLOW-rule
} else if (check_rule_syntax(line, ALLOW_KEYWORD1, ALLOW_KEYWORD2)) {
// source = the string between the first and the second keyword
label = line.substring(line.indexOf(ALLOW_KEYWORD1) + ALLOW_KEYWORD1.length(), line.indexOf(ALLOW_KEYWORD2));
// label = the string after the second keyword
uri = line.substring(line.indexOf(ALLOW_KEYWORD2) + ALLOW_KEYWORD2.length());
//allow_rules.add(new AllowRule(uri, label));
existing_label = allow_rules.get(uri);
if (existing_label == null) {
allow_rules.put(uri, label);
} else {
allow_rules.put(uri, existing_label + "," + label);
}
// If it's also no comment, throw an error
} else if (!line.startsWith("#")) {
LOG.error("Error: Could not parse line " +line + " from rules file");
}
}
datainputstream.close();
} catch (IOException e) {
LOG.error("Caught IOException: " + e.getMessage());
e.printStackTrace();
}
LOG.info("Loaded LABEL rules: " + label_rules.toString());
LOG.info("Loaded ALLOW rules: " + allow_rules.toString());
}
// Checks that, for a line in the rule file, each keyword appears exactly once and keyword1 comes before keyword2
public boolean check_rule_syntax(String line, String keyword1, String keyword2){
//keyword1 in the beginning?
if (line.startsWith(keyword1)
//keyword 2 exists?
&& line.contains(keyword2)
//no second keyword1?
&& line.lastIndexOf(keyword1) == 0
//no second keyword2?
&& line.indexOf(keyword2) == line.lastIndexOf(keyword2)) {
return true;
} else {
return false;
}
}
public void process(Exchange exchange) throws Exception {
InstrumentationProcessor instrumentationprocessor;
SendProcessor sendprocessor;
String destination;
String exchange_labels;
String rule_labels;
String[] rules;
//label the new message if needed
exchange = LabelingProcess(exchange);
if (exchange.getProperty("labels") != null ) {
exchange_labels = exchange.getProperty("labels").toString();
} else {
exchange_labels = "";
}
//figuring out where the message should go to
if (target instanceof InstrumentationProcessor) {
instrumentationprocessor = (InstrumentationProcessor) target;
if (instrumentationprocessor.getProcessor() instanceof SendProcessor) {
sendprocessor = (SendProcessor) instrumentationprocessor.getProcessor();
destination = sendprocessor.getEndpoint().getEndpointUri();
//if it's also no LogProcessor, throw an Error
} else if (instrumentationprocessor.getProcessor() instanceof LogProcessor) {
//nothing to do yet, maybe some logging later
return;
} else {
LOG.error("target is neither an instance of Send- nor Log-Processor: " + target.toString());
return;
}
} else {
LOG.error("target is not an instance of InstrumentactionProcessor");
return;
}
rule_labels = allow_rules.get(destination);
if (rule_labels == null) {
System.out.println("No rules found for destination: " + destination + ", message will be dropped...");
return;
}
rules = rule_labels.split(",");
for (String rule : rules) {
if (check_if_labels_exists (rule, exchange_labels)) {
System.out.println("Found matching rule for destination " + destination +" with labels '" + exchange_labels + "', forwarding...");
target.process(exchange);
return;
}
}
System.out.println("No matching rules found for labels: " + exchange_labels + ", message will be dropped...");
}
//check if all labels from labels1 exist in labels2; both may be comma-separated lists of labels
public boolean check_if_labels_exists(String labels1, String labels2){
String[] labels = labels1.split(",");
if (labels == null) {
return false;
}
//if there are no requirements we have to fulfill, we return true
if (labels2 == null) {
return true;
}
//check for each label if it's contained in the requirements. If not, return false;
for (String label : labels) {
if (!labels2.equals(label)
&& !labels2.contains(label + ",")
&& !labels2.contains("," + label)) {
return false;
}
}
return true;
}
public Exchange LabelingProcess(Exchange exchange) {
String from = exchange.getFromEndpoint().getEndpointUri();
String labels_value;
String label;
if (exchange.getProperty("labels") != null ) {
labels_value = exchange.getProperty("labels").toString();
} else {
labels_value = "";
}
System.out.println("Received a message from " + from);
//Check if we have a labeling rule for this uri
label = label_rules.get(from);
if (label != null) {
//If all labels already exist, we don't have to do anything; otherwise, we append them
if (!check_if_labels_exists(label, labels_value)) {
if (labels_value.isEmpty()) {
labels_value = label;
} else {
//TODO: what if some labels already exist, but some don't?
labels_value = labels_value + "," + label;
}
System.out.println("Got a message from " + from + ", will label it with '" + label + "'");
exchange.setProperty("labels", labels_value);
}
}
return exchange;
}
@Override
public String toString() {
return "MyProcessor[" + "]";
}
@Override
public boolean process(Exchange exchange, AsyncCallback ac) {
try {
process(exchange);
}catch (Exception e) {
e.printStackTrace();
}
return true;
}
}
|
Restructured check_if_label_exists()
|
data-flow-policy/src/main/java/de/fhg/aisec/dfpolicy/MyProcessor.java
|
Restructured check_if_label_exists()
|
|
Java
|
apache-2.0
|
fcfcb4192a4b9c50e33b707eeea8a9ffdb3af70b
| 0
|
ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma
|
/*
* The Gemma project
*
* Copyright (c) 2008 University of British Columbia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ubic.gemma.web.controller.visualization;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import ubic.gemma.model.expression.bioAssayData.DoubleVectorValueObject;
import ubic.gemma.model.expression.experiment.ExpressionExperiment;
import ubic.gemma.model.expression.experiment.ExpressionExperimentValueObject;
import ubic.gemma.model.genome.Gene;
/**
* Stores expression profile data for plotting.
*
* @author kelsey, paul
* @version $Id$
*/
public class VisualizationValueObject {
private Collection<GeneExpressionProfile> profiles;
private ExpressionExperimentValueObject eevo = null;
private Map<Long, String> colorMap = new HashMap<Long, String>();
private static String[] colors = new String[] { "red", "black", "blue", "green", "orange" };
private static Log log = LogFactory.getLog( VisualizationValueObject.class );
public VisualizationValueObject() {
super();
this.profiles = new HashSet<GeneExpressionProfile>();
}
/**
* @param vectors from a single expression experiment.
 * @param genes Is a list so that order is guaranteed. Need this so that colors are consistent. Query gene is always
* black, coexpressed is always red.
* @param validatedProbeList Probes which are flagged as 'valid' in some sense. For example, in coexpression plots
* these are probes that provided the coexpression evidence, to differentiate them from the ones which are
* just being displayed because they assay the same gene.
* @throws IllegalArgumentException if vectors are mixed between EEs.
*/
public VisualizationValueObject( Collection<DoubleVectorValueObject> vectors, List<Gene> genes,
Collection<Long> validatedProbeList ) {
this();
int i = 0;
if ( genes.size() > colors.length ) {
// / FIXME
}
for ( Gene g : genes ) {
log.debug( "Gene: " + g.getName() + " color=" + colors[i] );
colorMap.put( g.getId(), colors[i] );
i++;
}
for ( DoubleVectorValueObject vector : vectors ) {
if ( this.eevo == null ) {
setEE( vector.getExpressionExperiment() );
} else if ( !( this.eevo.getId().equals( vector.getExpressionExperiment().getId() ) ) ) {
throw new IllegalArgumentException( "All vectors have to have the same ee for this constructor. ee1: "
+ this.eevo.getId() + " ee2: " + vector.getExpressionExperiment().getId() );
}
String color = null;
// log.info( vector + " GENES=" + StringUtils.join( vector.getGenes(), ',' ) );
for ( Gene g : genes ) {
if ( vector.getGenes().contains( g ) ) {
if ( color != null ) {
/*
* Special color to denote probes that hyb to both genes.
*/
color = "#CCCCCC";
if ( log.isDebugEnabled() )
log.debug( "EE: " + eevo.getId() + "; Probe: " + vector.getDesignElement().getName()
+ " (id=" + vector.getDesignElement().getId()
+ ") matches more than one of the genes" );
} else {
color = colorMap.get( g.getId() );
}
}
}
int valid = 1;
if ( validatedProbeList != null && validatedProbeList.contains( vector.getDesignElement().getId() ) ) {
valid = 2;
}
GeneExpressionProfile profile = new GeneExpressionProfile( vector, color, valid );
profiles.add( profile );
}
}
/**
* @param vectors from a single expression experiment.
 * @param genes Is a list so that order is guaranteed. Need this so that colors are consistent. Query gene is always
* black, coexpressed is always red.
* @throws IllegalArgumentException if vectors are mixed between EEs.
*/
public VisualizationValueObject( Collection<DoubleVectorValueObject> vectors, List<Gene> genes ) {
this();
int i = 0;
if ( genes.size() > colors.length ) {
// / FIXME
}
for ( Gene g : genes ) {
log.debug( "Gene: " + g.getName() + " color=" + colors[i] );
colorMap.put( g.getId(), colors[i] );
i++;
}
for ( DoubleVectorValueObject vector : vectors ) {
if ( this.eevo == null ) {
setEE( vector.getExpressionExperiment() );
} else if ( !( this.eevo.getId().equals( vector.getExpressionExperiment().getId() ) ) ) {
throw new IllegalArgumentException( "All vectors have to have the same ee for this constructor. ee1: "
+ this.eevo.getId() + " ee2: " + vector.getExpressionExperiment().getId() );
}
String color = null;
// log.info( vector + " GENES=" + StringUtils.join( vector.getGenes(), ',' ) );
for ( Gene g : genes ) {
if ( vector.getGenes().contains( g ) ) {
if ( color != null ) {
/*
* Special color to denote probes that hyb to both genes.
*/
color = "#CCCCCC";
if ( log.isDebugEnabled() )
log.debug( "EE: " + eevo.getId() + "; Probe: " + vector.getDesignElement().getName()
+ " (id=" + vector.getDesignElement().getId()
+ ") matches more than one of the genes" );
} else {
color = colorMap.get( g.getId() );
}
}
}
GeneExpressionProfile profile = new GeneExpressionProfile( vector, color, 1 );
profiles.add( profile );
}
}
public VisualizationValueObject( Collection<DoubleVectorValueObject> vectors, List<Gene> genes,
Collection<Long> validatedProbeList, double minPvalue ) {
this();
int i = 0;
if ( genes.size() > colors.length ) {
// / FIXME
}
for ( Gene g : genes ) {
log.debug( "Gene: " + g.getName() + " color=" + colors[i] );
colorMap.put( g.getId(), colors[i] );
i++;
}
for ( DoubleVectorValueObject vector : vectors ) {
if ( this.eevo == null ) {
setEEwithPvalue( vector.getExpressionExperiment(), minPvalue );
} else if ( !( this.eevo.getId().equals( vector.getExpressionExperiment().getId() ) ) ) {
throw new IllegalArgumentException( "All vectors have to have the same ee for this constructor. ee1: "
+ this.eevo.getId() + " ee2: " + vector.getExpressionExperiment().getId() );
}
String color = null;
// log.info( vector + " GENES=" + StringUtils.join( vector.getGenes(), ',' ) );
for ( Gene g : genes ) {
if ( vector.getGenes().contains( g ) ) {
if ( color != null ) {
/*
* Special color to denote probes that hyb to both genes.
*/
color = "#CCCCCC";
if ( log.isDebugEnabled() )
log.debug( "EE: " + eevo.getId() + "; Probe: " + vector.getDesignElement().getName()
+ " (id=" + vector.getDesignElement().getId()
+ ") matches more than one of the genes" );
} else {
color = colorMap.get( g.getId() );
}
}
}
int valid = 1;
if ( validatedProbeList != null && validatedProbeList.contains( vector.getDesignElement().getId() ) ) {
valid = 2;
}
GeneExpressionProfile profile = new GeneExpressionProfile( vector, color, valid );
profiles.add( profile );
}
}
/**
* @param dvvo
*/
public VisualizationValueObject( DoubleVectorValueObject dvvo ) {
this();
setEE( dvvo.getExpressionExperiment() );
GeneExpressionProfile profile = new GeneExpressionProfile( dvvo, null, 0 );
profiles.add( profile );
}
// ---------------------------------
// Getters and Setters
// ---------------------------------
public ExpressionExperimentValueObject getEevo() {
return eevo;
}
public void setEE( ExpressionExperiment ee ) {
this.eevo = new ExpressionExperimentValueObject();
this.eevo.setId( ee.getId() );
this.eevo.setName( ee.getName() );
this.eevo.setShortName( ee.getShortName() );
this.eevo.setClazz( "ExpressionExperimentValueObject" );
}
public void setEevo( ExpressionExperimentValueObject eevo ) {
this.eevo = eevo;
}
public void setEEwithPvalue( ExpressionExperiment ee, double minP ) {
setEE(ee);
this.eevo.setMinPvalue( minP );
}
public Collection<GeneExpressionProfile> getProfiles() {
return profiles;
}
public void setProfiles( Collection<GeneExpressionProfile> profiles ) {
this.profiles = profiles;
}
}
|
gemma-web/src/main/java/ubic/gemma/web/controller/visualization/VisualizationValueObject.java
|
/*
* The Gemma project
*
* Copyright (c) 2008 University of British Columbia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ubic.gemma.web.controller.visualization;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import ubic.gemma.model.expression.bioAssayData.DoubleVectorValueObject;
import ubic.gemma.model.expression.experiment.ExpressionExperiment;
import ubic.gemma.model.expression.experiment.ExpressionExperimentValueObject;
import ubic.gemma.model.genome.Gene;
/**
* Stores expression profile data for plotting.
*
* @author kelsey, paul
* @version $Id$
*/
public class VisualizationValueObject {
private Collection<GeneExpressionProfile> profiles;
private ExpressionExperimentValueObject eevo = null;
private Map<Long, String> colorMap = new HashMap<Long, String>();
private static String[] colors = new String[] { "red", "black", "blue", "green", "orange" };
private static Log log = LogFactory.getLog( VisualizationValueObject.class );
public VisualizationValueObject() {
super();
this.profiles = new HashSet<GeneExpressionProfile>();
}
/**
* @param vectors from a single expression experiment.
 * @param genes Is a list so that order is guaranteed. Need this so that colors are consistent. Query gene is always
* black, coexpressed is always red.
* @param validatedProbeList Probes which are flagged as 'valid' in some sense. For example, in coexpression plots
* these are probes that provided the coexpression evidence, to differentiate them from the ones which are
* just being displayed because they assay the same gene.
* @throws IllegalArgumentException if vectors are mixed between EEs.
*/
public VisualizationValueObject( Collection<DoubleVectorValueObject> vectors, List<Gene> genes,
Collection<Long> validatedProbeList ) {
this();
int i = 0;
if ( genes.size() > colors.length ) {
// / FIXME
}
for ( Gene g : genes ) {
log.debug( "Gene: " + g.getName() + " color=" + colors[i] );
colorMap.put( g.getId(), colors[i] );
i++;
}
for ( DoubleVectorValueObject vector : vectors ) {
if ( this.eevo == null ) {
setEE( vector.getExpressionExperiment() );
} else if ( !( this.eevo.getId().equals( vector.getExpressionExperiment().getId() ) ) ) {
throw new IllegalArgumentException( "All vectors have to have the same ee for this constructor. ee1: "
+ this.eevo.getId() + " ee2: " + vector.getExpressionExperiment().getId() );
}
String color = null;
// log.info( vector + " GENES=" + StringUtils.join( vector.getGenes(), ',' ) );
for ( Gene g : genes ) {
if ( vector.getGenes().contains( g ) ) {
if ( color != null ) {
/*
* Special color to denote probes that hyb to both genes.
*/
color = "#CCCCCC";
if ( log.isDebugEnabled() )
log.debug( "EE: " + eevo.getId() + "; Probe: " + vector.getDesignElement().getName()
+ " (id=" + vector.getDesignElement().getId()
+ ") matches more than one of the genes" );
} else {
color = colorMap.get( g.getId() );
}
}
}
int valid = 1;
if ( validatedProbeList != null && validatedProbeList.contains( vector.getDesignElement().getId() ) ) {
valid = 2;
}
GeneExpressionProfile profile = new GeneExpressionProfile( vector, color, valid );
profiles.add( profile );
}
}
/**
* @param vectors from a single expression experiment.
 * @param genes Is a list so that order is guaranteed. Need this so that colors are consistent. Query gene is always
* black, coexpressed is always red.
* @throws IllegalArgumentException if vectors are mixed between EEs.
*/
public VisualizationValueObject( Collection<DoubleVectorValueObject> vectors, List<Gene> genes ) {
this();
int i = 0;
if ( genes.size() > colors.length ) {
// / FIXME
}
for ( Gene g : genes ) {
log.debug( "Gene: " + g.getName() + " color=" + colors[i] );
colorMap.put( g.getId(), colors[i] );
i++;
}
for ( DoubleVectorValueObject vector : vectors ) {
if ( this.eevo == null ) {
setEE( vector.getExpressionExperiment() );
} else if ( !( this.eevo.getId().equals( vector.getExpressionExperiment().getId() ) ) ) {
throw new IllegalArgumentException( "All vectors have to have the same ee for this constructor. ee1: "
+ this.eevo.getId() + " ee2: " + vector.getExpressionExperiment().getId() );
}
String color = null;
// log.info( vector + " GENES=" + StringUtils.join( vector.getGenes(), ',' ) );
for ( Gene g : genes ) {
if ( vector.getGenes().contains( g ) ) {
if ( color != null ) {
/*
* Special color to denote probes that hyb to both genes.
*/
color = "#CCCCCC";
if ( log.isDebugEnabled() )
log.debug( "EE: " + eevo.getId() + "; Probe: " + vector.getDesignElement().getName()
+ " (id=" + vector.getDesignElement().getId()
+ ") matches more than one of the genes" );
} else {
color = colorMap.get( g.getId() );
}
}
}
GeneExpressionProfile profile = new GeneExpressionProfile( vector, color, 1 );
profiles.add( profile );
}
}
/**
* @param dvvo
*/
public VisualizationValueObject( DoubleVectorValueObject dvvo ) {
this();
setEE( dvvo.getExpressionExperiment() );
GeneExpressionProfile profile = new GeneExpressionProfile( dvvo, null, 0 );
profiles.add( profile );
}
// ---------------------------------
// Getters and Setters
// ---------------------------------
public ExpressionExperimentValueObject getEevo() {
return eevo;
}
public void setEE( ExpressionExperiment ee ) {
this.eevo = new ExpressionExperimentValueObject();
this.eevo.setId( ee.getId() );
this.eevo.setName( ee.getName() );
this.eevo.setShortName( ee.getShortName() );
this.eevo.setClazz( "ExpressionExperimentValueObject" );
}
public void setEevo( ExpressionExperimentValueObject eevo ) {
this.eevo = eevo;
}
public Collection<GeneExpressionProfile> getProfiles() {
return profiles;
}
public void setProfiles( Collection<GeneExpressionProfile> profiles ) {
this.profiles = profiles;
}
}
|
add field for min-pvalue
|
gemma-web/src/main/java/ubic/gemma/web/controller/visualization/VisualizationValueObject.java
|
add field for min-pvalue
|
|
Java
|
apache-2.0
|
acdd16b5b18aeb8b5e57c6a59654132cc43a96a5
| 0
|
cscorley/solr-only-mirror,cscorley/solr-only-mirror,cscorley/solr-only-mirror
|
package org.apache.solr.client.solrj.impl;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeoutException;
import org.apache.http.client.HttpClient;
import org.apache.solr.client.solrj.ResponseParser;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.IsUpdateRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.cloud.Aliases;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.DocRouter;
import org.apache.solr.common.cloud.ImplicitDocRouter;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.ZkCoreNodeProps;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.cloud.ZooKeeperException;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SolrjNamedThreadFactory;
import org.apache.solr.common.util.StrUtils;
import org.apache.zookeeper.KeeperException;
/**
* SolrJ client class to communicate with SolrCloud.
* Instances of this class communicate with Zookeeper to discover
* Solr endpoints for SolrCloud collections, and then use the
* {@link LBHttpSolrServer} to issue requests.
*
* This class assumes the id field for your documents is called
* 'id' - if this is not the case, you must set the right name
* with {@link #setIdField(String)}.
*/
public class CloudSolrServer extends SolrServer {
private volatile ZkStateReader zkStateReader;
private String zkHost; // the zk server address
private int zkConnectTimeout = 10000;
private int zkClientTimeout = 10000;
private volatile String defaultCollection;
private final LBHttpSolrServer lbServer;
private final boolean shutdownLBHttpSolrServer;
private HttpClient myClient;
Random rand = new Random();
private Object cachLock = new Object();
// since the state shouldn't change often, should be very cheap reads
private Map<String,List<String>> urlLists = new HashMap<String,List<String>>();
private Map<String,List<String>> leaderUrlLists = new HashMap<String,List<String>>();
private Map<String,List<String>> replicasLists = new HashMap<String,List<String>>();
private volatile int lastClusterStateHashCode;
private final boolean updatesToLeaders;
private boolean parallelUpdates = true;
private ExecutorService threadPool = Executors
.newCachedThreadPool(new SolrjNamedThreadFactory(
"CloudSolrServer ThreadPool"));
private String idField = "id";
private final Set<String> NON_ROUTABLE_PARAMS;
{
NON_ROUTABLE_PARAMS = new HashSet<String>();
NON_ROUTABLE_PARAMS.add(UpdateParams.EXPUNGE_DELETES);
NON_ROUTABLE_PARAMS.add(UpdateParams.MAX_OPTIMIZE_SEGMENTS);
NON_ROUTABLE_PARAMS.add(UpdateParams.COMMIT);
NON_ROUTABLE_PARAMS.add(UpdateParams.WAIT_SEARCHER);
NON_ROUTABLE_PARAMS.add(UpdateParams.OPEN_SEARCHER);
NON_ROUTABLE_PARAMS.add(UpdateParams.SOFT_COMMIT);
NON_ROUTABLE_PARAMS.add(UpdateParams.PREPARE_COMMIT);
NON_ROUTABLE_PARAMS.add(UpdateParams.OPTIMIZE);
// Not supported via SolrCloud
// NON_ROUTABLE_PARAMS.add(UpdateParams.ROLLBACK);
}
/**
* @param zkHost The client endpoint of the zookeeper quorum containing the cloud state,
* in the form HOST:PORT.
*/
public CloudSolrServer(String zkHost) throws MalformedURLException {
this.zkHost = zkHost;
this.myClient = HttpClientUtil.createClient(null);
this.lbServer = new LBHttpSolrServer(myClient);
this.updatesToLeaders = true;
shutdownLBHttpSolrServer = true;
}
public CloudSolrServer(String zkHost, boolean updatesToLeaders)
throws MalformedURLException {
this.zkHost = zkHost;
this.myClient = HttpClientUtil.createClient(null);
this.lbServer = new LBHttpSolrServer(myClient);
this.updatesToLeaders = updatesToLeaders;
shutdownLBHttpSolrServer = true;
}
/**
* @param zkHost The client endpoint of the zookeeper quorum containing the cloud state,
* in the form HOST:PORT.
* @param lbServer LBHttpSolrServer instance for requests.
*/
public CloudSolrServer(String zkHost, LBHttpSolrServer lbServer) {
this.zkHost = zkHost;
this.lbServer = lbServer;
this.updatesToLeaders = true;
shutdownLBHttpSolrServer = false;
}
/**
* @param zkHost The client endpoint of the zookeeper quorum containing the cloud state,
* in the form HOST:PORT.
* @param lbServer LBHttpSolrServer instance for requests.
* @param updatesToLeaders sends updates only to leaders - defaults to true
*/
public CloudSolrServer(String zkHost, LBHttpSolrServer lbServer, boolean updatesToLeaders) {
this.zkHost = zkHost;
this.lbServer = lbServer;
this.updatesToLeaders = updatesToLeaders;
shutdownLBHttpSolrServer = false;
}
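/*
 * A minimal usage sketch (illustrative only; the ZooKeeper address and the
 * collection name "collection1" are hypothetical, and exception handling is
 * omitted for brevity):
 *
 *   CloudSolrServer server = new CloudSolrServer("zkhost1:2181,zkhost2:2181");
 *   server.setDefaultCollection("collection1");
 *   SolrInputDocument doc = new SolrInputDocument();
 *   doc.addField("id", "doc-1");
 *   server.add(doc);
 *   server.commit();
 *   server.shutdown();
 */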
public ResponseParser getParser() {
return lbServer.getParser();
}
/**
* Note: This setter method is <b>not thread-safe</b>.
*
* @param processor
* Default Response Parser chosen to parse the response if a parser
* is not specified as part of the request.
* @see org.apache.solr.client.solrj.SolrRequest#getResponseParser()
*/
public void setParser(ResponseParser processor) {
lbServer.setParser(processor);
}
public ZkStateReader getZkStateReader() {
return zkStateReader;
}
/**
* @param idField the field to route documents on.
*/
public void setIdField(String idField) {
this.idField = idField;
}
/**
* @return the field that updates are routed on.
*/
public String getIdField() {
return idField;
}
/** Sets the default collection for requests */
public void setDefaultCollection(String collection) {
this.defaultCollection = collection;
}
/** Gets the default collection for requests */
public String getDefaultCollection() {
return defaultCollection;
}
/** Set the connect timeout to the zookeeper ensemble in ms */
public void setZkConnectTimeout(int zkConnectTimeout) {
this.zkConnectTimeout = zkConnectTimeout;
}
/** Set the client timeout to the zookeeper ensemble in ms */
public void setZkClientTimeout(int zkClientTimeout) {
this.zkClientTimeout = zkClientTimeout;
}
/**
* Connect to the zookeeper ensemble.
* This is an optional method that may be used to force a connect before any other requests are sent.
*
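* For example (illustrative; the ZooKeeper address is an assumption):
* <pre>
*   CloudSolrServer server = new CloudSolrServer("zkhost1:2181");
*   server.connect(); // throws a ZooKeeperException now if the ensemble is unreachable
*   // ... issue requests ...
* </pre>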
*/
public void connect() {
if (zkStateReader == null) {
synchronized (this) {
if (zkStateReader == null) {
try {
ZkStateReader zk = new ZkStateReader(zkHost, zkConnectTimeout,
zkClientTimeout);
zk.createClusterStateWatchersAndUpdate();
zkStateReader = zk;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
"", e);
} catch (KeeperException e) {
throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
"", e);
} catch (IOException e) {
throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
"", e);
} catch (TimeoutException e) {
throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
"", e);
}
}
}
}
}
public void setParallelUpdates(boolean parallelUpdates) {
this.parallelUpdates = parallelUpdates;
}
private NamedList directUpdate(AbstractUpdateRequest request, ClusterState clusterState) throws SolrServerException {
UpdateRequest updateRequest = (UpdateRequest) request;
ModifiableSolrParams params = (ModifiableSolrParams) request.getParams();
ModifiableSolrParams routableParams = new ModifiableSolrParams();
ModifiableSolrParams nonRoutableParams = new ModifiableSolrParams();
if(params != null) {
nonRoutableParams.add(params);
routableParams.add(params);
for(String param : NON_ROUTABLE_PARAMS) {
routableParams.remove(param);
}
}
String collection = nonRoutableParams.get("collection", defaultCollection);
if (collection == null) {
throw new SolrServerException("No collection param specified on request and no default collection has been set.");
}
//Check to see if the collection is an alias.
Aliases aliases = zkStateReader.getAliases();
if(aliases != null) {
Map<String, String> collectionAliases = aliases.getCollectionAliasMap();
if(collectionAliases != null && collectionAliases.containsKey(collection)) {
collection = collectionAliases.get(collection);
}
}
DocCollection col = clusterState.getCollection(collection);
DocRouter router = col.getRouter();
if (router instanceof ImplicitDocRouter) {
// short circuit as optimization
return null;
}
//Create the URL map, which is keyed on slice name.
//The value is a list of URLs for each replica in the slice.
//The first value in the list is the leader for the slice.
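//For example (hypothetical hosts and collection name, shown for illustration only):
//  "shard1" -> [ "http://host1:8983/solr/collection1",   (leader)
//                "http://host2:8983/solr/collection1" ]  (replica)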
Map<String,List<String>> urlMap = buildUrlMap(col);
if (urlMap == null) {
// we could not find a leader yet - use unoptimized general path
return null;
}
NamedList exceptions = new NamedList();
NamedList shardResponses = new NamedList();
Map<String, LBHttpSolrServer.Req> routes = updateRequest.getRoutes(router, col, urlMap, routableParams, this.idField);
if (routes == null) {
return null;
}
long start = System.nanoTime();
if (parallelUpdates) {
final Map<String, Future<NamedList<?>>> responseFutures = new HashMap<String, Future<NamedList<?>>>(routes.size());
for (final Map.Entry<String, LBHttpSolrServer.Req> entry : routes.entrySet()) {
final String url = entry.getKey();
final LBHttpSolrServer.Req lbRequest = entry.getValue();
responseFutures.put(url, threadPool.submit(new Callable<NamedList<?>>() {
@Override
public NamedList<?> call() throws Exception {
return lbServer.request(lbRequest).getResponse();
}
}));
}
for (final Map.Entry<String, Future<NamedList<?>>> entry: responseFutures.entrySet()) {
final String url = entry.getKey();
final Future<NamedList<?>> responseFuture = entry.getValue();
try {
shardResponses.add(url, responseFuture.get());
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
} catch (ExecutionException e) {
exceptions.add(url, e.getCause());
}
}
if (exceptions.size() > 0) {
throw new RouteException(ErrorCode.SERVER_ERROR, exceptions, routes);
}
} else {
for (Map.Entry<String, LBHttpSolrServer.Req> entry : routes.entrySet()) {
String url = entry.getKey();
LBHttpSolrServer.Req lbRequest = entry.getValue();
try {
NamedList rsp = lbServer.request(lbRequest).getResponse();
shardResponses.add(url, rsp);
} catch (Exception e) {
throw new SolrServerException(e);
}
}
}
UpdateRequest nonRoutableRequest = null;
List<String> deleteQuery = updateRequest.getDeleteQuery();
if (deleteQuery != null && deleteQuery.size() > 0) {
UpdateRequest deleteQueryRequest = new UpdateRequest();
deleteQueryRequest.setDeleteQuery(deleteQuery);
nonRoutableRequest = deleteQueryRequest;
}
Set<String> paramNames = nonRoutableParams.getParameterNames();
Set<String> intersection = new HashSet<String>(paramNames);
intersection.retainAll(NON_ROUTABLE_PARAMS);
if (nonRoutableRequest != null || intersection.size() > 0) {
if (nonRoutableRequest == null) {
nonRoutableRequest = new UpdateRequest();
}
nonRoutableRequest.setParams(nonRoutableParams);
List<String> urlList = new ArrayList<String>();
urlList.addAll(routes.keySet());
Collections.shuffle(urlList, rand);
LBHttpSolrServer.Req req = new LBHttpSolrServer.Req(nonRoutableRequest, urlList);
try {
LBHttpSolrServer.Rsp rsp = lbServer.request(req);
shardResponses.add(urlList.get(0), rsp.getResponse());
} catch (Exception e) {
throw new SolrException(ErrorCode.SERVER_ERROR, urlList.get(0), e);
}
}
long end = System.nanoTime();
RouteResponse rr = condenseResponse(shardResponses, (long)((end - start)/1000000));
rr.setRouteResponses(shardResponses);
rr.setRoutes(routes);
return rr;
}
private Map<String,List<String>> buildUrlMap(DocCollection col) {
Map<String, List<String>> urlMap = new HashMap<String, List<String>>();
Collection<Slice> slices = col.getActiveSlices();
Iterator<Slice> sliceIterator = slices.iterator();
while (sliceIterator.hasNext()) {
Slice slice = sliceIterator.next();
String name = slice.getName();
List<String> urls = new ArrayList<String>();
Replica leader = slice.getLeader();
if (leader == null) {
// take unoptimized general path - we cannot find a leader yet
return null;
}
ZkCoreNodeProps zkProps = new ZkCoreNodeProps(leader);
String url = zkProps.getBaseUrl() + "/" + col.getName();
urls.add(url);
Collection<Replica> replicas = slice.getReplicas();
Iterator<Replica> replicaIterator = replicas.iterator();
while (replicaIterator.hasNext()) {
Replica replica = replicaIterator.next();
if (!replica.getNodeName().equals(leader.getNodeName()) &&
!replica.getName().equals(leader.getName())) {
ZkCoreNodeProps zkProps1 = new ZkCoreNodeProps(replica);
String url1 = zkProps1.getBaseUrl() + "/" + col.getName();
urls.add(url1);
}
}
urlMap.put(name, urls);
}
return urlMap;
}
public RouteResponse condenseResponse(NamedList response, long timeMillis) {
RouteResponse condensed = new RouteResponse();
int status = 0;
for(int i=0; i<response.size(); i++) {
NamedList shardResponse = (NamedList)response.getVal(i);
NamedList header = (NamedList)shardResponse.get("responseHeader");
Integer shardStatus = (Integer)header.get("status");
int s = shardStatus.intValue();
if(s > 0) {
status = s;
}
}
NamedList cheader = new NamedList();
cheader.add("status", status);
cheader.add("QTime", timeMillis);
condensed.add("responseHeader", cheader);
return condensed;
}
class RouteResponse extends NamedList {
private NamedList routeResponses;
private Map<String, LBHttpSolrServer.Req> routes;
public void setRouteResponses(NamedList routeResponses) {
this.routeResponses = routeResponses;
}
public NamedList getRouteResponses() {
return routeResponses;
}
public void setRoutes(Map<String, LBHttpSolrServer.Req> routes) {
this.routes = routes;
}
public Map<String, LBHttpSolrServer.Req> getRoutes() {
return routes;
}
}
class RouteException extends SolrException {
private NamedList exceptions;
private Map<String, LBHttpSolrServer.Req> routes;
public RouteException(ErrorCode errorCode, NamedList exceptions, Map<String, LBHttpSolrServer.Req> routes){
super(errorCode, ((Exception)exceptions.getVal(0)).getMessage(), (Exception)exceptions.getVal(0));
this.exceptions = exceptions;
this.routes = routes;
}
public NamedList getExceptions() {
return exceptions;
}
public Map<String, LBHttpSolrServer.Req> getRoutes() {
return this.routes;
}
}
@Override
public NamedList<Object> request(SolrRequest request)
throws SolrServerException, IOException {
connect();
ClusterState clusterState = zkStateReader.getClusterState();
boolean sendToLeaders = false;
List<String> replicas = null;
if (request instanceof IsUpdateRequest) {
if(request instanceof UpdateRequest) {
NamedList response = directUpdate((AbstractUpdateRequest)request,clusterState);
if(response != null) {
return response;
}
}
sendToLeaders = true;
replicas = new ArrayList<String>();
}
SolrParams reqParams = request.getParams();
if (reqParams == null) {
reqParams = new ModifiableSolrParams();
}
List<String> theUrlList = new ArrayList<String>();
if (request.getPath().equals("/admin/collections") || request.getPath().equals("/admin/cores")) {
Set<String> liveNodes = clusterState.getLiveNodes();
for (String liveNode : liveNodes) {
int splitPointBetweenHostPortAndContext = liveNode.indexOf("_");
theUrlList.add("http://"
+ liveNode.substring(0, splitPointBetweenHostPortAndContext) + "/"
+ URLDecoder.decode(liveNode, "UTF-8").substring(splitPointBetweenHostPortAndContext + 1));
}
} else {
String collection = reqParams.get("collection", defaultCollection);
if (collection == null) {
throw new SolrServerException(
"No collection param specified on request and no default collection has been set.");
}
Set<String> collectionsList = getCollectionList(clusterState, collection);
if (collectionsList.size() == 0) {
throw new SolrException(ErrorCode.BAD_REQUEST, "Could not find collection: " + collection);
}
collection = collectionsList.iterator().next();
StringBuilder collectionString = new StringBuilder();
Iterator<String> it = collectionsList.iterator();
for (int i = 0; i < collectionsList.size(); i++) {
String col = it.next();
collectionString.append(col);
if (i < collectionsList.size() - 1) {
collectionString.append(",");
}
}
// TODO: not a big deal because of the caching, but we could avoid looking
// at every shard
// when getting leaders if we tweaked some things
// Retrieve slices from the cloud state and, for each collection
// specified,
// add it to the Map of slices.
Map<String,Slice> slices = new HashMap<String,Slice>();
for (String collectionName : collectionsList) {
Collection<Slice> colSlices = clusterState.getActiveSlices(collectionName);
if (colSlices == null) {
throw new SolrServerException("Could not find collection:" + collectionName);
}
ClientUtils.addSlices(slices, collectionName, colSlices, true);
}
Set<String> liveNodes = clusterState.getLiveNodes();
synchronized (cachLock) {
List<String> leaderUrlList = leaderUrlLists.get(collection);
List<String> urlList = urlLists.get(collection);
List<String> replicasList = replicasLists.get(collection);
if ((sendToLeaders && leaderUrlList == null)
|| (!sendToLeaders && urlList == null)
|| clusterState.hashCode() != this.lastClusterStateHashCode) {
// build a map of unique nodes
// TODO: allow filtering by group, role, etc
Map<String,ZkNodeProps> nodes = new HashMap<String,ZkNodeProps>();
List<String> urlList2 = new ArrayList<String>();
for (Slice slice : slices.values()) {
for (ZkNodeProps nodeProps : slice.getReplicasMap().values()) {
ZkCoreNodeProps coreNodeProps = new ZkCoreNodeProps(nodeProps);
String node = coreNodeProps.getNodeName();
if (!liveNodes.contains(coreNodeProps.getNodeName())
|| !coreNodeProps.getState().equals(ZkStateReader.ACTIVE)) continue;
if (nodes.put(node, nodeProps) == null) {
if (!sendToLeaders
|| (sendToLeaders && coreNodeProps.isLeader())) {
String url = coreNodeProps.getCoreUrl();
urlList2.add(url);
} else if (sendToLeaders) {
String url = coreNodeProps.getCoreUrl();
replicas.add(url);
}
}
}
}
if (sendToLeaders) {
this.leaderUrlLists.put(collection, urlList2);
leaderUrlList = urlList2;
this.replicasLists.put(collection, replicas);
replicasList = replicas;
} else {
this.urlLists.put(collection, urlList2);
urlList = urlList2;
}
this.lastClusterStateHashCode = clusterState.hashCode();
}
if (sendToLeaders) {
theUrlList = new ArrayList<String>(leaderUrlList.size());
theUrlList.addAll(leaderUrlList);
} else {
theUrlList = new ArrayList<String>(urlList.size());
theUrlList.addAll(urlList);
}
Collections.shuffle(theUrlList, rand);
if (sendToLeaders) {
ArrayList<String> theReplicas = new ArrayList<String>(
replicasList.size());
theReplicas.addAll(replicasList);
Collections.shuffle(theReplicas, rand);
// System.out.println("leaders:" + theUrlList);
// System.out.println("replicas:" + theReplicas);
theUrlList.addAll(theReplicas);
}
}
}
// System.out.println("########################## MAKING REQUEST TO " +
// theUrlList);
LBHttpSolrServer.Req req = new LBHttpSolrServer.Req(request, theUrlList);
LBHttpSolrServer.Rsp rsp = lbServer.request(req);
return rsp.getResponse();
}
private Set<String> getCollectionList(ClusterState clusterState,
String collection) {
// Extract each comma separated collection name and store in a List.
List<String> rawCollectionsList = StrUtils.splitSmart(collection, ",", true);
Set<String> collectionsList = new HashSet<String>();
// validate collections
for (String collectionName : rawCollectionsList) {
if (!clusterState.getCollections().contains(collectionName)) {
Aliases aliases = zkStateReader.getAliases();
String alias = aliases.getCollectionAlias(collectionName);
if (alias != null) {
List<String> aliasList = StrUtils.splitSmart(alias, ",", true);
collectionsList.addAll(aliasList);
continue;
}
throw new SolrException(ErrorCode.BAD_REQUEST, "Collection not found: " + collectionName);
}
collectionsList.add(collectionName);
}
return collectionsList;
}
@Override
public void shutdown() {
if (zkStateReader != null) {
synchronized(this) {
if (zkStateReader!= null)
zkStateReader.close();
zkStateReader = null;
}
}
if (shutdownLBHttpSolrServer) {
lbServer.shutdown();
}
if (myClient!=null) {
myClient.getConnectionManager().shutdown();
}
if(this.threadPool != null && !this.threadPool.isShutdown()) {
this.threadPool.shutdown();
}
}
public LBHttpSolrServer getLbServer() {
return lbServer;
}
public boolean isUpdatesToLeaders() {
return updatesToLeaders;
}
// for tests
Map<String,List<String>> getUrlLists() {
return urlLists;
}
//for tests
Map<String,List<String>> getLeaderUrlLists() {
return leaderUrlLists;
}
//for tests
Map<String,List<String>> getReplicasLists() {
return replicasLists;
}
}
|
solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrServer.java
|
package org.apache.solr.client.solrj.impl;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeoutException;
import org.apache.http.client.HttpClient;
import org.apache.solr.client.solrj.ResponseParser;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.IsUpdateRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.cloud.Aliases;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.DocRouter;
import org.apache.solr.common.cloud.ImplicitDocRouter;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.ZkCoreNodeProps;
import org.apache.solr.common.cloud.ZkNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.cloud.ZooKeeperException;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SolrjNamedThreadFactory;
import org.apache.solr.common.util.StrUtils;
import org.apache.zookeeper.KeeperException;
/**
* SolrJ client class to communicate with SolrCloud.
* Instances of this class communicate with Zookeeper to discover
* Solr endpoints for SolrCloud collections, and then use the
* {@link LBHttpSolrServer} to issue requests.
*
* This class assumes the id field for your documents is called
* 'id' - if this is not the case, you must set the right name
* with {@link #setIdField(String)}.
*/
public class CloudSolrServer extends SolrServer {
private volatile ZkStateReader zkStateReader;
private String zkHost; // the zk server address
private int zkConnectTimeout = 10000;
private int zkClientTimeout = 10000;
private volatile String defaultCollection;
private final LBHttpSolrServer lbServer;
private final boolean shutdownLBHttpSolrServer;
private HttpClient myClient;
Random rand = new Random();
private Object cachLock = new Object();
// since the state shouldn't change often, should be very cheap reads
private Map<String,List<String>> urlLists = new HashMap<String,List<String>>();
private Map<String,List<String>> leaderUrlLists = new HashMap<String,List<String>>();
private Map<String,List<String>> replicasLists = new HashMap<String,List<String>>();
private volatile int lastClusterStateHashCode;
private final boolean updatesToLeaders;
private boolean parallelUpdates = true;
private ExecutorService threadPool = Executors
.newCachedThreadPool(new SolrjNamedThreadFactory(
"CloudSolrServer ThreadPool"));
private String idField = "id";
private final Set<String> NON_ROUTABLE_PARAMS;
{
NON_ROUTABLE_PARAMS = new HashSet<String>();
NON_ROUTABLE_PARAMS.add(UpdateParams.EXPUNGE_DELETES);
NON_ROUTABLE_PARAMS.add(UpdateParams.MAX_OPTIMIZE_SEGMENTS);
NON_ROUTABLE_PARAMS.add(UpdateParams.COMMIT);
NON_ROUTABLE_PARAMS.add(UpdateParams.WAIT_SEARCHER);
NON_ROUTABLE_PARAMS.add(UpdateParams.OPEN_SEARCHER);
NON_ROUTABLE_PARAMS.add(UpdateParams.SOFT_COMMIT);
NON_ROUTABLE_PARAMS.add(UpdateParams.PREPARE_COMMIT);
NON_ROUTABLE_PARAMS.add(UpdateParams.OPTIMIZE);
// Not supported via SolrCloud
// NON_ROUTABLE_PARAMS.add(UpdateParams.ROLLBACK);
}
/**
* @param zkHost The client endpoint of the zookeeper quorum containing the cloud state,
* in the form HOST:PORT.
*/
public CloudSolrServer(String zkHost) throws MalformedURLException {
this.zkHost = zkHost;
this.myClient = HttpClientUtil.createClient(null);
this.lbServer = new LBHttpSolrServer(myClient);
this.updatesToLeaders = true;
shutdownLBHttpSolrServer = true;
}
public CloudSolrServer(String zkHost, boolean updatesToLeaders)
throws MalformedURLException {
this.zkHost = zkHost;
this.myClient = HttpClientUtil.createClient(null);
this.lbServer = new LBHttpSolrServer(myClient);
this.updatesToLeaders = updatesToLeaders;
shutdownLBHttpSolrServer = true;
}
/**
* @param zkHost The client endpoint of the zookeeper quorum containing the cloud state,
* in the form HOST:PORT.
* @param lbServer LBHttpSolrServer instance for requests.
*/
public CloudSolrServer(String zkHost, LBHttpSolrServer lbServer) {
this.zkHost = zkHost;
this.lbServer = lbServer;
this.updatesToLeaders = true;
shutdownLBHttpSolrServer = false;
}
/**
* @param zkHost The client endpoint of the zookeeper quorum containing the cloud state,
* in the form HOST:PORT.
* @param lbServer LBHttpSolrServer instance for requests.
* @param updatesToLeaders sends updates only to leaders - defaults to true
*/
public CloudSolrServer(String zkHost, LBHttpSolrServer lbServer, boolean updatesToLeaders) {
this.zkHost = zkHost;
this.lbServer = lbServer;
this.updatesToLeaders = updatesToLeaders;
shutdownLBHttpSolrServer = false;
}
public ResponseParser getParser() {
return lbServer.getParser();
}
/**
* Note: This setter method is <b>not thread-safe</b>.
*
* @param processor
* Default Response Parser chosen to parse the response if a parser
* is not specified as part of the request.
* @see org.apache.solr.client.solrj.SolrRequest#getResponseParser()
*/
public void setParser(ResponseParser processor) {
lbServer.setParser(processor);
}
public ZkStateReader getZkStateReader() {
return zkStateReader;
}
/**
* @param idField the field to route documents on.
*/
public void setIdField(String idField) {
this.idField = idField;
}
/**
* @return the field that updates are routed on.
*/
public String getIdField() {
return idField;
}
/** Sets the default collection for requests */
public void setDefaultCollection(String collection) {
this.defaultCollection = collection;
}
/** Gets the default collection for requests */
public String getDefaultCollection() {
return defaultCollection;
}
/** Set the connect timeout to the zookeeper ensemble in ms */
public void setZkConnectTimeout(int zkConnectTimeout) {
this.zkConnectTimeout = zkConnectTimeout;
}
/** Set the client timeout to the zookeeper ensemble in ms */
public void setZkClientTimeout(int zkClientTimeout) {
this.zkClientTimeout = zkClientTimeout;
}
/**
* Connect to the zookeeper ensemble.
* This is an optional method that may be used to force a connect before any other requests are sent.
*
*/
public void connect() {
if (zkStateReader == null) {
synchronized (this) {
if (zkStateReader == null) {
try {
ZkStateReader zk = new ZkStateReader(zkHost, zkConnectTimeout,
zkClientTimeout);
zk.createClusterStateWatchersAndUpdate();
zkStateReader = zk;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
"", e);
} catch (KeeperException e) {
throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
"", e);
} catch (IOException e) {
throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
"", e);
} catch (TimeoutException e) {
throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
"", e);
}
}
}
}
}
public void setParallelUpdates(boolean parallelUpdates) {
this.parallelUpdates = parallelUpdates;
}
private NamedList directUpdate(AbstractUpdateRequest request, ClusterState clusterState) throws SolrServerException {
UpdateRequest updateRequest = (UpdateRequest) request;
ModifiableSolrParams params = (ModifiableSolrParams) request.getParams();
ModifiableSolrParams routableParams = new ModifiableSolrParams();
ModifiableSolrParams nonRoutableParams = new ModifiableSolrParams();
if(params != null) {
nonRoutableParams.add(params);
routableParams.add(params);
for(String param : NON_ROUTABLE_PARAMS) {
routableParams.remove(param);
}
}
String collection = nonRoutableParams.get("collection", defaultCollection);
if (collection == null) {
throw new SolrServerException("No collection param specified on request and no default collection has been set.");
}
//Check to see if the collection is an alias.
Aliases aliases = zkStateReader.getAliases();
if(aliases != null) {
Map<String, String> collectionAliases = aliases.getCollectionAliasMap();
if(collectionAliases != null && collectionAliases.containsKey(collection)) {
collection = collectionAliases.get(collection);
}
}
DocCollection col = clusterState.getCollection(collection);
DocRouter router = col.getRouter();
if (router instanceof ImplicitDocRouter) {
// short circuit as optimization
return null;
}
//Create the URL map, which is keyed on slice name.
//The value is a list of URLs for each replica in the slice.
//The first value in the list is the leader for the slice.
Map<String,List<String>> urlMap = buildUrlMap(col);
NamedList exceptions = new NamedList();
NamedList shardResponses = new NamedList();
Map<String, LBHttpSolrServer.Req> routes = updateRequest.getRoutes(router, col, urlMap, routableParams, this.idField);
if (routes == null) {
return null;
}
long start = System.nanoTime();
if (parallelUpdates) {
final Map<String, Future<NamedList<?>>> responseFutures = new HashMap<String, Future<NamedList<?>>>();
for (final Map.Entry<String, LBHttpSolrServer.Req> entry : routes.entrySet()) {
final String url = entry.getKey();
final LBHttpSolrServer.Req lbRequest = entry.getValue();
responseFutures.put(url, threadPool.submit(new Callable<NamedList<?>>() {
@Override
public NamedList<?> call() throws Exception {
return lbServer.request(lbRequest).getResponse();
}
}));
}
for (final Map.Entry<String, Future<NamedList<?>>> entry: responseFutures.entrySet()) {
final String url = entry.getKey();
final Future<NamedList<?>> responseFuture = entry.getValue();
try {
shardResponses.add(url, responseFuture.get());
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
} catch (ExecutionException e) {
exceptions.add(url, e.getCause());
}
}
if (exceptions.size() > 0) {
throw new RouteException(ErrorCode.SERVER_ERROR, exceptions, routes);
}
} else {
for (Map.Entry<String, LBHttpSolrServer.Req> entry : routes.entrySet()) {
String url = entry.getKey();
LBHttpSolrServer.Req lbRequest = entry.getValue();
try {
NamedList rsp = lbServer.request(lbRequest).getResponse();
shardResponses.add(url, rsp);
} catch (Exception e) {
throw new SolrServerException(e);
}
}
}
UpdateRequest nonRoutableRequest = null;
List<String> deleteQuery = updateRequest.getDeleteQuery();
if (deleteQuery != null && deleteQuery.size() > 0) {
UpdateRequest deleteQueryRequest = new UpdateRequest();
deleteQueryRequest.setDeleteQuery(deleteQuery);
nonRoutableRequest = deleteQueryRequest;
}
Set<String> paramNames = nonRoutableParams.getParameterNames();
Set<String> intersection = new HashSet<String>(paramNames);
intersection.retainAll(NON_ROUTABLE_PARAMS);
if (nonRoutableRequest != null || intersection.size() > 0) {
if (nonRoutableRequest == null) {
nonRoutableRequest = new UpdateRequest();
}
nonRoutableRequest.setParams(nonRoutableParams);
List<String> urlList = new ArrayList<String>();
urlList.addAll(routes.keySet());
Collections.shuffle(urlList, rand);
LBHttpSolrServer.Req req = new LBHttpSolrServer.Req(nonRoutableRequest, urlList);
try {
LBHttpSolrServer.Rsp rsp = lbServer.request(req);
shardResponses.add(urlList.get(0), rsp.getResponse());
} catch (Exception e) {
throw new SolrException(ErrorCode.SERVER_ERROR, urlList.get(0), e);
}
}
long end = System.nanoTime();
RouteResponse rr = condenseResponse(shardResponses, (long)((end - start)/1000000));
rr.setRouteResponses(shardResponses);
rr.setRoutes(routes);
return rr;
}
private Map<String,List<String>> buildUrlMap(DocCollection col) {
Map<String, List<String>> urlMap = new HashMap<String, List<String>>();
Collection<Slice> slices = col.getActiveSlices();
Iterator<Slice> sliceIterator = slices.iterator();
while (sliceIterator.hasNext()) {
Slice slice = sliceIterator.next();
String name = slice.getName();
List<String> urls = new ArrayList<String>();
Replica leader = slice.getLeader();
ZkCoreNodeProps zkProps = new ZkCoreNodeProps(leader);
String url = zkProps.getBaseUrl() + "/" + col.getName();
urls.add(url);
Collection<Replica> replicas = slice.getReplicas();
Iterator<Replica> replicaIterator = replicas.iterator();
while (replicaIterator.hasNext()) {
Replica replica = replicaIterator.next();
if (!replica.getNodeName().equals(leader.getNodeName()) &&
!replica.getName().equals(leader.getName())) {
ZkCoreNodeProps zkProps1 = new ZkCoreNodeProps(replica);
String url1 = zkProps1.getBaseUrl() + "/" + col.getName();
urls.add(url1);
}
}
urlMap.put(name, urls);
}
return urlMap;
}
public RouteResponse condenseResponse(NamedList response, long timeMillis) {
RouteResponse condensed = new RouteResponse();
int status = 0;
for(int i=0; i<response.size(); i++) {
NamedList shardResponse = (NamedList)response.getVal(i);
NamedList header = (NamedList)shardResponse.get("responseHeader");
Integer shardStatus = (Integer)header.get("status");
int s = shardStatus.intValue();
if(s > 0) {
status = s;
}
}
NamedList cheader = new NamedList();
cheader.add("status", status);
cheader.add("QTime", timeMillis);
condensed.add("responseHeader", cheader);
return condensed;
}
class RouteResponse extends NamedList {
private NamedList routeResponses;
private Map<String, LBHttpSolrServer.Req> routes;
public void setRouteResponses(NamedList routeResponses) {
this.routeResponses = routeResponses;
}
public NamedList getRouteResponses() {
return routeResponses;
}
public void setRoutes(Map<String, LBHttpSolrServer.Req> routes) {
this.routes = routes;
}
public Map<String, LBHttpSolrServer.Req> getRoutes() {
return routes;
}
}
class RouteException extends SolrException {
private NamedList exceptions;
private Map<String, LBHttpSolrServer.Req> routes;
public RouteException(ErrorCode errorCode, NamedList exceptions, Map<String, LBHttpSolrServer.Req> routes){
super(errorCode, ((Exception)exceptions.getVal(0)).getMessage(), (Exception)exceptions.getVal(0));
this.exceptions = exceptions;
this.routes = routes;
}
public NamedList getExceptions() {
return exceptions;
}
public Map<String, LBHttpSolrServer.Req> getRoutes() {
return this.routes;
}
}
@Override
public NamedList<Object> request(SolrRequest request)
throws SolrServerException, IOException {
connect();
ClusterState clusterState = zkStateReader.getClusterState();
boolean sendToLeaders = false;
List<String> replicas = null;
if (request instanceof IsUpdateRequest) {
if(request instanceof UpdateRequest) {
NamedList response = directUpdate((AbstractUpdateRequest)request,clusterState);
if(response != null) {
return response;
}
}
sendToLeaders = true;
replicas = new ArrayList<String>();
}
SolrParams reqParams = request.getParams();
if (reqParams == null) {
reqParams = new ModifiableSolrParams();
}
List<String> theUrlList = new ArrayList<String>();
if (request.getPath().equals("/admin/collections") || request.getPath().equals("/admin/cores")) {
Set<String> liveNodes = clusterState.getLiveNodes();
for (String liveNode : liveNodes) {
int splitPointBetweenHostPortAndContext = liveNode.indexOf("_");
theUrlList.add("http://"
+ liveNode.substring(0, splitPointBetweenHostPortAndContext) + "/"
+ URLDecoder.decode(liveNode, "UTF-8").substring(splitPointBetweenHostPortAndContext + 1));
}
} else {
String collection = reqParams.get("collection", defaultCollection);
if (collection == null) {
throw new SolrServerException(
"No collection param specified on request and no default collection has been set.");
}
Set<String> collectionsList = getCollectionList(clusterState, collection);
if (collectionsList.size() == 0) {
throw new SolrException(ErrorCode.BAD_REQUEST, "Could not find collection: " + collection);
}
collection = collectionsList.iterator().next();
StringBuilder collectionString = new StringBuilder();
Iterator<String> it = collectionsList.iterator();
for (int i = 0; i < collectionsList.size(); i++) {
String col = it.next();
collectionString.append(col);
if (i < collectionsList.size() - 1) {
collectionString.append(",");
}
}
// TODO: not a big deal because of the caching, but we could avoid looking
// at every shard
// when getting leaders if we tweaked some things
// Retrieve slices from the cloud state and, for each collection
// specified,
// add it to the Map of slices.
Map<String,Slice> slices = new HashMap<String,Slice>();
for (String collectionName : collectionsList) {
Collection<Slice> colSlices = clusterState.getActiveSlices(collectionName);
if (colSlices == null) {
throw new SolrServerException("Could not find collection:" + collectionName);
}
ClientUtils.addSlices(slices, collectionName, colSlices, true);
}
Set<String> liveNodes = clusterState.getLiveNodes();
synchronized (cachLock) {
List<String> leaderUrlList = leaderUrlLists.get(collection);
List<String> urlList = urlLists.get(collection);
List<String> replicasList = replicasLists.get(collection);
if ((sendToLeaders && leaderUrlList == null)
|| (!sendToLeaders && urlList == null)
|| clusterState.hashCode() != this.lastClusterStateHashCode) {
// build a map of unique nodes
// TODO: allow filtering by group, role, etc
Map<String,ZkNodeProps> nodes = new HashMap<String,ZkNodeProps>();
List<String> urlList2 = new ArrayList<String>();
for (Slice slice : slices.values()) {
for (ZkNodeProps nodeProps : slice.getReplicasMap().values()) {
ZkCoreNodeProps coreNodeProps = new ZkCoreNodeProps(nodeProps);
String node = coreNodeProps.getNodeName();
if (!liveNodes.contains(coreNodeProps.getNodeName())
|| !coreNodeProps.getState().equals(ZkStateReader.ACTIVE)) continue;
if (nodes.put(node, nodeProps) == null) {
if (!sendToLeaders
|| (sendToLeaders && coreNodeProps.isLeader())) {
String url = coreNodeProps.getCoreUrl();
urlList2.add(url);
} else if (sendToLeaders) {
String url = coreNodeProps.getCoreUrl();
replicas.add(url);
}
}
}
}
if (sendToLeaders) {
this.leaderUrlLists.put(collection, urlList2);
leaderUrlList = urlList2;
this.replicasLists.put(collection, replicas);
replicasList = replicas;
} else {
this.urlLists.put(collection, urlList2);
urlList = urlList2;
}
this.lastClusterStateHashCode = clusterState.hashCode();
}
if (sendToLeaders) {
theUrlList = new ArrayList<String>(leaderUrlList.size());
theUrlList.addAll(leaderUrlList);
} else {
theUrlList = new ArrayList<String>(urlList.size());
theUrlList.addAll(urlList);
}
Collections.shuffle(theUrlList, rand);
if (sendToLeaders) {
ArrayList<String> theReplicas = new ArrayList<String>(
replicasList.size());
theReplicas.addAll(replicasList);
Collections.shuffle(theReplicas, rand);
// System.out.println("leaders:" + theUrlList);
// System.out.println("replicas:" + theReplicas);
theUrlList.addAll(theReplicas);
}
}
}
// System.out.println("########################## MAKING REQUEST TO " +
// theUrlList);
LBHttpSolrServer.Req req = new LBHttpSolrServer.Req(request, theUrlList);
LBHttpSolrServer.Rsp rsp = lbServer.request(req);
return rsp.getResponse();
}
private Set<String> getCollectionList(ClusterState clusterState,
String collection) {
// Extract each comma separated collection name and store in a List.
List<String> rawCollectionsList = StrUtils.splitSmart(collection, ",", true);
Set<String> collectionsList = new HashSet<String>();
// validate collections
for (String collectionName : rawCollectionsList) {
if (!clusterState.getCollections().contains(collectionName)) {
Aliases aliases = zkStateReader.getAliases();
String alias = aliases.getCollectionAlias(collectionName);
if (alias != null) {
List<String> aliasList = StrUtils.splitSmart(alias, ",", true);
collectionsList.addAll(aliasList);
continue;
}
throw new SolrException(ErrorCode.BAD_REQUEST, "Collection not found: " + collectionName);
}
collectionsList.add(collectionName);
}
return collectionsList;
}
@Override
public void shutdown() {
if (zkStateReader != null) {
synchronized(this) {
if (zkStateReader!= null)
zkStateReader.close();
zkStateReader = null;
}
}
if (shutdownLBHttpSolrServer) {
lbServer.shutdown();
}
if (myClient!=null) {
myClient.getConnectionManager().shutdown();
}
if(this.threadPool != null && !this.threadPool.isShutdown()) {
this.threadPool.shutdown();
}
}
public LBHttpSolrServer getLbServer() {
return lbServer;
}
public boolean isUpdatesToLeaders() {
return updatesToLeaders;
}
// for tests
Map<String,List<String>> getUrlLists() {
return urlLists;
}
//for tests
Map<String,List<String>> getLeaderUrlLists() {
return leaderUrlLists;
}
//for tests
Map<String,List<String>> getReplicasLists() {
return replicasLists;
}
}
|
SOLR-4816: deal with leader=null case and init map with known size
git-svn-id: 308d55f399f3bd9aa0560a10e81a003040006c48@1524170 13f79535-47bb-0310-9956-ffa450edef68
|
solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrServer.java
|
SOLR-4816: deal with leader=null case and init map with known size
|
|
Java
|
apache-2.0
|
c87602b05304439859b6b31d43252b96a9d90482
| 0
|
etnetera/jmeter,kyroskoh/jmeter,tuanhq/jmeter,ubikloadpack/jmeter,irfanah/jmeter,d0k1/jmeter,kyroskoh/jmeter,tuanhq/jmeter,ra0077/jmeter,ubikfsabbe/jmeter,vherilier/jmeter,liwangbest/jmeter,thomsonreuters/jmeter,fj11/jmeter,ThiagoGarciaAlves/jmeter,vherilier/jmeter,kyroskoh/jmeter,ThiagoGarciaAlves/jmeter,ubikloadpack/jmeter,hemikak/jmeter,d0k1/jmeter,ThiagoGarciaAlves/jmeter,d0k1/jmeter,thomsonreuters/jmeter,etnetera/jmeter,max3163/jmeter,vherilier/jmeter,etnetera/jmeter,d0k1/jmeter,irfanah/jmeter,ra0077/jmeter,tuanhq/jmeter,hizhangqi/jmeter-1,irfanah/jmeter,ubikloadpack/jmeter,fj11/jmeter,etnetera/jmeter,ra0077/jmeter,max3163/jmeter,ubikfsabbe/jmeter,hizhangqi/jmeter-1,ra0077/jmeter,kschroeder/jmeter,vherilier/jmeter,max3163/jmeter,DoctorQ/jmeter,ubikloadpack/jmeter,kschroeder/jmeter,fj11/jmeter,ubikfsabbe/jmeter,hemikak/jmeter,ubikfsabbe/jmeter,hemikak/jmeter,thomsonreuters/jmeter,liwangbest/jmeter,hizhangqi/jmeter-1,hemikak/jmeter,DoctorQ/jmeter,liwangbest/jmeter,DoctorQ/jmeter,kschroeder/jmeter,max3163/jmeter,etnetera/jmeter
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.protocol.http.sampler;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.net.URL;
import java.util.Locale;
import org.apache.jmeter.engine.util.ValueReplacer;
import org.apache.jmeter.junit.JMeterTestCase;
import org.apache.jmeter.protocol.http.control.HttpMirrorServer;
import org.apache.jmeter.protocol.http.control.TestHTTPMirrorThread;
import org.apache.jmeter.protocol.http.util.EncoderCache;
import org.apache.jmeter.protocol.http.util.HTTPArgument;
import org.apache.jmeter.protocol.http.util.HTTPFileArg;
import org.apache.jmeter.testelement.TestPlan;
import org.apache.jmeter.threads.JMeterContextService;
import org.apache.jmeter.threads.JMeterVariables;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.oro.text.regex.MatchResult;
import org.apache.oro.text.regex.Pattern;
import org.apache.oro.text.regex.PatternMatcherInput;
import org.apache.oro.text.regex.Perl5Compiler;
import org.apache.oro.text.regex.Perl5Matcher;
import junit.framework.Test;
import junit.framework.TestSuite;
import junit.extensions.TestSetup;
/**
* Class for performing actual samples for HTTPSampler and HTTPSampler2.
* The samples are executed against the HttpMirrorServer, which is
* started when the unit tests are executed.
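*
* Most of the individual tests follow the same pattern, sketched here with
* illustrative field names and encodings (the helper methods used below are
* defined elsewhere in this class):
* <pre>
*   HTTPSamplerBase sampler = createHttpSampler(HTTP_SAMPLER);
*   setupUrl(sampler, "UTF-8");
*   setupFormData(sampler, false, "title", "mytitle", "description", "mydescription");
*   HTTPSampleResult res = executeSampler(sampler);
*   checkPostRequestUrlEncoded(sampler, res, ISO_8859_1, "UTF-8",
*       "title", "mytitle", "description", "mydescription", false);
* </pre>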
*/
public class TestHTTPSamplersAgainstHttpMirrorServer extends JMeterTestCase {
private final static int HTTP_SAMPLER = 0;
private final static int HTTP_SAMPLER2 = 1;
private final static int HTTP_SAMPLER3 = 2;
/** The encodings used for http headers and control information */
private final static String ISO_8859_1 = "ISO-8859-1"; // $NON-NLS-1$
private static final String US_ASCII = "US-ASCII"; // $NON-NLS-1$
private static final byte[] CRLF = { 0x0d, 0x0A };
private static final int MIRROR_PORT = 8081; // Different from TestHTTPMirrorThread port
private static byte[] TEST_FILE_CONTENT;
private static File temporaryFile;
private final int item;
public TestHTTPSamplersAgainstHttpMirrorServer(String arg0) {
super(arg0);
this.item = -1;
}
// additional ctor for processing tests which use int parameters
public TestHTTPSamplersAgainstHttpMirrorServer(String arg0, int item) {
super(arg0);
this.item = item;
}
// This is used to emulate @before class and @after class
public static Test suite(){
final TestSuite testSuite = new TestSuite(TestHTTPSamplersAgainstHttpMirrorServer.class);
// Add parameterised tests. For simplicity we assume each has cases 0-10
for(int i=0; i<11; i++) {
testSuite.addTest(new TestHTTPSamplersAgainstHttpMirrorServer("itemised_testGetRequest_Parameters", i));
testSuite.addTest(new TestHTTPSamplersAgainstHttpMirrorServer("itemised_testGetRequest_Parameters2", i));
testSuite.addTest(new TestHTTPSamplersAgainstHttpMirrorServer("itemised_testGetRequest_Parameters3", i));
testSuite.addTest(new TestHTTPSamplersAgainstHttpMirrorServer("itemised_testPostRequest_UrlEncoded", i));
testSuite.addTest(new TestHTTPSamplersAgainstHttpMirrorServer("itemised_testPostRequest_UrlEncoded2", i));
testSuite.addTest(new TestHTTPSamplersAgainstHttpMirrorServer("itemised_testPostRequest_UrlEncoded3", i));
}
TestSetup setup = new TestSetup(testSuite){
private HttpMirrorServer httpServer;
@Override
protected void setUp() throws Exception {
httpServer = TestHTTPMirrorThread.startHttpMirror(MIRROR_PORT);
// Create the test file content
TEST_FILE_CONTENT = "some foo content &?=01234+56789-\u007c\u2aa1\u266a\u0153\u20a1\u0115\u0364\u00c5\u2052\uc385%C3%85".getBytes("UTF-8");
// Create a temporary file to make sure we always have a file to give to the PostWriter,
// wherever we are or whatever the current path is.
temporaryFile = File.createTempFile("TestHTTPSamplersAgainstHttpMirrorServer", "tmp");
OutputStream output = new FileOutputStream(temporaryFile);
output.write(TEST_FILE_CONTENT);
output.flush();
output.close();
}
@Override
protected void tearDown() throws Exception {
// Shutdown mirror server
httpServer.stopServer();
httpServer = null;
// delete temporary file
if(!temporaryFile.delete()) {
fail("Could not delete file:"+temporaryFile.getAbsolutePath());
}
}
};
return setup;
}
public void itemised_testPostRequest_UrlEncoded() throws Exception {
testPostRequest_UrlEncoded(HTTP_SAMPLER, ISO_8859_1, item);
}
public void itemised_testPostRequest_UrlEncoded2() throws Exception {
testPostRequest_UrlEncoded(HTTP_SAMPLER2, US_ASCII, item);
}
public void itemised_testPostRequest_UrlEncoded3() throws Exception {
testPostRequest_UrlEncoded(HTTP_SAMPLER3, US_ASCII, item);
}
public void testPostRequest_FormMultipart_0() throws Exception {
testPostRequest_FormMultipart(HTTP_SAMPLER, ISO_8859_1);
}
public void testPostRequest_FormMultipart2() throws Exception {
testPostRequest_FormMultipart(HTTP_SAMPLER2, US_ASCII);
}
public void testPostRequest_FormMultipart3() throws Exception {
testPostRequest_FormMultipart(HTTP_SAMPLER3, US_ASCII);
}
public void testPostRequest_FileUpload() throws Exception {
testPostRequest_FileUpload(HTTP_SAMPLER, ISO_8859_1);
}
public void testPostRequest_FileUpload2() throws Exception {
testPostRequest_FileUpload(HTTP_SAMPLER2, US_ASCII);
}
public void testPostRequest_FileUpload3() throws Exception {
testPostRequest_FileUpload(HTTP_SAMPLER3, US_ASCII);
}
public void testPostRequest_BodyFromParameterValues() throws Exception {
testPostRequest_BodyFromParameterValues(HTTP_SAMPLER, ISO_8859_1);
}
public void testPostRequest_BodyFromParameterValues2() throws Exception {
testPostRequest_BodyFromParameterValues(HTTP_SAMPLER2, US_ASCII);
}
public void testPostRequest_BodyFromParameterValues3() throws Exception {
testPostRequest_BodyFromParameterValues(HTTP_SAMPLER3, US_ASCII);
}
public void testGetRequest() throws Exception {
testGetRequest(HTTP_SAMPLER);
}
public void testGetRequest2() throws Exception {
testGetRequest(HTTP_SAMPLER2);
}
public void testGetRequest3() throws Exception {
testGetRequest(HTTP_SAMPLER3);
}
public void itemised_testGetRequest_Parameters() throws Exception {
testGetRequest_Parameters(HTTP_SAMPLER, item);
}
public void itemised_testGetRequest_Parameters2() throws Exception {
testGetRequest_Parameters(HTTP_SAMPLER2, item);
}
public void itemised_testGetRequest_Parameters3() throws Exception {
testGetRequest_Parameters(HTTP_SAMPLER3, item);
}
private void testPostRequest_UrlEncoded(int samplerType, String samplerDefaultEncoding, int test) throws Exception {
String titleField = "title";
String titleValue = "mytitle";
String descriptionField = "description";
String descriptionValue = "mydescription";
HTTPSamplerBase sampler = createHttpSampler(samplerType);
HTTPSampleResult res;
String contentEncoding;
switch(test) {
case 0:
// Test sending data with default encoding
contentEncoding = "";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 1:
// Test sending data as ISO-8859-1
contentEncoding = ISO_8859_1;
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 2:
// Test sending data as UTF-8
contentEncoding = "UTF-8";
titleValue = "mytitle2\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription2\u0153\u20a1\u0115\u00c5";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 3:
// Test sending data as UTF-8, with values that will change when urlencoded
contentEncoding = "UTF-8";
titleValue = "mytitle3/=";
descriptionValue = "mydescription3 /\\";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 4:
// Test sending data as UTF-8, with values that have been urlencoded
contentEncoding = "UTF-8";
titleValue = "mytitle4%2F%3D";
descriptionValue = "mydescription4+++%2F%5C";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, true, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, true);
break;
case 5:
// Test sending data as UTF-8, with values similar to __VIEWSTATE parameter that .net uses
contentEncoding = "UTF-8";
titleValue = "/wEPDwULLTE2MzM2OTA0NTYPZBYCAgMPZ/rA+8DZ2dnZ2dnZ2d/GNDar6OshPwdJc=";
descriptionValue = "mydescription5";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 6:
// Test sending data as UTF-8, with values similar to __VIEWSTATE parameter that .net uses,
// with values urlencoded, but the always encode set to false for the arguments
// This is how the HTTP Proxy server adds arguments to the sampler
contentEncoding = "UTF-8";
titleValue = "%2FwEPDwULLTE2MzM2OTA0NTYPZBYCAgMPZ%2FrA%2B8DZ2dnZ2dnZ2d%2FGNDar6OshPwdJc%3D";
descriptionValue = "mydescription6";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
res = executeSampler(sampler);
assertFalse(((HTTPArgument)sampler.getArguments().getArgument(0)).isAlwaysEncoded());
assertFalse(((HTTPArgument)sampler.getArguments().getArgument(1)).isAlwaysEncoded());
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, true);
break;
case 7:
// Test sending data as UTF-8, where user defined variables are used
// to set the value for form data
JMeterUtils.setLocale(Locale.ENGLISH);
TestPlan testPlan = new TestPlan();
JMeterVariables vars = new JMeterVariables();
vars.put("title_prefix", "a test\u00c5");
vars.put("description_suffix", "the_end");
JMeterContextService.getContext().setVariables(vars);
JMeterContextService.getContext().setSamplingStarted(true);
ValueReplacer replacer = new ValueReplacer();
replacer.setUserDefinedVariables(testPlan.getUserDefinedVariables());
contentEncoding = "UTF-8";
titleValue = "${title_prefix}mytitle7\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription7\u0153\u20a1\u0115\u00c5${description_suffix}";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
// Replace the variables in the sampler
replacer.replaceValues(sampler);
res = executeSampler(sampler);
String expectedTitleValue = "a test\u00c5mytitle7\u0153\u20a1\u0115\u00c5";
String expectedDescriptionValue = "mydescription7\u0153\u20a1\u0115\u00c5the_end";
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, expectedTitleValue, descriptionField, expectedDescriptionValue, false);
break;
case 8:
break;
case 9:
break;
case 10:
break;
default:
fail("Unexpected switch value: "+test);
}
}
private void testPostRequest_FormMultipart(int samplerType, String samplerDefaultEncoding) throws Exception {
String titleField = "title";
String titleValue = "mytitle";
String descriptionField = "description";
String descriptionValue = "mydescription";
// Test sending data with default encoding
HTTPSamplerBase sampler = createHttpSampler(samplerType);
String contentEncoding = "";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
sampler.setDoMultipartPost(true);
HTTPSampleResult res = executeSampler(sampler);
checkPostRequestFormMultipart(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue);
// Test sending data as ISO-8859-1
sampler = createHttpSampler(samplerType);
contentEncoding = ISO_8859_1;
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
sampler.setDoMultipartPost(true);
res = executeSampler(sampler);
checkPostRequestFormMultipart(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue);
// Test sending data as UTF-8
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription\u0153\u20a1\u0115\u00c5";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
sampler.setDoMultipartPost(true);
res = executeSampler(sampler);
checkPostRequestFormMultipart(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue);
// Test sending data as UTF-8, with values that would have been urlencoded
// if it was not sent as multipart
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle/=";
descriptionValue = "mydescription /\\";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
sampler.setDoMultipartPost(true);
res = executeSampler(sampler);
checkPostRequestFormMultipart(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue);
// Test sending data as UTF-8, with values that have been urlencoded
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle%2F%3D";
descriptionValue = "mydescription+++%2F%5C";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, true, titleField, titleValue, descriptionField, descriptionValue);
sampler.setDoMultipartPost(true);
res = executeSampler(sampler);
String expectedTitleValue = "mytitle/=";
String expectedDescriptionValue = "mydescription /\\";
checkPostRequestFormMultipart(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, expectedTitleValue, descriptionField, expectedDescriptionValue);
// Test sending data as UTF-8, with values similar to __VIEWSTATE parameter that .net uses
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "/wEPDwULLTE2MzM2OTA0NTYPZBYCAgMPZ/rA+8DZ2dnZ2dnZ2d/GNDar6OshPwdJc=";
descriptionValue = "mydescription";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
sampler.setDoMultipartPost(true);
res = executeSampler(sampler);
checkPostRequestFormMultipart(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue);
// Test sending data as UTF-8, where user defined variables are used
// to set the value for form data
JMeterUtils.setLocale(Locale.ENGLISH);
TestPlan testPlan = new TestPlan();
JMeterVariables vars = new JMeterVariables();
vars.put("title_prefix", "a test\u00c5");
vars.put("description_suffix", "the_end");
JMeterContextService.getContext().setVariables(vars);
JMeterContextService.getContext().setSamplingStarted(true);
ValueReplacer replacer = new ValueReplacer();
replacer.setUserDefinedVariables(testPlan.getUserDefinedVariables());
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "${title_prefix}mytitle\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription\u0153\u20a1\u0115\u00c5${description_suffix}";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
sampler.setDoMultipartPost(true);
// Replace the variables in the sampler
replacer.replaceValues(sampler);
res = executeSampler(sampler);
expectedTitleValue = "a test\u00c5mytitle\u0153\u20a1\u0115\u00c5";
expectedDescriptionValue = "mydescription\u0153\u20a1\u0115\u00c5the_end";
checkPostRequestFormMultipart(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, expectedTitleValue, descriptionField, expectedDescriptionValue);
}
private void testPostRequest_FileUpload(int samplerType, String samplerDefaultEncoding) throws Exception {
String titleField = "title";
String titleValue = "mytitle";
String descriptionField = "description";
String descriptionValue = "mydescription";
String fileField = "file1";
String fileMimeType = "text/plain";
// Test sending data with default encoding
HTTPSamplerBase sampler = createHttpSampler(samplerType);
String contentEncoding = "";
setupUrl(sampler, contentEncoding);
setupFileUploadData(sampler, false, titleField, titleValue, descriptionField, descriptionValue, fileField, temporaryFile, fileMimeType);
HTTPSampleResult res = executeSampler(sampler);
checkPostRequestFileUpload(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, fileField, temporaryFile, fileMimeType, TEST_FILE_CONTENT);
// Test sending data as ISO-8859-1
sampler = createHttpSampler(samplerType);
contentEncoding = ISO_8859_1;
setupUrl(sampler, contentEncoding);
setupFileUploadData(sampler, false, titleField, titleValue, descriptionField, descriptionValue, fileField, temporaryFile, fileMimeType);
res = executeSampler(sampler);
checkPostRequestFileUpload(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, fileField, temporaryFile, fileMimeType, TEST_FILE_CONTENT);
// Test sending data as UTF-8
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription\u0153\u20a1\u0115\u00c5";
setupUrl(sampler, contentEncoding);
setupFileUploadData(sampler, false, titleField, titleValue, descriptionField, descriptionValue, fileField, temporaryFile, fileMimeType);
res = executeSampler(sampler);
checkPostRequestFileUpload(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, fileField, temporaryFile, fileMimeType, TEST_FILE_CONTENT);
}
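/**
 * Test POST requests where the body is built from the parameter values only,
 * i.e. the parameters have no names
 */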
private void testPostRequest_BodyFromParameterValues(int samplerType, String samplerDefaultEncoding) throws Exception {
final String titleField = ""; // ensure only values are used
String titleValue = "mytitle";
final String descriptionField = ""; // ensure only values are used
String descriptionValue = "mydescription";
// Test sending data with default encoding
HTTPSamplerBase sampler = createHttpSampler(samplerType);
String contentEncoding = "";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
HTTPSampleResult res = executeSampler(sampler);
String expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as ISO-8859-1
sampler = createHttpSampler(samplerType);
contentEncoding = ISO_8859_1;
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
res = executeSampler(sampler);
expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription\u0153\u20a1\u0115\u00c5";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
res = executeSampler(sampler);
expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8, with values that will change when urlencoded
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle/=";
descriptionValue = "mydescription /\\";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
res = executeSampler(sampler);
expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8, with values that will change when urlencoded, and where
// we tell the sampler to urlencode the parameter value
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle/=";
descriptionValue = "mydescription /\\";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, true, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
expectedPostBody = URLEncoder.encode(titleValue + descriptionValue, contentEncoding);
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8, with values that have been urlencoded
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle%2F%3D";
descriptionValue = "mydescription+++%2F%5C";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
res = executeSampler(sampler);
expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8, with values that have been urlencoded, and
// where we tell the sampler to urlencode the parameter values
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle%2F%3D";
descriptionValue = "mydescription+++%2F%5C";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, true, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8, with values similar to __VIEWSTATE parameter that .net uses
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "/wEPDwULLTE2MzM2OTA0NTYPZBYCAgMPZ/rA+8DZ2dnZ2dnZ2d/GNDar6OshPwdJc=";
descriptionValue = "mydescription";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
res = executeSampler(sampler);
expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8, with + as part of the value,
// where the value is set in the sampler as not urlencoded, but the
// alwaysEncoded flag of the argument is set to false.
// This mimics the HTTPSamplerBase.addNonEncodedArgument, which the
// Proxy server calls in some cases
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle++";
descriptionValue = "mydescription+";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
res = executeSampler(sampler);
expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8, where user defined variables are used
// to set the value for form data
JMeterUtils.setLocale(Locale.ENGLISH);
TestPlan testPlan = new TestPlan();
JMeterVariables vars = new JMeterVariables();
vars.put("title_prefix", "a test\u00c5");
vars.put("description_suffix", "the_end");
JMeterContextService.getContext().setVariables(vars);
JMeterContextService.getContext().setSamplingStarted(true);
ValueReplacer replacer = new ValueReplacer();
replacer.setUserDefinedVariables(testPlan.getUserDefinedVariables());
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "${title_prefix}mytitle\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription\u0153\u20a1\u0115\u00c5${description_suffix}";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
// Replace the variables in the sampler
replacer.replaceValues(sampler);
res = executeSampler(sampler);
String expectedTitleValue = "a test\u00c5mytitle\u0153\u20a1\u0115\u00c5";
String expectedDescriptionValue = "mydescription\u0153\u20a1\u0115\u00c5the_end";
expectedPostBody = expectedTitleValue + expectedDescriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
}
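/**
 * Test simple GET requests without parameters, using the default,
 * ISO-8859-1 and UTF-8 content encodings
 */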
private void testGetRequest(int samplerType) throws Exception {
// Test sending simple HTTP get
// Test sending data with default encoding
HTTPSamplerBase sampler = createHttpSampler(samplerType);
String contentEncoding = "";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
HTTPSampleResult res = executeSampler(sampler);
checkGetRequest(sampler, res);
// Test sending data with ISO-8859-1 encoding
sampler = createHttpSampler(samplerType);
contentEncoding = ISO_8859_1;
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
res = executeSampler(sampler);
checkGetRequest(sampler, res);
// Test sending data with UTF-8 encoding
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
res = executeSampler(sampler);
checkGetRequest(sampler, res);
}
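/**
 * Test GET requests with query parameters; the test parameter selects which case to run
 */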
private void testGetRequest_Parameters(int samplerType, int test) throws Exception {
String titleField = "title";
String titleValue = "mytitle";
String descriptionField = "description";
String descriptionValue = "mydescription";
HTTPSamplerBase sampler = createHttpSampler(samplerType);
String contentEncoding;
HTTPSampleResult res;
URL executedUrl;
switch(test) {
case 0:
// Test sending simple HTTP get
// Test sending data with default encoding
contentEncoding = "";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
sampler.setRunningVersion(true);
executedUrl = sampler.getUrl();
sampler.setRunningVersion(false);
checkGetRequest_Parameters(sampler, res, contentEncoding, executedUrl, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 1:
// Test sending data with ISO-8859-1 encoding
sampler = createHttpSampler(samplerType);
contentEncoding = ISO_8859_1;
titleValue = "mytitle1\uc385";
descriptionValue = "mydescription1\uc385";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
sampler.setRunningVersion(true);
executedUrl = sampler.getUrl();
sampler.setRunningVersion(false);
checkGetRequest_Parameters(sampler, res, contentEncoding, executedUrl, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 2:
// Test sending data with UTF-8 encoding
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle2\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription2\u0153\u20a1\u0115\u00c5";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
sampler.setRunningVersion(true);
executedUrl = sampler.getUrl();
sampler.setRunningVersion(false);
checkGetRequest_Parameters(sampler, res, contentEncoding, executedUrl, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 3:
// Test sending data as UTF-8, with values that changes when urlencoded
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle3\u0153+\u20a1 \u0115&yes\u00c5";
descriptionValue = "mydescription3 \u0153 \u20a1 \u0115 \u00c5";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
sampler.setRunningVersion(true);
executedUrl = sampler.getUrl();
sampler.setRunningVersion(false);
checkGetRequest_Parameters(sampler, res, contentEncoding, executedUrl, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 4:
// Test sending data as UTF-8, with values that have been urlencoded
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle4%2F%3D";
descriptionValue = "mydescription4+++%2F%5C";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
setupFormData(sampler, true, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
sampler.setRunningVersion(true);
executedUrl = sampler.getUrl();
sampler.setRunningVersion(false);
checkGetRequest_Parameters(sampler, res, contentEncoding, executedUrl, titleField, titleValue, descriptionField, descriptionValue, true);
break;
case 5:
// Test sending data as UTF-8, where user defined variables are used
// to set the value for form data
JMeterUtils.setLocale(Locale.ENGLISH);
TestPlan testPlan = new TestPlan();
JMeterVariables vars = new JMeterVariables();
vars.put("title_prefix", "a test\u00c5");
vars.put("description_suffix", "the_end");
JMeterContextService.getContext().setVariables(vars);
JMeterContextService.getContext().setSamplingStarted(true);
ValueReplacer replacer = new ValueReplacer();
replacer.setUserDefinedVariables(testPlan.getUserDefinedVariables());
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "${title_prefix}mytitle5\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription5\u0153\u20a1\u0115\u00c5${description_suffix}";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
// Replace the variables in the sampler
replacer.replaceValues(sampler);
res = executeSampler(sampler);
String expectedTitleValue = "a test\u00c5mytitle5\u0153\u20a1\u0115\u00c5";
String expectedDescriptionValue = "mydescription5\u0153\u20a1\u0115\u00c5the_end";
sampler.setRunningVersion(true);
executedUrl = sampler.getUrl();
sampler.setRunningVersion(false);
checkGetRequest_Parameters(sampler, res, contentEncoding, executedUrl, titleField, expectedTitleValue, descriptionField, expectedDescriptionValue, false);
break;
case 6:
break;
case 7:
break;
case 8:
break;
case 9:
break;
case 10:
break;
default:
fail("Unexpected switch value: "+test);
}
}
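/**
 * Run the sampler as if it was executing in a test thread, and return the sample result
 */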
private HTTPSampleResult executeSampler(HTTPSamplerBase sampler) {
sampler.setRunningVersion(true);
sampler.threadStarted();
HTTPSampleResult res = (HTTPSampleResult) sampler.sample();
sampler.threadFinished();
sampler.setRunningVersion(false);
return res;
}
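/**
 * Check an application/x-www-form-urlencoded POST request: the expected body is built
 * from the title and description values, urlencoding them unless they are already encoded
 */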
private void checkPostRequestUrlEncoded(
HTTPSamplerBase sampler,
HTTPSampleResult res,
String samplerDefaultEncoding,
String contentEncoding,
String titleField,
String titleValue,
String descriptionField,
String descriptionValue,
boolean valuesAlreadyUrlEncoded) throws IOException {
if(contentEncoding == null || contentEncoding.length() == 0) {
contentEncoding = samplerDefaultEncoding;
}
// Check URL
assertEquals(sampler.getUrl(), res.getURL());
String expectedPostBody = null;
if(!valuesAlreadyUrlEncoded) {
String expectedTitle = URLEncoder.encode(titleValue, contentEncoding);
String expectedDescription = URLEncoder.encode(descriptionValue, contentEncoding);
expectedPostBody = titleField + "=" + expectedTitle + "&" + descriptionField + "=" + expectedDescription;
}
else {
expectedPostBody = titleField + "=" + titleValue + "&" + descriptionField + "=" + descriptionValue;
}
// Check the request
checkPostRequestBody(
sampler,
res,
samplerDefaultEncoding,
contentEncoding,
expectedPostBody
);
}
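/**
 * Check a multipart/form-data POST request: the expected multipart body is compared with
 * both the recorded request and the data echoed back by the mirror server
 */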
private void checkPostRequestFormMultipart(
HTTPSamplerBase sampler,
HTTPSampleResult res,
String samplerDefaultEncoding,
String contentEncoding,
String titleField,
String titleValue,
String descriptionField,
String descriptionValue) throws IOException {
if(contentEncoding == null || contentEncoding.length() == 0) {
contentEncoding = samplerDefaultEncoding;
}
// Check URL
assertEquals(sampler.getUrl(), res.getURL());
String boundaryString = getBoundaryStringFromContentType(res.getRequestHeaders());
assertNotNull(boundaryString);
byte[] expectedPostBody = createExpectedFormdataOutput(boundaryString, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, true, true);
// Check request headers
checkHeaderTypeLength(res.getRequestHeaders(), "multipart/form-data" + "; boundary=" + boundaryString, expectedPostBody.length);
// Check post body from the result query string
checkArraysHaveSameContent(expectedPostBody, res.getQueryString().getBytes(contentEncoding), contentEncoding);
// Find the data sent to the mirror server, which the mirror server is sending back to us
String dataSentToMirrorServer = new String(res.getResponseData(), contentEncoding);
int posDividerHeadersAndBody = getPositionOfBody(dataSentToMirrorServer);
String headersSent = null;
String bodySent = "";
if(posDividerHeadersAndBody >= 0) {
headersSent = dataSentToMirrorServer.substring(0, posDividerHeadersAndBody);
// Skip the blank line with crlf dividing headers and body
bodySent = dataSentToMirrorServer.substring(posDividerHeadersAndBody+2);
}
else {
fail("No header and body section found");
}
// Check response headers
checkHeaderTypeLength(headersSent, "multipart/form-data" + "; boundary=" + boundaryString, expectedPostBody.length);
// Check post body which was sent to the mirror server, and
// sent back by the mirror server
checkArraysHaveSameContent(expectedPostBody, bodySent.getBytes(contentEncoding), contentEncoding);
// Check method, path and query sent
checkMethodPathQuery(headersSent, sampler.getMethod(), sampler.getPath(), null);
}
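/**
 * Check a multipart/form-data POST request containing a file upload: the expected body,
 * including the file content, is compared with the data echoed back by the mirror server
 */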
private void checkPostRequestFileUpload(
HTTPSamplerBase sampler,
HTTPSampleResult res,
String samplerDefaultEncoding,
String contentEncoding,
String titleField,
String titleValue,
String descriptionField,
String descriptionValue,
String fileField,
File fileValue,
String fileMimeType,
byte[] fileContent) throws IOException {
if(contentEncoding == null || contentEncoding.length() == 0) {
contentEncoding = samplerDefaultEncoding;
}
// Check URL
assertEquals(sampler.getUrl(), res.getURL());
String boundaryString = getBoundaryStringFromContentType(res.getRequestHeaders());
assertNotNull(boundaryString);
byte[] expectedPostBody = createExpectedFormAndUploadOutput(boundaryString, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, fileField, fileValue, fileMimeType, fileContent);
// Check request headers
checkHeaderTypeLength(res.getRequestHeaders(), "multipart/form-data" + "; boundary=" + boundaryString, expectedPostBody.length);
// We cannot check post body from the result query string, since that will not contain
// the actual file content, but placeholder text for file content
//checkArraysHaveSameContent(expectedPostBody, res.getQueryString().getBytes(contentEncoding));
// Find the data sent to the mirror server, which the mirror server is sending back to us
String headersSent = getHeadersSent(res.getResponseData());
if(headersSent == null) {
fail("No header and body section found");
}
// Check response headers
checkHeaderTypeLength(headersSent, "multipart/form-data" + "; boundary=" + boundaryString, expectedPostBody.length);
byte[] bodySent = getBodySent(res.getResponseData());
assertNotNull("Sent body should not be null", bodySent);
// Check post body which was sent to the mirror server, and
// sent back by the mirror server
checkArraysHaveSameContent(expectedPostBody, bodySent, contentEncoding);
// Check method, path and query sent
checkMethodPathQuery(headersSent, sampler.getMethod(), sampler.getPath(), null);
}
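/**
 * Check that the request headers, the recorded query string and the data echoed back
 * by the mirror server all match the expected post body
 */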
private void checkPostRequestBody(
HTTPSamplerBase sampler,
HTTPSampleResult res,
String samplerDefaultEncoding,
String contentEncoding,
String expectedPostBody) throws IOException {
if(contentEncoding == null || contentEncoding.length() == 0) {
contentEncoding = samplerDefaultEncoding;
}
// Check URL
assertEquals(sampler.getUrl(), res.getURL());
// Check request headers
checkHeaderTypeLength(res.getRequestHeaders(), HTTPSamplerBase.APPLICATION_X_WWW_FORM_URLENCODED, expectedPostBody.getBytes(contentEncoding).length);
// Check post body from the result query string
checkArraysHaveSameContent(expectedPostBody.getBytes(contentEncoding), res.getQueryString().getBytes(contentEncoding), contentEncoding);
// Find the data sent to the mirror server, which the mirror server is sending back to us
String dataSentToMirrorServer = new String(res.getResponseData(), contentEncoding);
int posDividerHeadersAndBody = getPositionOfBody(dataSentToMirrorServer);
String headersSent = null;
String bodySent = "";
if(posDividerHeadersAndBody >= 0) {
headersSent = dataSentToMirrorServer.substring(0, posDividerHeadersAndBody);
// Skip the blank line with crlf dividing headers and body
bodySent = dataSentToMirrorServer.substring(posDividerHeadersAndBody+2);
}
else {
fail("No header and body section found");
}
// Check response headers
checkHeaderTypeLength(headersSent, HTTPSamplerBase.APPLICATION_X_WWW_FORM_URLENCODED, expectedPostBody.getBytes(contentEncoding).length);
// Check post body which was sent to the mirror server, and
// sent back by the mirror server
checkArraysHaveSameContent(expectedPostBody.getBytes(contentEncoding), bodySent.getBytes(contentEncoding), contentEncoding);
// Check method, path and query sent
checkMethodPathQuery(headersSent, sampler.getMethod(), sampler.getPath(), null);
}
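/**
 * Check a simple GET request: correct URL and method, and no query string or body sent
 */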
private void checkGetRequest(
HTTPSamplerBase sampler,
HTTPSampleResult res
) throws IOException {
// Check URL
assertEquals(sampler.getUrl(), res.getURL());
// Check method
assertEquals(sampler.getMethod(), res.getHTTPMethod());
// Check that the query string is empty
assertEquals(0, res.getQueryString().length());
// Find the data sent to the mirror server, which the mirror server is sending back to us
String dataSentToMirrorServer = new String(res.getResponseData(), EncoderCache.URL_ARGUMENT_ENCODING);
int posDividerHeadersAndBody = getPositionOfBody(dataSentToMirrorServer);
String headersSent = null;
String bodySent = "";
if(posDividerHeadersAndBody >= 0) {
headersSent = dataSentToMirrorServer.substring(0, posDividerHeadersAndBody);
// Skip the blank line with crlf dividing headers and body
bodySent = dataSentToMirrorServer.substring(posDividerHeadersAndBody+2);
}
else {
fail("No header and body section found");
}
// No body should have been sent
assertEquals(bodySent.length(), 0);
// Check method, path and query sent
checkMethodPathQuery(headersSent, sampler.getMethod(), sampler.getPath(), null);
}
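/**
 * Check a GET request with parameters: correct URL and method, the expected query string,
 * and no body sent to the mirror server
 */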
private void checkGetRequest_Parameters(
HTTPSamplerBase sampler,
HTTPSampleResult res,
String contentEncoding,
URL executedUrl,
String titleField,
String titleValue,
String descriptionField,
String descriptionValue,
boolean valuesAlreadyUrlEncoded) throws IOException {
if(contentEncoding == null || contentEncoding.length() == 0) {
contentEncoding = EncoderCache.URL_ARGUMENT_ENCODING;
}
// Check URL
assertEquals(executedUrl, res.getURL());
// Check method
assertEquals(sampler.getMethod(), res.getHTTPMethod());
// Cannot check the query string of the result, because the mirror server
// replies without including query string in URL
String expectedQueryString = null;
if(!valuesAlreadyUrlEncoded) {
String expectedTitle = URLEncoder.encode(titleValue, contentEncoding);
String expectedDescription = URLEncoder.encode(descriptionValue, contentEncoding);
expectedQueryString = titleField + "=" + expectedTitle + "&" + descriptionField + "=" + expectedDescription;
}
else {
expectedQueryString = titleField + "=" + titleValue + "&" + descriptionField + "=" + descriptionValue;
}
// Find the data sent to the mirror server, which the mirror server is sending back to us
String dataSentToMirrorServer = new String(res.getResponseData(), EncoderCache.URL_ARGUMENT_ENCODING);
int posDividerHeadersAndBody = getPositionOfBody(dataSentToMirrorServer);
String headersSent = null;
String bodySent = "";
if(posDividerHeadersAndBody >= 0) {
headersSent = dataSentToMirrorServer.substring(0, posDividerHeadersAndBody);
// Skip the blank line with crlf dividing headers and body
bodySent = dataSentToMirrorServer.substring(posDividerHeadersAndBody+2);
}
else {
fail("No header and body section found in: ["+dataSentToMirrorServer+"]");
}
// No body should have been sent
assertEquals(bodySent.length(), 0);
// Check method, path and query sent
checkMethodPathQuery(headersSent, sampler.getMethod(), sampler.getPath(), expectedQueryString);
}
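/**
 * Check the method, path and query string in the request line echoed back by the mirror server
 */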
private void checkMethodPathQuery(
String headersSent,
String expectedMethod,
String expectedPath,
String expectedQueryString)
throws IOException {
// Check the Request URI sent to the mirror server, and
// sent back by the mirror server
int indexFirstSpace = headersSent.indexOf(" ");
int indexSecondSpace = headersSent.indexOf(" ", headersSent.length() > indexFirstSpace ? indexFirstSpace + 1 : indexFirstSpace);
if(indexFirstSpace <= 0 && indexSecondSpace <= 0 || indexFirstSpace == indexSecondSpace) {
fail("Could not find method and URI sent");
}
String methodSent = headersSent.substring(0, indexFirstSpace);
assertEquals(expectedMethod, methodSent);
String uriSent = headersSent.substring(indexFirstSpace + 1, indexSecondSpace);
int indexQueryStart = uriSent.indexOf("?");
if(expectedQueryString != null && expectedQueryString.length() > 0) {
// We should have a query string part
if(indexQueryStart <= 0 || (indexQueryStart == uriSent.length() - 1)) {
fail("Could not find query string in URI");
}
}
else {
if(indexQueryStart > 0) {
// We should not have a query string part
fail("Query string present in URI");
}
else {
indexQueryStart = uriSent.length();
}
}
// Check path
String pathSent = uriSent.substring(0, indexQueryStart);
assertEquals(expectedPath, pathSent);
// Check query
if(expectedQueryString != null && expectedQueryString.length() > 0) {
String queryStringSent = uriSent.substring(indexQueryStart + 1);
// Only the parameter values are encoded in the specified
// content encoding; the rest of the query is encoded in UTF-8.
// Therefore we compare the whole query using UTF-8
checkArraysHaveSameContent(expectedQueryString.getBytes(EncoderCache.URL_ARGUMENT_ENCODING), queryStringSent.getBytes(EncoderCache.URL_ARGUMENT_ENCODING), EncoderCache.URL_ARGUMENT_ENCODING);
}
}
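/**
 * Extract the request headers from the data echoed back by the mirror server,
 * or return null if no divider between headers and body is found
 */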
private String getHeadersSent(byte[] responseData) throws IOException {
// Find the data sent to the mirror server, which the mirror server is sending back to us
// We assume the headers are in ISO_8859_1, and the body can be in any content encoding.
String dataSentToMirrorServer = new String(responseData, ISO_8859_1);
int posDividerHeadersAndBody = getPositionOfBody(dataSentToMirrorServer);
String headersSent = null;
if(posDividerHeadersAndBody >= 0) {
headersSent = dataSentToMirrorServer.substring(0, posDividerHeadersAndBody);
}
return headersSent;
}
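/**
 * Extract the request body from the data echoed back by the mirror server,
 * using the Content-Length header to determine how many bytes to copy
 */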
private byte[] getBodySent(byte[] responseData) throws IOException {
// Find the data sent to the mirror server, which the mirror server is sending back to us
// We assume the headers are in ISO_8859_1, and the body can be in any content encoding.
// Therefore we get the data sent in ISO_8859_1, to be able to determine the end of the
// header part, and then we just construct a byte array to hold the body part, not taking
// encoding of the body into consideration, because it can contain file data, which is
// sent as raw byte data
byte[] bodySent = null;
String headersSent = getHeadersSent(responseData);
if(headersSent != null) {
// Get the content length, it tells us how much data to read
// TODO : Maybe support chunked encoding, then we cannot rely on content length
String contentLengthValue = getSentRequestHeaderValue(headersSent, HTTPSamplerBase.HEADER_CONTENT_LENGTH);
int contentLength = -1;
if(contentLengthValue != null) {
contentLength = Integer.parseInt(contentLengthValue);
}
else {
fail("Did not receive any content-length header");
}
bodySent = new byte[contentLength];
System.arraycopy(responseData, responseData.length - contentLength, bodySent, 0, contentLength);
}
return bodySent;
}
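/**
 * Check whether the given header name and value are present in the request headers
 */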
private boolean isInRequestHeaders(String requestHeaders, String headerName, String headerValue) {
return checkRegularExpression(requestHeaders, headerName + ": " + headerValue);
}
// Java 1.6.0_22+ no longer allows Content-Length to be set, so don't check it.
// See: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6996110
// TODO any point in checking the other headers?
private void checkHeaderTypeLength(String requestHeaders, String contentType, int contentLen) {
boolean typeOK = isInRequestHeaders(requestHeaders, HTTPSamplerBase.HEADER_CONTENT_TYPE, contentType);
// boolean lengOK = isInRequestHeaders(requestHeaders, HTTPSamplerBase.HEADER_CONTENT_LENGTH, Integer.toString(contentLen));
if (!typeOK){
fail("Expected type:" + contentType + " in:\n"+ requestHeaders);
}
// if (!lengOK){
// fail("Expected & length: " +contentLen + " in:\n"+requestHeaders);
// }
}
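/**
 * Return the numeric value of the named request header as a string, or null if the header is not found
 */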
private String getSentRequestHeaderValue(String requestHeaders, String headerName) {
Perl5Matcher localMatcher = JMeterUtils.getMatcher();
String expression = ".*" + headerName + ": (\\d*).*";
Pattern pattern = JMeterUtils.getPattern(expression, Perl5Compiler.READ_ONLY_MASK | Perl5Compiler.CASE_INSENSITIVE_MASK | Perl5Compiler.SINGLELINE_MASK);
if(localMatcher.matches(requestHeaders, pattern)) {
// The value is in the first group, group 0 is the whole match
return localMatcher.getMatch().group(1);
}
return null;
}
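/**
 * Check whether the string contains a match for the regular expression
 */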
private boolean checkRegularExpression(String stringToCheck, String regularExpression) {
Perl5Matcher localMatcher = JMeterUtils.getMatcher();
Pattern pattern = JMeterUtils.getPattern(regularExpression, Perl5Compiler.READ_ONLY_MASK | Perl5Compiler.CASE_INSENSITIVE_MASK | Perl5Compiler.SINGLELINE_MASK);
return localMatcher.contains(stringToCheck, pattern);
}
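/**
 * Find the offset of the blank line that divides headers from body, or return -1 if not found
 */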
private int getPositionOfBody(String stringToCheck) {
Perl5Matcher localMatcher = JMeterUtils.getMatcher();
// The headers and body are divided by a blank line
String regularExpression = "^.$";
Pattern pattern = JMeterUtils.getPattern(regularExpression, Perl5Compiler.READ_ONLY_MASK | Perl5Compiler.CASE_INSENSITIVE_MASK | Perl5Compiler.MULTILINE_MASK);
PatternMatcherInput input = new PatternMatcherInput(stringToCheck);
if(localMatcher.contains(input, pattern)) {
MatchResult match = localMatcher.getMatch();
return match.beginOffset(0);
}
// No divider was found
return -1;
}
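/**
 * Extract the multipart boundary string from the Content-Type request header,
 * or return null if the request is not multipart/form-data
 */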
private String getBoundaryStringFromContentType(String requestHeaders) {
Perl5Matcher localMatcher = JMeterUtils.getMatcher();
String regularExpression = "^" + HTTPSamplerBase.HEADER_CONTENT_TYPE + ": multipart/form-data; boundary=(.+)$";
Pattern pattern = JMeterUtils.getPattern(regularExpression, Perl5Compiler.READ_ONLY_MASK | Perl5Compiler.CASE_INSENSITIVE_MASK | Perl5Compiler.MULTILINE_MASK);
if(localMatcher.contains(requestHeaders, pattern)) {
MatchResult match = localMatcher.getMatch();
String matchString = match.group(1);
// The header may also contain a ;charset= suffix which the regexp captures; strip it to get the correct boundary
int indexOf = matchString.indexOf(";");
if(indexOf>=0) {
return matchString.substring(0, indexOf);
} else {
return matchString;
}
}
else {
return null;
}
}
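/**
 * Set up the sampler to POST to the mirror server on localhost, using the given content encoding
 */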
private void setupUrl(HTTPSamplerBase sampler, String contentEncoding) {
String protocol = "http";
String domain = "localhost";
String path = "/test/somescript.jsp";
sampler.setProtocol(protocol);
sampler.setMethod(HTTPSamplerBase.POST);
sampler.setPath(path);
sampler.setDomain(domain);
sampler.setPort(MIRROR_PORT);
sampler.setContentEncoding(contentEncoding);
}
/**
* Set up the form data with the specified values
*
* @param httpSampler the sampler to add the form data to
* @param isEncoded true if the values are already urlencoded
*/
private void setupFormData(HTTPSamplerBase httpSampler, boolean isEncoded, String titleField, String titleValue, String descriptionField, String descriptionValue) {
if(isEncoded) {
httpSampler.addEncodedArgument(titleField, titleValue);
httpSampler.addEncodedArgument(descriptionField, descriptionValue);
}
else {
httpSampler.addArgument(titleField, titleValue);
httpSampler.addArgument(descriptionField, descriptionValue);
}
}
/**
* Set up the form data with the specified values, and a file to upload
*
* @param httpSampler the sampler to add the form data and file to
*/
private void setupFileUploadData(
HTTPSamplerBase httpSampler,
boolean isEncoded,
String titleField,
String titleValue,
String descriptionField,
String descriptionValue,
String fileField,
File fileValue,
String fileMimeType) {
// Set the form data
setupFormData(httpSampler, isEncoded, titleField, titleValue, descriptionField, descriptionValue);
// Set the file upload data
HTTPFileArg[] hfa = {new HTTPFileArg(fileValue == null ? "" : fileValue.getAbsolutePath(), fileField, fileMimeType)};
httpSampler.setHTTPFiles(hfa);
}
/**
* Check that the two byte arrays have identical content
*
* @param expected
* @param actual
* @param encoding the encoding used when printing the differing content
* @throws UnsupportedEncodingException
*/
private void checkArraysHaveSameContent(byte[] expected, byte[] actual, String encoding) throws UnsupportedEncodingException {
if(expected != null && actual != null) {
if(expected.length != actual.length) {
System.out.println("\n>>>>>>>>>>>>>>>>>>>> expected:");
System.out.println(new String(expected, encoding));
System.out.println("==================== actual:");
System.out.println(new String(actual, encoding));
System.out.println("<<<<<<<<<<<<<<<<<<<<");
fail("arrays have different length, expected is " + expected.length + ", actual is " + actual.length);
}
else {
for(int i = 0; i < expected.length; i++) {
if(expected[i] != actual[i]) {
System.out.println("\n>>>>>>>>>>>>>>>>>>>> expected:");
System.out.println(new String(expected,0,i+1, encoding));
System.out.println("==================== actual:");
System.out.println(new String(actual,0,i+1, encoding));
System.out.println("<<<<<<<<<<<<<<<<<<<<");
/*
// Useful when debugging
for(int j = 0; j < expected.length; j++) {
System.out.print(expected[j] + " ");
}
System.out.println();
for(int j = 0; j < actual.length; j++) {
System.out.print(actual[j] + " ");
}
System.out.println();
*/
fail("byte at position " + i + " is different, expected is " + expected[i] + ", actual is " + actual[i]);
}
}
}
}
else {
fail("expected or actual byte arrays were null");
}
}
/**
* Create the expected output multipart/form-data, with only form data,
* and no file multipart.
* This method is copied from the PostWriterTest class
*
* @param firstMultipart true if this is the first multipart in the request
* @param lastMultipart true if this is the last multipart in the request
*/
private byte[] createExpectedFormdataOutput(
String boundaryString,
String contentEncoding,
String titleField,
String titleValue,
String descriptionField,
String descriptionValue,
boolean firstMultipart,
boolean lastMultipart) throws IOException {
// The encoding used for http headers and control information
final byte[] DASH_DASH = "--".getBytes(ISO_8859_1);
final ByteArrayOutputStream output = new ByteArrayOutputStream();
if(firstMultipart) {
output.write(DASH_DASH);
output.write(boundaryString.getBytes(ISO_8859_1));
output.write(CRLF);
}
output.write("Content-Disposition: form-data; name=\"".getBytes(ISO_8859_1));
output.write(titleField.getBytes(ISO_8859_1));
output.write("\"".getBytes(ISO_8859_1));
output.write(CRLF);
output.write("Content-Type: text/plain".getBytes(ISO_8859_1));
if(contentEncoding != null) {
output.write("; charset=".getBytes(ISO_8859_1));
output.write(contentEncoding.getBytes(ISO_8859_1));
}
output.write(CRLF);
output.write("Content-Transfer-Encoding: 8bit".getBytes(ISO_8859_1));
output.write(CRLF);
output.write(CRLF);
if(contentEncoding != null) {
output.write(titleValue.getBytes(contentEncoding));
}
else {
output.write(titleValue.getBytes()); // TODO - charset?
}
output.write(CRLF);
output.write(DASH_DASH);
output.write(boundaryString.getBytes(ISO_8859_1));
output.write(CRLF);
output.write("Content-Disposition: form-data; name=\"".getBytes(ISO_8859_1));
output.write(descriptionField.getBytes(ISO_8859_1));
output.write("\"".getBytes(ISO_8859_1));
output.write(CRLF);
output.write("Content-Type: text/plain".getBytes(ISO_8859_1));
if(contentEncoding != null) {
output.write("; charset=".getBytes(ISO_8859_1));
output.write(contentEncoding.getBytes(ISO_8859_1));
}
output.write(CRLF);
output.write("Content-Transfer-Encoding: 8bit".getBytes(ISO_8859_1));
output.write(CRLF);
output.write(CRLF);
if(contentEncoding != null) {
output.write(descriptionValue.getBytes(contentEncoding));
}
else {
output.write(descriptionValue.getBytes()); // TODO - charset?
}
output.write(CRLF);
output.write(DASH_DASH);
output.write(boundaryString.getBytes(ISO_8859_1));
if(lastMultipart) {
output.write(DASH_DASH);
}
output.write(CRLF);
output.flush();
output.close();
return output.toByteArray();
}
/**
* Create the expected file multipart
*
* @param firstMultipart true if this is the first multipart in the request
* @param lastMultipart true if this is the last multipart in the request
*/
private byte[] createExpectedFilepartOutput(
String boundaryString,
String fileField,
File file,
String mimeType,
byte[] fileContent,
boolean firstMultipart,
boolean lastMultipart) throws IOException {
final byte[] DASH_DASH = "--".getBytes(ISO_8859_1);
final ByteArrayOutputStream output = new ByteArrayOutputStream();
if(firstMultipart) {
output.write(DASH_DASH);
output.write(boundaryString.getBytes(ISO_8859_1));
output.write(CRLF);
}
// Only the file name, not the full path, is sent in the Content-Disposition header
String filename = file.getName();
output.write("Content-Disposition: form-data; name=\"".getBytes(ISO_8859_1));
output.write(fileField.getBytes(ISO_8859_1));
output.write(("\"; filename=\"" + filename + "\"").getBytes(ISO_8859_1));
output.write(CRLF);
output.write("Content-Type: ".getBytes(ISO_8859_1));
output.write(mimeType.getBytes(ISO_8859_1));
output.write(CRLF);
output.write("Content-Transfer-Encoding: binary".getBytes(ISO_8859_1));
output.write(CRLF);
output.write(CRLF);
output.write(fileContent);
output.write(CRLF);
output.write(DASH_DASH);
output.write(boundaryString.getBytes(ISO_8859_1));
if(lastMultipart) {
output.write(DASH_DASH);
}
output.write(CRLF);
output.flush();
output.close();
return output.toByteArray();
}
/**
* Create the expected output post body for form data and file multiparts
* with specified values, when request is multipart
*/
private byte[] createExpectedFormAndUploadOutput(
String boundaryString,
String contentEncoding,
String titleField,
String titleValue,
String descriptionField,
String descriptionValue,
String fileField,
File fileValue,
String fileMimeType,
byte[] fileContent) throws IOException {
// Create the multiparts
byte[] formdataMultipart = createExpectedFormdataOutput(boundaryString, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, true, false);
byte[] fileMultipart = createExpectedFilepartOutput(boundaryString, fileField, fileValue, fileMimeType, fileContent, false, true);
// Join the two multiparts
ByteArrayOutputStream output = new ByteArrayOutputStream();
output.write(formdataMultipart);
output.write(fileMultipart);
output.flush();
output.close();
return output.toByteArray();
}
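/**
 * Create a sampler of the requested type: HTTPSampler, HTTPSampler2 or HTTPSampler3
 */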
private HTTPSamplerBase createHttpSampler(int samplerType) {
switch(samplerType) {
case HTTP_SAMPLER:
return new HTTPSampler();
case HTTP_SAMPLER2:
return new HTTPSampler2();
case HTTP_SAMPLER3:
return new HTTPSampler3();
}
throw new IllegalArgumentException("Unexpected type: "+samplerType);
}
}
|
test/src/org/apache/jmeter/protocol/http/sampler/TestHTTPSamplersAgainstHttpMirrorServer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.protocol.http.sampler;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.net.URL;
import java.util.Locale;
import org.apache.jmeter.engine.util.ValueReplacer;
import org.apache.jmeter.junit.JMeterTestCase;
import org.apache.jmeter.protocol.http.control.HttpMirrorServer;
import org.apache.jmeter.protocol.http.control.TestHTTPMirrorThread;
import org.apache.jmeter.protocol.http.util.EncoderCache;
import org.apache.jmeter.protocol.http.util.HTTPArgument;
import org.apache.jmeter.protocol.http.util.HTTPFileArg;
import org.apache.jmeter.testelement.TestPlan;
import org.apache.jmeter.threads.JMeterContextService;
import org.apache.jmeter.threads.JMeterVariables;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.oro.text.regex.MatchResult;
import org.apache.oro.text.regex.Pattern;
import org.apache.oro.text.regex.PatternMatcherInput;
import org.apache.oro.text.regex.Perl5Compiler;
import org.apache.oro.text.regex.Perl5Matcher;
import junit.framework.Test;
import junit.framework.TestSuite;
import junit.extensions.TestSetup;
/**
* Class for performing actual samples for HTTPSampler, HTTPSampler2 and HTTPSampler3.
* The samples are executed against the HttpMirrorServer, which is
* started when the unit tests are executed.
*/
public class TestHTTPSamplersAgainstHttpMirrorServer extends JMeterTestCase {
private final static int HTTP_SAMPLER = 0;
private final static int HTTP_SAMPLER2 = 1;
private final static int HTTP_SAMPLER3 = 2;
/** The encodings used for http headers and control information */
private final static String ISO_8859_1 = "ISO-8859-1"; // $NON-NLS-1$
private static final String US_ASCII = "US-ASCII"; // $NON-NLS-1$
private static final byte[] CRLF = { 0x0d, 0x0A };
private static final int MIRROR_PORT = 8081; // Different from TestHTTPMirrorThread port
private static byte[] TEST_FILE_CONTENT;
private static File temporaryFile;
private final int item;
public TestHTTPSamplersAgainstHttpMirrorServer(String arg0) {
super(arg0);
this.item = -1;
}
// additional ctor for processing tests which use int parameters
public TestHTTPSamplersAgainstHttpMirrorServer(String arg0, int item) {
super(arg0);
this.item = item;
}
// This is used to emulate @BeforeClass and @AfterClass
public static Test suite(){
final TestSuite testSuite = new TestSuite(TestHTTPSamplersAgainstHttpMirrorServer.class);
// Add parameterised tests. For simplicity we assume each has cases 0-10
for(int i=0; i<11; i++) {
testSuite.addTest(new TestHTTPSamplersAgainstHttpMirrorServer("itemised_testGetRequest_Parameters", i));
testSuite.addTest(new TestHTTPSamplersAgainstHttpMirrorServer("itemised_testGetRequest_Parameters2", i));
testSuite.addTest(new TestHTTPSamplersAgainstHttpMirrorServer("itemised_testGetRequest_Parameters3", i));
testSuite.addTest(new TestHTTPSamplersAgainstHttpMirrorServer("itemised_testPostRequest_UrlEncoded", i));
testSuite.addTest(new TestHTTPSamplersAgainstHttpMirrorServer("itemised_testPostRequest_UrlEncoded2", i));
testSuite.addTest(new TestHTTPSamplersAgainstHttpMirrorServer("itemised_testPostRequest_UrlEncoded3", i));
}
TestSetup setup = new TestSetup(testSuite){
private HttpMirrorServer httpServer;
@Override
protected void setUp() throws Exception {
httpServer = TestHTTPMirrorThread.startHttpMirror(MIRROR_PORT);
// Create the test file content
TEST_FILE_CONTENT = "some foo content &?=01234+56789-\u007c\u2aa1\u266a\u0153\u20a1\u0115\u0364\u00c5\u2052\uc385%C3%85".getBytes("UTF-8");
// create a temporary file to make sure we always have a file to give to the PostWriter
// wherever we are or whatever the current path is.
temporaryFile = File.createTempFile("TestHTTPSamplersAgainstHttpMirrorServer", "tmp");
OutputStream output = new FileOutputStream(temporaryFile);
output.write(TEST_FILE_CONTENT);
output.flush();
output.close();
}
@Override
protected void tearDown() throws Exception {
// Shutdown mirror server
httpServer.stopServer();
httpServer = null;
// delete temporary file
if(!temporaryFile.delete()) {
fail("Could not delete file:"+temporaryFile.getAbsolutePath());
}
}
};
return setup;
}
public void itemised_testPostRequest_UrlEncoded() throws Exception {
testPostRequest_UrlEncoded(HTTP_SAMPLER, ISO_8859_1, item);
}
public void itemised_testPostRequest_UrlEncoded2() throws Exception {
testPostRequest_UrlEncoded(HTTP_SAMPLER2, US_ASCII, item);
}
public void itemised_testPostRequest_UrlEncoded3() throws Exception {
testPostRequest_UrlEncoded(HTTP_SAMPLER3, US_ASCII, item);
}
public void testPostRequest_FormMultipart_0() throws Exception {
testPostRequest_FormMultipart(HTTP_SAMPLER, ISO_8859_1);
}
public void testPostRequest_FormMultipart2() throws Exception {
testPostRequest_FormMultipart(HTTP_SAMPLER2, US_ASCII);
}
public void testPostRequest_FormMultipart3() throws Exception {
testPostRequest_FormMultipart(HTTP_SAMPLER3, US_ASCII);
}
public void testPostRequest_FileUpload() throws Exception {
testPostRequest_FileUpload(HTTP_SAMPLER, ISO_8859_1);
}
public void testPostRequest_FileUpload2() throws Exception {
testPostRequest_FileUpload(HTTP_SAMPLER2, US_ASCII);
}
public void testPostRequest_FileUpload3() throws Exception {
testPostRequest_FileUpload(HTTP_SAMPLER3, US_ASCII);
}
public void testPostRequest_BodyFromParameterValues() throws Exception {
testPostRequest_BodyFromParameterValues(HTTP_SAMPLER, ISO_8859_1);
}
public void testPostRequest_BodyFromParameterValues2() throws Exception {
testPostRequest_BodyFromParameterValues(HTTP_SAMPLER2, US_ASCII);
}
public void testPostRequest_BodyFromParameterValues3() throws Exception {
testPostRequest_BodyFromParameterValues(HTTP_SAMPLER3, US_ASCII);
}
public void testGetRequest() throws Exception {
testGetRequest(HTTP_SAMPLER);
}
public void testGetRequest2() throws Exception {
testGetRequest(HTTP_SAMPLER2);
}
public void testGetRequest3() throws Exception {
testGetRequest(HTTP_SAMPLER3);
}
public void itemised_testGetRequest_Parameters() throws Exception {
testGetRequest_Parameters(HTTP_SAMPLER, item);
}
public void itemised_testGetRequest_Parameters2() throws Exception {
testGetRequest_Parameters(HTTP_SAMPLER2, item);
}
public void itemised_testGetRequest_Parameters3() throws Exception {
testGetRequest_Parameters(HTTP_SAMPLER3, item);
}
private void testPostRequest_UrlEncoded(int samplerType, String samplerDefaultEncoding, int test) throws Exception {
String titleField = "title";
String titleValue = "mytitle";
String descriptionField = "description";
String descriptionValue = "mydescription";
HTTPSamplerBase sampler = createHttpSampler(samplerType);
HTTPSampleResult res;
String contentEncoding;
switch(test) {
case 0:
// Test sending data with default encoding
contentEncoding = "";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 1:
// Test sending data as ISO-8859-1
contentEncoding = ISO_8859_1;
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 2:
// Test sending data as UTF-8
contentEncoding = "UTF-8";
titleValue = "mytitle2\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription2\u0153\u20a1\u0115\u00c5";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 3:
// Test sending data as UTF-8, with values that will change when urlencoded
contentEncoding = "UTF-8";
titleValue = "mytitle3/=";
descriptionValue = "mydescription3 /\\";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 4:
// Test sending data as UTF-8, with values that have been urlencoded
contentEncoding = "UTF-8";
titleValue = "mytitle4%2F%3D";
descriptionValue = "mydescription4+++%2F%5C";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, true, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, true);
break;
case 5:
// Test sending data as UTF-8, with values similar to __VIEWSTATE parameter that .net uses
contentEncoding = "UTF-8";
titleValue = "/wEPDwULLTE2MzM2OTA0NTYPZBYCAgMPZ/rA+8DZ2dnZ2dnZ2d/GNDar6OshPwdJc=";
descriptionValue = "mydescription5";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 6:
// Test sending data as UTF-8, with values similar to __VIEWSTATE parameter that .net uses,
// with values urlencoded, but the always encode set to false for the arguments
// This is how the HTTP Proxy server adds arguments to the sampler
contentEncoding = "UTF-8";
titleValue = "%2FwEPDwULLTE2MzM2OTA0NTYPZBYCAgMPZ%2FrA%2B8DZ2dnZ2dnZ2d%2FGNDar6OshPwdJc%3D";
descriptionValue = "mydescription6";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
res = executeSampler(sampler);
assertFalse(((HTTPArgument)sampler.getArguments().getArgument(0)).isAlwaysEncoded());
assertFalse(((HTTPArgument)sampler.getArguments().getArgument(1)).isAlwaysEncoded());
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, true);
break;
case 7:
// Test sending data as UTF-8, where user defined variables are used
// to set the value for form data
JMeterUtils.setLocale(Locale.ENGLISH);
TestPlan testPlan = new TestPlan();
JMeterVariables vars = new JMeterVariables();
vars.put("title_prefix", "a test\u00c5");
vars.put("description_suffix", "the_end");
JMeterContextService.getContext().setVariables(vars);
JMeterContextService.getContext().setSamplingStarted(true);
ValueReplacer replacer = new ValueReplacer();
replacer.setUserDefinedVariables(testPlan.getUserDefinedVariables());
contentEncoding = "UTF-8";
titleValue = "${title_prefix}mytitle7\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription7\u0153\u20a1\u0115\u00c5${description_suffix}";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
// Replace the variables in the sampler
replacer.replaceValues(sampler);
res = executeSampler(sampler);
String expectedTitleValue = "a test\u00c5mytitle7\u0153\u20a1\u0115\u00c5";
String expectedDescriptionValue = "mydescription7\u0153\u20a1\u0115\u00c5the_end";
checkPostRequestUrlEncoded(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, expectedTitleValue, descriptionField, expectedDescriptionValue, false);
break;
case 8:
break;
case 9:
break;
case 10:
break;
default:
fail("Unexpected switch value: "+test);
}
}
private void testPostRequest_FormMultipart(int samplerType, String samplerDefaultEncoding) throws Exception {
String titleField = "title";
String titleValue = "mytitle";
String descriptionField = "description";
String descriptionValue = "mydescription";
// Test sending data with default encoding
HTTPSamplerBase sampler = createHttpSampler(samplerType);
String contentEncoding = "";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
sampler.setDoMultipartPost(true);
HTTPSampleResult res = executeSampler(sampler);
checkPostRequestFormMultipart(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue);
// Test sending data as ISO-8859-1
sampler = createHttpSampler(samplerType);
contentEncoding = ISO_8859_1;
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
sampler.setDoMultipartPost(true);
res = executeSampler(sampler);
checkPostRequestFormMultipart(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue);
// Test sending data as UTF-8
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription\u0153\u20a1\u0115\u00c5";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
sampler.setDoMultipartPost(true);
res = executeSampler(sampler);
checkPostRequestFormMultipart(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue);
// Test sending data as UTF-8, with values that would have been urlencoded
// if it was not sent as multipart
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle/=";
descriptionValue = "mydescription /\\";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
sampler.setDoMultipartPost(true);
res = executeSampler(sampler);
checkPostRequestFormMultipart(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue);
// Test sending data as UTF-8, with values that have been urlencoded
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle%2F%3D";
descriptionValue = "mydescription+++%2F%5C";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, true, titleField, titleValue, descriptionField, descriptionValue);
sampler.setDoMultipartPost(true);
res = executeSampler(sampler);
String expectedTitleValue = "mytitle/=";
String expectedDescriptionValue = "mydescription /\\";
checkPostRequestFormMultipart(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, expectedTitleValue, descriptionField, expectedDescriptionValue);
// Test sending data as UTF-8, with values similar to __VIEWSTATE parameter that .net uses
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "/wEPDwULLTE2MzM2OTA0NTYPZBYCAgMPZ/rA+8DZ2dnZ2dnZ2d/GNDar6OshPwdJc=";
descriptionValue = "mydescription";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
sampler.setDoMultipartPost(true);
res = executeSampler(sampler);
checkPostRequestFormMultipart(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue);
// Test sending data as UTF-8, where user defined variables are used
// to set the value for form data
JMeterUtils.setLocale(Locale.ENGLISH);
TestPlan testPlan = new TestPlan();
JMeterVariables vars = new JMeterVariables();
vars.put("title_prefix", "a test\u00c5");
vars.put("description_suffix", "the_end");
JMeterContextService.getContext().setVariables(vars);
JMeterContextService.getContext().setSamplingStarted(true);
ValueReplacer replacer = new ValueReplacer();
replacer.setUserDefinedVariables(testPlan.getUserDefinedVariables());
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "${title_prefix}mytitle\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription\u0153\u20a1\u0115\u00c5${description_suffix}";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
sampler.setDoMultipartPost(true);
// Replace the variables in the sampler
replacer.replaceValues(sampler);
res = executeSampler(sampler);
expectedTitleValue = "a test\u00c5mytitle\u0153\u20a1\u0115\u00c5";
expectedDescriptionValue = "mydescription\u0153\u20a1\u0115\u00c5the_end";
checkPostRequestFormMultipart(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, expectedTitleValue, descriptionField, expectedDescriptionValue);
}
private void testPostRequest_FileUpload(int samplerType, String samplerDefaultEncoding) throws Exception {
String titleField = "title";
String titleValue = "mytitle";
String descriptionField = "description";
String descriptionValue = "mydescription";
String fileField = "file1";
String fileMimeType = "text/plain";
// Test sending data with default encoding
HTTPSamplerBase sampler = createHttpSampler(samplerType);
String contentEncoding = "";
setupUrl(sampler, contentEncoding);
setupFileUploadData(sampler, false, titleField, titleValue, descriptionField, descriptionValue, fileField, temporaryFile, fileMimeType);
HTTPSampleResult res = executeSampler(sampler);
checkPostRequestFileUpload(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, fileField, temporaryFile, fileMimeType, TEST_FILE_CONTENT);
// Test sending data as ISO-8859-1
sampler = createHttpSampler(samplerType);
contentEncoding = ISO_8859_1;
setupUrl(sampler, contentEncoding);
setupFileUploadData(sampler, false, titleField, titleValue, descriptionField, descriptionValue, fileField, temporaryFile, fileMimeType);
res = executeSampler(sampler);
checkPostRequestFileUpload(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, fileField, temporaryFile, fileMimeType, TEST_FILE_CONTENT);
// Test sending data as UTF-8
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription\u0153\u20a1\u0115\u00c5";
setupUrl(sampler, contentEncoding);
setupFileUploadData(sampler, false, titleField, titleValue, descriptionField, descriptionValue, fileField, temporaryFile, fileMimeType);
res = executeSampler(sampler);
checkPostRequestFileUpload(sampler, res, samplerDefaultEncoding, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, fileField, temporaryFile, fileMimeType, TEST_FILE_CONTENT);
}
private void testPostRequest_BodyFromParameterValues(int samplerType, String samplerDefaultEncoding) throws Exception {
final String titleField = ""; // ensure only values are used
String titleValue = "mytitle";
final String descriptionField = ""; // ensure only values are used
String descriptionValue = "mydescription";
// Test sending data with default encoding
HTTPSamplerBase sampler = createHttpSampler(samplerType);
String contentEncoding = "";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
HTTPSampleResult res = executeSampler(sampler);
String expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as ISO-8859-1
sampler = createHttpSampler(samplerType);
contentEncoding = ISO_8859_1;
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
res = executeSampler(sampler);
expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription\u0153\u20a1\u0115\u00c5";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
res = executeSampler(sampler);
expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8, with values that will change when urlencoded
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle/=";
descriptionValue = "mydescription /\\";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
res = executeSampler(sampler);
expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8, with values that will change when urlencoded, and where
// we tell the sampler to urlencode the parameter value
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle/=";
descriptionValue = "mydescription /\\";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, true, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
expectedPostBody = URLEncoder.encode(titleValue + descriptionValue, contentEncoding);
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8, with values that have been urlencoded
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle%2F%3D";
descriptionValue = "mydescription+++%2F%5C";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
res = executeSampler(sampler);
expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8, with values that have been urlencoded, and
// where we tell the sampler to urlencode the parameter values
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle%2F%3D";
descriptionValue = "mydescription+++%2F%5C";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, true, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8, with values similar to __VIEWSTATE parameter that .net uses
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "/wEPDwULLTE2MzM2OTA0NTYPZBYCAgMPZ/rA+8DZ2dnZ2dnZ2d/GNDar6OshPwdJc=";
descriptionValue = "mydescription";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
res = executeSampler(sampler);
expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8, with + as part of the value,
// where the value is set in the sampler as not urlencoded, but the
// alwaysEncoded flag of the argument is set to false.
// This mimics HTTPSamplerBase.addNonEncodedArgument, which the
// Proxy server calls in some cases
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle++";
descriptionValue = "mydescription+";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
res = executeSampler(sampler);
expectedPostBody = titleValue + descriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
// Test sending data as UTF-8, where user defined variables are used
// to set the value for form data
JMeterUtils.setLocale(Locale.ENGLISH);
TestPlan testPlan = new TestPlan();
JMeterVariables vars = new JMeterVariables();
vars.put("title_prefix", "a test\u00c5");
vars.put("description_suffix", "the_end");
JMeterContextService.getContext().setVariables(vars);
JMeterContextService.getContext().setSamplingStarted(true);
ValueReplacer replacer = new ValueReplacer();
replacer.setUserDefinedVariables(testPlan.getUserDefinedVariables());
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "${title_prefix}mytitle\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription\u0153\u20a1\u0115\u00c5${description_suffix}";
setupUrl(sampler, contentEncoding);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
((HTTPArgument)sampler.getArguments().getArgument(0)).setAlwaysEncoded(false);
((HTTPArgument)sampler.getArguments().getArgument(1)).setAlwaysEncoded(false);
// Replace the variables in the sampler
replacer.replaceValues(sampler);
res = executeSampler(sampler);
String expectedTitleValue = "a test\u00c5mytitle\u0153\u20a1\u0115\u00c5";
String expectedDescriptionValue = "mydescription\u0153\u20a1\u0115\u00c5the_end";
expectedPostBody = expectedTitleValue+ expectedDescriptionValue;
checkPostRequestBody(sampler, res, samplerDefaultEncoding, contentEncoding, expectedPostBody);
}
private void testGetRequest(int samplerType) throws Exception {
// Test sending simple HTTP get
// Test sending data with default encoding
HTTPSamplerBase sampler = createHttpSampler(samplerType);
String contentEncoding = "";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
HTTPSampleResult res = executeSampler(sampler);
checkGetRequest(sampler, res);
// Test sending data with ISO-8859-1 encoding
sampler = createHttpSampler(samplerType);
contentEncoding = ISO_8859_1;
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
res = executeSampler(sampler);
checkGetRequest(sampler, res);
// Test sending data with UTF-8 encoding
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
res = executeSampler(sampler);
checkGetRequest(sampler, res);
}
private void testGetRequest_Parameters(int samplerType, int test) throws Exception {
String titleField = "title";
String titleValue = "mytitle";
String descriptionField = "description";
String descriptionValue = "mydescription";
HTTPSamplerBase sampler = createHttpSampler(samplerType);
String contentEncoding;
HTTPSampleResult res;
URL executedUrl;
switch(test) {
case 0:
// Test sending simple HTTP get
// Test sending data with default encoding
contentEncoding = "";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
sampler.setRunningVersion(true);
executedUrl = sampler.getUrl();
sampler.setRunningVersion(false);
checkGetRequest_Parameters(sampler, res, contentEncoding, executedUrl, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 1:
// Test sending data with ISO-8859-1 encoding
sampler = createHttpSampler(samplerType);
contentEncoding = ISO_8859_1;
titleValue = "mytitle1\uc385";
descriptionValue = "mydescription1\uc385";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
sampler.setRunningVersion(true);
executedUrl = sampler.getUrl();
sampler.setRunningVersion(false);
checkGetRequest_Parameters(sampler, res, contentEncoding, executedUrl, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 2:
// Test sending data with UTF-8 encoding
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle2\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription2\u0153\u20a1\u0115\u00c5";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
sampler.setRunningVersion(true);
executedUrl = sampler.getUrl();
sampler.setRunningVersion(false);
checkGetRequest_Parameters(sampler, res, contentEncoding, executedUrl, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 3:
// Test sending data as UTF-8, with values that change when urlencoded
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle3\u0153+\u20a1 \u0115&yes\u00c5";
descriptionValue = "mydescription3 \u0153 \u20a1 \u0115 \u00c5";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
sampler.setRunningVersion(true);
executedUrl = sampler.getUrl();
sampler.setRunningVersion(false);
checkGetRequest_Parameters(sampler, res, contentEncoding, executedUrl, titleField, titleValue, descriptionField, descriptionValue, false);
break;
case 4:
// Test sending data as UTF-8, with values that have been urlencoded
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "mytitle4%2F%3D";
descriptionValue = "mydescription4+++%2F%5C";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
setupFormData(sampler, true, titleField, titleValue, descriptionField, descriptionValue);
res = executeSampler(sampler);
sampler.setRunningVersion(true);
executedUrl = sampler.getUrl();
sampler.setRunningVersion(false);
checkGetRequest_Parameters(sampler, res, contentEncoding, executedUrl, titleField, titleValue, descriptionField, descriptionValue, true);
break;
case 5:
// Test sending data as UTF-8, where user defined variables are used
// to set the value for form data
JMeterUtils.setLocale(Locale.ENGLISH);
TestPlan testPlan = new TestPlan();
JMeterVariables vars = new JMeterVariables();
vars.put("title_prefix", "a test\u00c5");
vars.put("description_suffix", "the_end");
JMeterContextService.getContext().setVariables(vars);
JMeterContextService.getContext().setSamplingStarted(true);
ValueReplacer replacer = new ValueReplacer();
replacer.setUserDefinedVariables(testPlan.getUserDefinedVariables());
sampler = createHttpSampler(samplerType);
contentEncoding = "UTF-8";
titleValue = "${title_prefix}mytitle5\u0153\u20a1\u0115\u00c5";
descriptionValue = "mydescription5\u0153\u20a1\u0115\u00c5${description_suffix}";
setupUrl(sampler, contentEncoding);
sampler.setMethod(HTTPSamplerBase.GET);
setupFormData(sampler, false, titleField, titleValue, descriptionField, descriptionValue);
// Replace the variables in the sampler
replacer.replaceValues(sampler);
res = executeSampler(sampler);
String expectedTitleValue = "a test\u00c5mytitle5\u0153\u20a1\u0115\u00c5";
String expectedDescriptionValue = "mydescription5\u0153\u20a1\u0115\u00c5the_end";
sampler.setRunningVersion(true);
executedUrl = sampler.getUrl();
sampler.setRunningVersion(false);
checkGetRequest_Parameters(sampler, res, contentEncoding, executedUrl, titleField, expectedTitleValue, descriptionField, expectedDescriptionValue, false);
break;
case 6:
break;
case 7:
break;
case 8:
break;
case 9:
break;
case 10:
break;
default:
fail("Unexpected switch value: "+test);
}
}
private HTTPSampleResult executeSampler(HTTPSamplerBase sampler) {
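// Run the sampler as it would run inside a test plan: switch to the running version so
// properties are evaluated, and invoke the thread started/finished lifecycle hooks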
sampler.setRunningVersion(true);
sampler.threadStarted();
HTTPSampleResult res = (HTTPSampleResult) sampler.sample();
sampler.threadFinished();
sampler.setRunningVersion(false);
return res;
}
private void checkPostRequestUrlEncoded(
HTTPSamplerBase sampler,
HTTPSampleResult res,
String samplerDefaultEncoding,
String contentEncoding,
String titleField,
String titleValue,
String descriptionField,
String descriptionValue,
boolean valuesAlreadyUrlEncoded) throws IOException {
if(contentEncoding == null || contentEncoding.length() == 0) {
contentEncoding = samplerDefaultEncoding;
}
// Check URL
assertEquals(sampler.getUrl(), res.getURL());
String expectedPostBody = null;
if(!valuesAlreadyUrlEncoded) {
String expectedTitle = URLEncoder.encode(titleValue, contentEncoding);
String expectedDescription = URLEncoder.encode(descriptionValue, contentEncoding);
expectedPostBody = titleField + "=" + expectedTitle + "&" + descriptionField + "=" + expectedDescription;
}
else {
expectedPostBody = titleField + "=" + titleValue + "&" + descriptionField + "=" + descriptionValue;
}
// Check the request
checkPostRequestBody(
sampler,
res,
samplerDefaultEncoding,
contentEncoding,
expectedPostBody
);
}
private void checkPostRequestFormMultipart(
HTTPSamplerBase sampler,
HTTPSampleResult res,
String samplerDefaultEncoding,
String contentEncoding,
String titleField,
String titleValue,
String descriptionField,
String descriptionValue) throws IOException {
if(contentEncoding == null || contentEncoding.length() == 0) {
contentEncoding = samplerDefaultEncoding;
}
// Check URL
assertEquals(sampler.getUrl(), res.getURL());
String boundaryString = getBoundaryStringFromContentType(res.getRequestHeaders());
assertNotNull(boundaryString);
byte[] expectedPostBody = createExpectedFormdataOutput(boundaryString, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, true, true);
// Check request headers
checkHeaderTypeLength(res.getRequestHeaders(), "multipart/form-data" + "; boundary=" + boundaryString, expectedPostBody.length);
// Check post body from the result query string
checkArraysHaveSameContent(expectedPostBody, res.getQueryString().getBytes(contentEncoding), contentEncoding);
// Find the data sent to the mirror server, which the mirror server is sending back to us
String dataSentToMirrorServer = new String(res.getResponseData(), contentEncoding);
int posDividerHeadersAndBody = getPositionOfBody(dataSentToMirrorServer);
String headersSent = null;
String bodySent = "";
if(posDividerHeadersAndBody >= 0) {
headersSent = dataSentToMirrorServer.substring(0, posDividerHeadersAndBody);
// Skip the blank line with crlf dividing headers and body
bodySent = dataSentToMirrorServer.substring(posDividerHeadersAndBody+2);
}
else {
fail("No header and body section found");
}
// Check response headers
checkHeaderTypeLength(headersSent, "multipart/form-data" + "; boundary=" + boundaryString, expectedPostBody.length);
// Check post body which was sent to the mirror server, and
// sent back by the mirror server
checkArraysHaveSameContent(expectedPostBody, bodySent.getBytes(contentEncoding), contentEncoding);
// Check method, path and query sent
checkMethodPathQuery(headersSent, sampler.getMethod(), sampler.getPath(), null);
}
private void checkPostRequestFileUpload(
HTTPSamplerBase sampler,
HTTPSampleResult res,
String samplerDefaultEncoding,
String contentEncoding,
String titleField,
String titleValue,
String descriptionField,
String descriptionValue,
String fileField,
File fileValue,
String fileMimeType,
byte[] fileContent) throws IOException {
if(contentEncoding == null || contentEncoding.length() == 0) {
contentEncoding = samplerDefaultEncoding;
}
// Check URL
assertEquals(sampler.getUrl(), res.getURL());
String boundaryString = getBoundaryStringFromContentType(res.getRequestHeaders());
assertNotNull(boundaryString);
byte[] expectedPostBody = createExpectedFormAndUploadOutput(boundaryString, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, fileField, fileValue, fileMimeType, fileContent);
// Check request headers
checkHeaderTypeLength(res.getRequestHeaders(), "multipart/form-data" + "; boundary=" + boundaryString, expectedPostBody.length);
// We cannot check post body from the result query string, since that will not contain
// the actual file content, but placeholder text for file content
//checkArraysHaveSameContent(expectedPostBody, res.getQueryString().getBytes(contentEncoding));
// Find the data sent to the mirror server, which the mirror server is sending back to us
String headersSent = getHeadersSent(res.getResponseData());
if(headersSent == null) {
fail("No header and body section found");
}
// Check response headers
checkHeaderTypeLength(headersSent, "multipart/form-data" + "; boundary=" + boundaryString, expectedPostBody.length);
byte[] bodySent = getBodySent(res.getResponseData());
assertNotNull("Sent body should not be null", bodySent);
// Check post body which was sent to the mirror server, and
// sent back by the mirror server
checkArraysHaveSameContent(expectedPostBody, bodySent, contentEncoding);
// Check method, path and query sent
checkMethodPathQuery(headersSent, sampler.getMethod(), sampler.getPath(), null);
}
private void checkPostRequestBody(
HTTPSamplerBase sampler,
HTTPSampleResult res,
String samplerDefaultEncoding,
String contentEncoding,
String expectedPostBody) throws IOException {
if(contentEncoding == null || contentEncoding.length() == 0) {
contentEncoding = samplerDefaultEncoding;
}
// Check URL
assertEquals(sampler.getUrl(), res.getURL());
// Check request headers
checkHeaderTypeLength(res.getRequestHeaders(), HTTPSamplerBase.APPLICATION_X_WWW_FORM_URLENCODED, expectedPostBody.getBytes(contentEncoding).length);
// Check post body from the result query string
checkArraysHaveSameContent(expectedPostBody.getBytes(contentEncoding), res.getQueryString().getBytes(contentEncoding), contentEncoding);
// Find the data sent to the mirror server, which the mirror server is sending back to us
String dataSentToMirrorServer = new String(res.getResponseData(), contentEncoding);
int posDividerHeadersAndBody = getPositionOfBody(dataSentToMirrorServer);
String headersSent = null;
String bodySent = "";
if(posDividerHeadersAndBody >= 0) {
headersSent = dataSentToMirrorServer.substring(0, posDividerHeadersAndBody);
// Skip the blank line with crlf dividing headers and body
bodySent = dataSentToMirrorServer.substring(posDividerHeadersAndBody+2);
}
else {
fail("No header and body section found");
}
// Check response headers
checkHeaderTypeLength(headersSent, HTTPSamplerBase.APPLICATION_X_WWW_FORM_URLENCODED, expectedPostBody.getBytes(contentEncoding).length);
// Check post body which was sent to the mirror server, and
// sent back by the mirror server
checkArraysHaveSameContent(expectedPostBody.getBytes(contentEncoding), bodySent.getBytes(contentEncoding), contentEncoding);
// Check method, path and query sent
checkMethodPathQuery(headersSent, sampler.getMethod(), sampler.getPath(), null);
}
private void checkGetRequest(
HTTPSamplerBase sampler,
HTTPSampleResult res
) throws IOException {
// Check URL
assertEquals(sampler.getUrl(), res.getURL());
// Check method
assertEquals(sampler.getMethod(), res.getHTTPMethod());
// Check that the query string is empty
assertEquals(0, res.getQueryString().length());
// Find the data sent to the mirror server, which the mirror server is sending back to us
String dataSentToMirrorServer = new String(res.getResponseData(), EncoderCache.URL_ARGUMENT_ENCODING);
int posDividerHeadersAndBody = getPositionOfBody(dataSentToMirrorServer);
String headersSent = null;
String bodySent = "";
if(posDividerHeadersAndBody >= 0) {
headersSent = dataSentToMirrorServer.substring(0, posDividerHeadersAndBody);
// Skip the blank line with crlf dividing headers and body
bodySent = dataSentToMirrorServer.substring(posDividerHeadersAndBody+2);
}
else {
fail("No header and body section found");
}
// No body should have been sent
assertEquals(bodySent.length(), 0);
// Check method, path and query sent
checkMethodPathQuery(headersSent, sampler.getMethod(), sampler.getPath(), null);
}
private void checkGetRequest_Parameters(
HTTPSamplerBase sampler,
HTTPSampleResult res,
String contentEncoding,
URL executedUrl,
String titleField,
String titleValue,
String descriptionField,
String descriptionValue,
boolean valuesAlreadyUrlEncoded) throws IOException {
if(contentEncoding == null || contentEncoding.length() == 0) {
contentEncoding = EncoderCache.URL_ARGUMENT_ENCODING;
}
// Check URL
assertEquals(executedUrl, res.getURL());
// Check method
assertEquals(sampler.getMethod(), res.getHTTPMethod());
// Cannot check the query string of the result, because the mirror server
// replies without including the query string in the URL
String expectedQueryString = null;
if(!valuesAlreadyUrlEncoded) {
String expectedTitle = URLEncoder.encode(titleValue, contentEncoding);
String expectedDescription = URLEncoder.encode(descriptionValue, contentEncoding);
expectedQueryString = titleField + "=" + expectedTitle + "&" + descriptionField + "=" + expectedDescription;
}
else {
expectedQueryString = titleField + "=" + titleValue + "&" + descriptionField + "=" + descriptionValue;
}
// Find the data sent to the mirror server, which the mirror server is sending back to us
String dataSentToMirrorServer = new String(res.getResponseData(), EncoderCache.URL_ARGUMENT_ENCODING);
int posDividerHeadersAndBody = getPositionOfBody(dataSentToMirrorServer);
String headersSent = null;
String bodySent = "";
if(posDividerHeadersAndBody >= 0) {
headersSent = dataSentToMirrorServer.substring(0, posDividerHeadersAndBody);
// Skip the blank line with crlf dividing headers and body
bodySent = dataSentToMirrorServer.substring(posDividerHeadersAndBody+2);
}
else {
fail("No header and body section found");
}
// No body should have been sent
assertEquals(bodySent.length(), 0);
// Check method, path and query sent
checkMethodPathQuery(headersSent, sampler.getMethod(), sampler.getPath(), expectedQueryString);
}
private void checkMethodPathQuery(
String headersSent,
String expectedMethod,
String expectedPath,
String expectedQueryString)
throws IOException {
// Check the Request URI sent to the mirror server, and
// sent back by the mirror server
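// The request line has the form "METHOD URI HTTP/version", so the method and the URI
// are delimited by the first two spaces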
int indexFirstSpace = headersSent.indexOf(" ");
int indexSecondSpace = headersSent.indexOf(" ", indexFirstSpace + 1);
if(indexFirstSpace <= 0 || indexSecondSpace <= 0 || indexFirstSpace == indexSecondSpace) {
fail("Could not find method and URI sent");
}
String methodSent = headersSent.substring(0, indexFirstSpace);
assertEquals(expectedMethod, methodSent);
String uriSent = headersSent.substring(indexFirstSpace + 1, indexSecondSpace);
int indexQueryStart = uriSent.indexOf("?");
if(expectedQueryString != null && expectedQueryString.length() > 0) {
// We should have a query string part
if(indexQueryStart <= 0 || (indexQueryStart == uriSent.length() - 1)) {
fail("Could not find query string in URI");
}
}
else {
if(indexQueryStart > 0) {
// We should not have a query string part
fail("Query string present in URI");
}
else {
indexQueryStart = uriSent.length();
}
}
// Check path
String pathSent = uriSent.substring(0, indexQueryStart);
assertEquals(expectedPath, pathSent);
// Check query
if(expectedQueryString != null && expectedQueryString.length() > 0) {
String queryStringSent = uriSent.substring(indexQueryStart + 1);
// It is only the parameter values which are encoded in the specified
// content encoding; the rest of the query is encoded in UTF-8.
// Therefore we compare the whole query using UTF-8
checkArraysHaveSameContent(expectedQueryString.getBytes(EncoderCache.URL_ARGUMENT_ENCODING), queryStringSent.getBytes(EncoderCache.URL_ARGUMENT_ENCODING), EncoderCache.URL_ARGUMENT_ENCODING);
}
}
private String getHeadersSent(byte[] responseData) throws IOException {
// Find the data sent to the mirror server, which the mirror server is sending back to us
// We assume the headers are in ISO_8859_1, and the body can be in any content encoding.
String dataSentToMirrorServer = new String(responseData, ISO_8859_1);
int posDividerHeadersAndBody = getPositionOfBody(dataSentToMirrorServer);
String headersSent = null;
if(posDividerHeadersAndBody >= 0) {
headersSent = dataSentToMirrorServer.substring(0, posDividerHeadersAndBody);
}
return headersSent;
}
private byte[] getBodySent(byte[] responseData) throws IOException {
// Find the data sent to the mirror server, which the mirror server is sending back to us
// We assume the headers are in ISO_8859_1, and the body can be in any content encoding.
// Therefore we get the data sent in ISO_8859_1, to be able to determine the end of the
// header part, and then we just construct a byte array to hold the body part, not taking
// encoding of the body into consideration, because it can contain file data, which is
// sent as raw byte data
byte[] bodySent = null;
String headersSent = getHeadersSent(responseData);
if(headersSent != null) {
// Get the content length, it tells us how much data to read
// TODO : Maybe support chunked encoding, then we cannot rely on content length
String contentLengthValue = getSentRequestHeaderValue(headersSent, HTTPSamplerBase.HEADER_CONTENT_LENGTH);
int contentLength = -1;
if(contentLengthValue != null) {
contentLength = Integer.parseInt(contentLengthValue);
}
else {
fail("Did not receive any content-length header");
}
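// The body is the final contentLength bytes of the mirrored data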
bodySent = new byte[contentLength];
System.arraycopy(responseData, responseData.length - contentLength, bodySent, 0, contentLength);
}
return bodySent;
}
private boolean isInRequestHeaders(String requestHeaders, String headerName, String headerValue) {
return checkRegularExpression(requestHeaders, headerName + ": " + headerValue);
}
// Java 1.6.0_22+ no longer allows Content-Length to be set, so don't check it.
// See: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6996110
// TODO any point in checking the other headers?
private void checkHeaderTypeLength(String requestHeaders, String contentType, int contentLen) {
boolean typeOK = isInRequestHeaders(requestHeaders, HTTPSamplerBase.HEADER_CONTENT_TYPE, contentType);
// boolean lengOK = isInRequestHeaders(requestHeaders, HTTPSamplerBase.HEADER_CONTENT_LENGTH, Integer.toString(contentLen));
if (!typeOK){
fail("Expected type:" + contentType + " in:\n"+ requestHeaders);
}
// if (!lengOK){
// fail("Expected & length: " +contentLen + " in:\n"+requestHeaders);
// }
}
private String getSentRequestHeaderValue(String requestHeaders, String headerName) {
Perl5Matcher localMatcher = JMeterUtils.getMatcher();
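// Note: the (\d*) group only extracts numeric header values, which is all these tests
// need (the method is only used for Content-Length)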
String expression = ".*" + headerName + ": (\\d*).*";
Pattern pattern = JMeterUtils.getPattern(expression, Perl5Compiler.READ_ONLY_MASK | Perl5Compiler.CASE_INSENSITIVE_MASK | Perl5Compiler.SINGLELINE_MASK);
if(localMatcher.matches(requestHeaders, pattern)) {
// The value is in the first group, group 0 is the whole match
return localMatcher.getMatch().group(1);
}
return null;
}
private boolean checkRegularExpression(String stringToCheck, String regularExpression) {
Perl5Matcher localMatcher = JMeterUtils.getMatcher();
Pattern pattern = JMeterUtils.getPattern(regularExpression, Perl5Compiler.READ_ONLY_MASK | Perl5Compiler.CASE_INSENSITIVE_MASK | Perl5Compiler.SINGLELINE_MASK);
return localMatcher.contains(stringToCheck, pattern);
}
private int getPositionOfBody(String stringToCheck) {
Perl5Matcher localMatcher = JMeterUtils.getMatcher();
// The headers and body are divided by a blank line
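// With CRLF line endings the blank line shows up as a lone carriage return,
// which is what this single-character line pattern matches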
String regularExpression = "^.$";
Pattern pattern = JMeterUtils.getPattern(regularExpression, Perl5Compiler.READ_ONLY_MASK | Perl5Compiler.CASE_INSENSITIVE_MASK | Perl5Compiler.MULTILINE_MASK);
PatternMatcherInput input = new PatternMatcherInput(stringToCheck);
if(localMatcher.contains(input, pattern)) {
MatchResult match = localMatcher.getMatch();
return match.beginOffset(0);
}
// No divider was found
return -1;
}
private String getBoundaryStringFromContentType(String requestHeaders) {
Perl5Matcher localMatcher = JMeterUtils.getMatcher();
String regularExpression = "^" + HTTPSamplerBase.HEADER_CONTENT_TYPE + ": multipart/form-data; boundary=(.+)$";
Pattern pattern = JMeterUtils.getPattern(regularExpression, Perl5Compiler.READ_ONLY_MASK | Perl5Compiler.CASE_INSENSITIVE_MASK | Perl5Compiler.MULTILINE_MASK);
if(localMatcher.contains(requestHeaders, pattern)) {
MatchResult match = localMatcher.getMatch();
String matchString = match.group(1);
// Header may contain ;charset= , which the regexp would also capture, making the computed boundary wrong
int indexOf = matchString.indexOf(";");
if(indexOf>=0) {
return matchString.substring(0, indexOf);
} else {
return matchString;
}
}
else {
return null;
}
}
private void setupUrl(HTTPSamplerBase sampler, String contentEncoding) {
String protocol = "http";
String domain = "localhost";
String path = "/test/somescript.jsp";
sampler.setProtocol(protocol);
sampler.setMethod(HTTPSamplerBase.POST);
sampler.setPath(path);
sampler.setDomain(domain);
sampler.setPort(MIRROR_PORT);
sampler.setContentEncoding(contentEncoding);
}
/**
* Set up the form data with the specified values
*
* @param httpSampler the sampler to add the form arguments to
* @param isEncoded true if the values are already urlencoded
*/
private void setupFormData(HTTPSamplerBase httpSampler, boolean isEncoded, String titleField, String titleValue, String descriptionField, String descriptionValue) {
if(isEncoded) {
httpSampler.addEncodedArgument(titleField, titleValue);
httpSampler.addEncodedArgument(descriptionField, descriptionValue);
}
else {
httpSampler.addArgument(titleField, titleValue);
httpSampler.addArgument(descriptionField, descriptionValue);
}
}
/**
* Set up the form data with the specified values, and the file to upload
*
* @param httpSampler the sampler to add the form arguments and file to
*/
private void setupFileUploadData(
HTTPSamplerBase httpSampler,
boolean isEncoded,
String titleField,
String titleValue,
String descriptionField,
String descriptionValue,
String fileField,
File fileValue,
String fileMimeType) {
// Set the form data
setupFormData(httpSampler, isEncoded, titleField, titleValue, descriptionField, descriptionValue);
// Set the file upload data
HTTPFileArg[] hfa = {new HTTPFileArg(fileValue == null ? "" : fileValue.getAbsolutePath(), fileField, fileMimeType)};
httpSampler.setHTTPFiles(hfa);
}
/**
* Check that the two byte arrays have identical content
*
* @param expected the expected bytes
* @param actual the actual bytes
* @param encoding the encoding used when printing a mismatch
* @throws UnsupportedEncodingException
*/
private void checkArraysHaveSameContent(byte[] expected, byte[] actual, String encoding) throws UnsupportedEncodingException {
if(expected != null && actual != null) {
if(expected.length != actual.length) {
System.out.println("\n>>>>>>>>>>>>>>>>>>>> expected:");
System.out.println(new String(expected, encoding));
System.out.println("==================== actual:");
System.out.println(new String(actual, encoding));
System.out.println("<<<<<<<<<<<<<<<<<<<<");
fail("arrays have different length, expected is " + expected.length + ", actual is " + actual.length);
}
else {
for(int i = 0; i < expected.length; i++) {
if(expected[i] != actual[i]) {
System.out.println("\n>>>>>>>>>>>>>>>>>>>> expected:");
System.out.println(new String(expected,0,i+1, encoding));
System.out.println("==================== actual:");
System.out.println(new String(actual,0,i+1, encoding));
System.out.println("<<<<<<<<<<<<<<<<<<<<");
/*
// Useful when debugging
for(int j = 0; j < expected.length; j++) {
System.out.print(expected[j] + " ");
}
System.out.println();
for(int j = 0; j < actual.length; j++) {
System.out.print(actual[j] + " ");
}
System.out.println();
*/
fail("byte at position " + i + " is different, expected is " + expected[i] + ", actual is " + actual[i]);
}
}
}
}
else {
fail("expected or actual byte arrays were null");
}
}
/**
* Create the expected output multipart/form-data, with only form data,
* and no file multipart.
* This method is copied from the PostWriterTest class
*
* @param lastMultipart true if this is the last multipart in the request
*/
private byte[] createExpectedFormdataOutput(
String boundaryString,
String contentEncoding,
String titleField,
String titleValue,
String descriptionField,
String descriptionValue,
boolean firstMultipart,
boolean lastMultipart) throws IOException {
// The encoding used for http headers and control information
final byte[] DASH_DASH = "--".getBytes(ISO_8859_1);
final ByteArrayOutputStream output = new ByteArrayOutputStream();
if(firstMultipart) {
output.write(DASH_DASH);
output.write(boundaryString.getBytes(ISO_8859_1));
output.write(CRLF);
}
output.write("Content-Disposition: form-data; name=\"".getBytes(ISO_8859_1));
output.write(titleField.getBytes(ISO_8859_1));
output.write("\"".getBytes(ISO_8859_1));
output.write(CRLF);
output.write("Content-Type: text/plain".getBytes(ISO_8859_1));
if(contentEncoding != null) {
output.write("; charset=".getBytes(ISO_8859_1));
output.write(contentEncoding.getBytes(ISO_8859_1));
}
output.write(CRLF);
output.write("Content-Transfer-Encoding: 8bit".getBytes(ISO_8859_1));
output.write(CRLF);
output.write(CRLF);
if(contentEncoding != null) {
output.write(titleValue.getBytes(contentEncoding));
}
else {
output.write(titleValue.getBytes()); // TODO - charset?
}
output.write(CRLF);
output.write(DASH_DASH);
output.write(boundaryString.getBytes(ISO_8859_1));
output.write(CRLF);
output.write("Content-Disposition: form-data; name=\"".getBytes(ISO_8859_1));
output.write(descriptionField.getBytes(ISO_8859_1));
output.write("\"".getBytes(ISO_8859_1));
output.write(CRLF);
output.write("Content-Type: text/plain".getBytes(ISO_8859_1));
if(contentEncoding != null) {
output.write("; charset=".getBytes(ISO_8859_1));
output.write(contentEncoding.getBytes(ISO_8859_1));
}
output.write(CRLF);
output.write("Content-Transfer-Encoding: 8bit".getBytes(ISO_8859_1));
output.write(CRLF);
output.write(CRLF);
if(contentEncoding != null) {
output.write(descriptionValue.getBytes(contentEncoding));
}
else {
output.write(descriptionValue.getBytes()); // TODO - charset?
}
output.write(CRLF);
output.write(DASH_DASH);
output.write(boundaryString.getBytes(ISO_8859_1));
if(lastMultipart) {
output.write(DASH_DASH);
}
output.write(CRLF);
output.flush();
output.close();
return output.toByteArray();
}
/**
* Create the expected file multipart
*
* @param lastMultipart true if this is the last multipart in the request
*/
private byte[] createExpectedFilepartOutput(
String boundaryString,
String fileField,
File file,
String mimeType,
byte[] fileContent,
boolean firstMultipart,
boolean lastMultipart) throws IOException {
final byte[] DASH_DASH = "--".getBytes(ISO_8859_1);
final ByteArrayOutputStream output = new ByteArrayOutputStream();
if(firstMultipart) {
output.write(DASH_DASH);
output.write(boundaryString.getBytes(ISO_8859_1));
output.write(CRLF);
}
// Only the file name, not the full path, is sent in the filename attribute
String filename = file.getName();
output.write("Content-Disposition: form-data; name=\"".getBytes(ISO_8859_1));
output.write(fileField.getBytes(ISO_8859_1));
output.write(("\"; filename=\"" + filename + "\"").getBytes(ISO_8859_1));
output.write(CRLF);
output.write("Content-Type: ".getBytes(ISO_8859_1));
output.write(mimeType.getBytes(ISO_8859_1));
output.write(CRLF);
output.write("Content-Transfer-Encoding: binary".getBytes(ISO_8859_1));
output.write(CRLF);
output.write(CRLF);
output.write(fileContent);
output.write(CRLF);
output.write(DASH_DASH);
output.write(boundaryString.getBytes(ISO_8859_1));
if(lastMultipart) {
output.write(DASH_DASH);
}
output.write(CRLF);
output.flush();
output.close();
return output.toByteArray();
}
/**
* Create the expected output post body for form data and file multiparts
* with specified values, when request is multipart
*/
private byte[] createExpectedFormAndUploadOutput(
String boundaryString,
String contentEncoding,
String titleField,
String titleValue,
String descriptionField,
String descriptionValue,
String fileField,
File fileValue,
String fileMimeType,
byte[] fileContent) throws IOException {
// Create the multiparts
byte[] formdataMultipart = createExpectedFormdataOutput(boundaryString, contentEncoding, titleField, titleValue, descriptionField, descriptionValue, true, false);
byte[] fileMultipart = createExpectedFilepartOutput(boundaryString, fileField, fileValue, fileMimeType, fileContent, false, true);
// Join the two multiparts
ByteArrayOutputStream output = new ByteArrayOutputStream();
output.write(formdataMultipart);
output.write(fileMultipart);
output.flush();
output.close();
return output.toByteArray();
}
private HTTPSamplerBase createHttpSampler(int samplerType) {
switch(samplerType) {
case HTTP_SAMPLER:
return new HTTPSampler();
case HTTP_SAMPLER2:
return new HTTPSampler2();
case HTTP_SAMPLER3:
return new HTTPSampler3();
}
throw new IllegalArgumentException("Unexpected type: "+samplerType);
}
}
|
More debug
git-svn-id: 5ccfe34f605a6c2f9041ff2965ab60012c62539a@1353055 13f79535-47bb-0310-9956-ffa450edef68
|
test/src/org/apache/jmeter/protocol/http/sampler/TestHTTPSamplersAgainstHttpMirrorServer.java
|
More debug
|
|
Java
|
apache-2.0
|
7fb04439109b55fa54e5d353b24512008bc65449
| 0
|
openecho/Kinetic1-Java
|
src/main/java/openecho/math/SimpleMatrix.java
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package openecho.math;
/**
*
* @author jmarsden
*/
public class SimpleMatrix {
private final int m;
private final int n;
private final double[][] data;
public SimpleMatrix(int m, int n) {
this.m = m;
this.n = n;
data = new double[m][n];
}
public SimpleMatrix(double[][] data) {
m = data.length;
n = data[0].length;
this.data = new double[m][n];
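// Copy row by row so this matrix does not share state with the caller's array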
for (int i = 0; i < m; i++) {
System.arraycopy(data[i], 0, this.data[i], 0, n);
}
}
public int getM() {
return m;
}
public int getN() {
return n;
}
}
|
Cleanup of Rename
|
src/main/java/openecho/math/SimpleMatrix.java
|
Cleanup of Rename
|
||
Java
|
apache-2.0
|
8bbb1d6eacdd82eb9f0b4de8428440f55f01f239
| 0
|
facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.rendercore;
import static android.view.View.MeasureSpec.makeMeasureSpec;
import android.content.Context;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.view.View;
import androidx.annotation.Nullable;
import androidx.collection.LongSparseArray;
import com.facebook.rendercore.MountDelegate.MountDelegateTarget;
import java.util.ArrayList;
import java.util.List;
public class MountState implements MountDelegateTarget {
public static final long ROOT_HOST_ID = 0L;
private final LongSparseArray<MountItem> mIndexToMountedItemMap;
private final Context mContext;
private final Host mRootHost;
// Updated in prepareMount(), thus during mounting they hold the information
// about the LayoutState that is being mounted, not mLastMountedLayoutState
@Nullable private long[] mRenderUnitIds;
private boolean mIsMounting;
private boolean mNeedsRemount;
private RenderTree mRenderTree;
private @Nullable MountDelegate mMountDelegate;
/**
* This boolean array is used to record the attach {@link RenderUnit.Binder}s which were unbound in
* {@link #updateMountItemIfNeeded(Context, RenderTreeNode, MountItem)}.
*/
private boolean[] mTempUnboundAttachBinders = new boolean[4];
/**
* This boolean array is used to record the mount {@link RenderUnit.Binder}s which were unbound in
* {@link #updateMountItemIfNeeded(Context, RenderTreeNode, MountItem)}.
*/
private boolean[] mTempUnboundMountBinders = new boolean[4];
public MountState(Host rootHost) {
mIndexToMountedItemMap = new LongSparseArray<>();
mContext = rootHost.getContext();
mRootHost = rootHost;
}
public @Nullable Object findMountContentById(long id) {
if (mIndexToMountedItemMap == null) {
return null;
}
final MountItem item = mIndexToMountedItemMap.get(id);
if (item != null) {
return item.getContent();
}
return null;
}
/**
* True if we have manually unmounted content (e.g. via unmountAllItems), which means that even
* though we may not have a new RenderTree, the mounted content may no longer match what should
* be visible in the LithoView's viewport.
*/
@Override
public boolean needsRemount() {
return mNeedsRemount;
}
@Override
public void notifyMount(
MountDelegate.MountDelegateInput input, RenderTreeNode renderTreeNode, int position) {
if (getItemAt(position) != null) {
return;
}
mountRenderUnit(position, renderTreeNode);
}
@Override
public void notifyUnmount(int position) {
final MountItem mountItem = getItemAt(position);
if (mountItem != null) {
unmountItemRecursively(mountItem.getRenderTreeNode());
}
}
/**
* Mount the layoutState on the pre-set HostView.
*
* @param renderTree a new {@link RenderTree} to mount
*/
@Override
public void mount(RenderTree renderTree) {
if (renderTree == null) {
throw new IllegalStateException("Trying to mount a null RenderTreeNode");
}
if (mIsMounting) {
throw new IllegalStateException("Trying to mount while already mounting!");
}
if (renderTree == mRenderTree && !mNeedsRemount) {
return;
}
mRenderTree = renderTree;
mIsMounting = true;
prepareMount();
// Let's start from 1 as the RenderTreeNode in position 0 always represents the root.
for (int i = 1, size = renderTree.getMountableOutputCount(); i < size; i++) {
final RenderTreeNode renderTreeNode = renderTree.getRenderTreeNodeAtIndex(i);
final MountItem currentMountItem = getItemAt(i);
final boolean isMounted = currentMountItem != null;
if (!isMounted) {
mountRenderUnit(i, renderTreeNode);
} else {
updateMountItemIfNeeded(mContext, renderTreeNode, currentMountItem);
}
}
mNeedsRemount = false;
mIsMounting = false;
}
@Override
public void unmountAllItems() {
if (mRenderUnitIds == null) {
return;
}
// Let's unmount all the content from the Root host. Everything else will be recursively
// unmounted from there.
final RenderTreeNode rootRenderTreeNode = mRenderTree.getRoot();
for (int i = 0; i < rootRenderTreeNode.getChildrenCount(); i++) {
unmountItemRecursively(rootRenderTreeNode.getChildAt(i));
}
// Let's unbind and unmount the root host.
MountItem item = mIndexToMountedItemMap.get(ROOT_HOST_ID);
if (item != null) {
if (item.isBound()) {
unbindRenderUnitFromContent(mContext, item);
}
mIndexToMountedItemMap.remove(ROOT_HOST_ID);
unmountRenderUnitFromContent(
mContext, rootRenderTreeNode, rootRenderTreeNode.getRenderUnit(), item.getContent());
}
mNeedsRemount = true;
}
@Override
public boolean isRootItem(int position) {
final MountItem mountItem = getItemAt(position);
if (mountItem == null) {
return false;
}
return mountItem == mIndexToMountedItemMap.get(ROOT_HOST_ID);
}
@Override
public Object getContentAt(int position) {
final MountItem mountItem = getItemAt(position);
if (mountItem == null) {
return null;
}
return mountItem.getContent();
}
@Override
public int getContentCount() {
return mRenderUnitIds == null ? 0 : mRenderUnitIds.length;
}
@Override
public void registerMountDelegateExtension(MountDelegateExtension mountDelegateExtension) {
if (mMountDelegate == null) {
mMountDelegate = new MountDelegate(this);
}
mMountDelegate.addExtension(mountDelegateExtension);
}
@Override
public ArrayList<Host> getHosts() {
final ArrayList<Host> hosts = new ArrayList<>();
for (int i = 0, size = mIndexToMountedItemMap.size(); i < size; i++) {
final MountItem item = mIndexToMountedItemMap.valueAt(i);
final Object content = item.getContent();
if (content instanceof Host) {
hosts.add((Host) content);
}
}
return hosts;
}
@Override
public @Nullable MountItem getMountItemAt(int position) {
return getItemAt(position);
}
@Override
public int getMountItemCount() {
return mRenderUnitIds != null ? mRenderUnitIds.length : 0;
}
/**
* This is called when the {@link MountItem}s mounted on this {@link MountState} need to be
* re-bound with the same RenderUnit. This happens when a detach/attach happens on the root {@link
* Host} that owns the MountState.
*/
@Override
public void attach() {
if (mRenderUnitIds == null) {
return;
}
for (int i = 0, size = mRenderUnitIds.length; i < size; i++) {
final MountItem mountItem = getItemAt(i);
if (mountItem == null || mountItem.isBound()) {
continue;
}
final Object content = mountItem.getContent();
bindRenderUnitToContent(mContext, mountItem);
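// If the content is a plain View that has requested layout while detached, re-apply its bounds now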
if (content instanceof View
&& !(content instanceof Host)
&& ((View) content).isLayoutRequested()) {
final View view = (View) content;
applyBoundsToMountContent(mountItem.getRenderTreeNode(), view, true);
}
}
}
/** Unbinds all the MountItems currently mounted on this MountState. */
@Override
public void detach() {
if (mRenderUnitIds == null) {
return;
}
for (int i = 0, size = mRenderUnitIds.length; i < size; i++) {
MountItem mountItem = getItemAt(i);
if (mountItem == null || !mountItem.isBound()) {
continue;
}
unbindRenderUnitFromContent(mContext, mountItem);
}
}
private static void updateBoundsForMountedRenderTreeNode(
RenderTreeNode renderTreeNode, MountItem item) {
// MountState should never update the bounds of the top-level host as this
// should be done by the ViewGroup containing the LithoView.
if (renderTreeNode.getRenderUnit().getId() == ROOT_HOST_ID) {
return;
}
final Object content = item.getContent();
final boolean forceTraversal = content instanceof View && ((View) content).isLayoutRequested();
applyBoundsToMountContent(
item.getRenderTreeNode(), item.getContent(), forceTraversal /* force */);
}
/** Prepare the {@link MountState} to mount a new {@link RenderTree}. */
private void prepareMount() {
unmountOrMoveOldItems();
final MountItem rootItem = mIndexToMountedItemMap.get(ROOT_HOST_ID);
final RenderTreeNode rootNode = mRenderTree.getRenderTreeNodeAtIndex(0);
final RenderUnit rootRenderUnit = rootNode.getRenderUnit();
// If the root mount item is null, we are mounting the root node for the first time.
if (rootItem == null) {
// Run mount callbacks.
mountRenderUnitToContent(mContext, rootNode, rootRenderUnit, mRootHost);
// Create root mount item.
final MountItem item = new MountItem(rootNode, null, mRootHost);
// Adds root mount item to map.
mIndexToMountedItemMap.put(ROOT_HOST_ID, item);
// Run binder callbacks
bindRenderUnitToContent(mContext, item);
} else {
// If root mount item is present then update it.
updateMountItemIfNeeded(mContext, rootNode, rootItem);
}
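// Cache the id of every mountable output so positions can be mapped back to render units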
final int outputCount = mRenderTree.getMountableOutputCount();
if (mRenderUnitIds == null || outputCount != mRenderUnitIds.length) {
mRenderUnitIds = new long[outputCount];
}
for (int i = 0; i < outputCount; i++) {
mRenderUnitIds[i] = mRenderTree.getRenderTreeNodeAtIndex(i).getRenderUnit().getId();
}
}
/**
* Go over all the mounted items from the leaves to the root and unmount only the items that are
* not present in the new LayoutOutputs. If an item is still present but in a new position move
* the item inside its host. The condition where an item changed host doesn't need any special
* treatment here since we mark them as removed and re-added when calculating the new
* LayoutOutputs
*/
private void unmountOrMoveOldItems() {
if (mRenderUnitIds == null) {
return;
}
// Traversing from the beginning since unmounting won't remove entries from mRenderUnitIds,
// only from mIndexToMountedItemMap. If a host changes we're going to unmount it and,
// recursively, all its mounted children.
for (int i = 0; i < mRenderUnitIds.length; i++) {
final int newPosition = mRenderTree.getRenderTreeNodeIndex(mRenderUnitIds[i]);
final RenderTreeNode renderTreeNode =
newPosition > -1 ? mRenderTree.getRenderTreeNodeAtIndex(newPosition) : null;
final MountItem oldItem = getItemAt(i);
if (newPosition == -1) {
// if oldItem is null it was previously unmounted so there is nothing we need to do.
if (oldItem != null) {
unmountItemRecursively(oldItem.getRenderTreeNode());
}
} else {
final long newHostMarker =
renderTreeNode.getParent() == null
? 0L
: renderTreeNode.getParent().getRenderUnit().getId();
final Host newHost =
mIndexToMountedItemMap.get(newHostMarker) == null
? null
: (Host) mIndexToMountedItemMap.get(newHostMarker).getContent();
if (oldItem == null) {
// This was previously unmounted.
} else if (oldItem.getHost() != newHost) {
// If the id is the same but the parent host is different we simply unmount the item and
// re-mount it later. If the item to unmount is a ComponentHost, all the children will be
// recursively unmounted.
unmountItemRecursively(oldItem.getRenderTreeNode());
} else if (newPosition != i) {
// If a MountItem for this id exists and the hostMarker has not changed but its position
// in the outputs array has changed we need to update the position in the Host to ensure
// the z-ordering.
oldItem
.getHost()
.moveItem(
oldItem,
oldItem.getRenderTreeNode().getPositionInParent(),
renderTreeNode.getPositionInParent());
}
}
}
}
// The content might be null because it's the LayoutSpec for the root host
// (the very first RenderTreeNode).
private MountItem mountContentInHost(
int index, Object content, Host host, RenderTreeNode renderTreeNode) {
final MountItem item = new MountItem(renderTreeNode, host, content);
// Create and keep a MountItem even for the layoutSpec with null content
// that sets the root host interactions.
mIndexToMountedItemMap.put(mRenderUnitIds[index], item);
host.mount(renderTreeNode.getPositionInParent(), item);
return item;
}
private void mountRenderUnit(int index, RenderTreeNode renderTreeNode) {
// 1. Resolve the correct host to mount our content to.
final RenderTreeNode hostTreeNode = renderTreeNode.getParent();
final Host host =
(Host) mIndexToMountedItemMap.get(hostTreeNode.getRenderUnit().getId()).getContent();
if (host == null) {
throw new RuntimeException("Trying to mount a RenderTreeNode but its host is not mounted.");
}
// 2. call the RenderUnit's Mount bindings.
final RenderUnit renderUnit = renderTreeNode.getRenderUnit();
final Object content = MountItemsPool.acquireMountContent(mContext, renderUnit);
mountRenderUnitToContent(mContext, renderTreeNode, renderUnit, content);
// 3. Mount the content into the selected host.
final MountItem item = mountContentInHost(index, content, host, renderTreeNode);
// 4. Call attach binding functions
bindRenderUnitToContent(mContext, item);
// 5. Apply the bounds to the Mount content now. It's important to do so after bind as calling
// bind might have triggered a layout request within a View.
applyBoundsToMountContent(renderTreeNode, item.getContent(), true /* force */);
}
private void unmountItemRecursively(RenderTreeNode node) {
final RenderUnit unit = node.getRenderUnit();
final MountItem item = mIndexToMountedItemMap.get(unit.getId());
// Has already been unmounted.
if (item == null) {
return;
}
final Object content = item.getContent();
// The root host item should never be unmounted as it's a reference
// to the top-level LithoView.
if (unit.getId() == ROOT_HOST_ID) {
return;
}
mIndexToMountedItemMap.remove(unit.getId());
// Recursively unmount mounted children items.
// This is the case when mountDiffing is enabled and unmountOrMoveOldItems() has a matching
// sub tree. However, when traversing the tree bottom-up, it may need to unmount a node holding
// that sub tree which still has mounted items (a different sequence number on the RenderTreeNode id).
if (node.getChildrenCount() > 0) {
final Host host = (Host) content;
// Recursively unmount all children. The node's children list is not modified while unmounting,
// so a forward traversal is safe.
for (int i = 0; i < node.getChildrenCount(); i++) {
unmountItemRecursively(node.getChildAt(i));
}
if (host.getMountItemCount() > 0) {
throw new IllegalStateException(
"Recursively unmounting items from a ComponentHost, left"
+ " some items behind maybe because not tracked by its MountState");
}
}
if (item.isBound()) {
unbindRenderUnitFromContent(mContext, item);
}
final Host host = item.getHost();
host.unmount(node.getPositionInParent(), item);
if (content instanceof View) {
((View) content).setPadding(0, 0, 0, 0);
}
unmountRenderUnitFromContent(mContext, node, unit, content);
item.releaseMountContent(mContext);
}
private @Nullable MountItem getItemAt(int i) {
if (mIndexToMountedItemMap == null || mRenderUnitIds == null) {
return null;
}
if (i >= mRenderUnitIds.length) {
return null;
}
return mIndexToMountedItemMap.get(mRenderUnitIds[i]);
}
private static void applyBoundsToMountContent(
RenderTreeNode renderTreeNode, Object content, boolean force) {
if (content instanceof View) {
applyBoundsToView((View) content, renderTreeNode, force);
} else if (content instanceof Drawable) {
final Rect bounds = new Rect();
renderTreeNode.getMountBounds(bounds); // Gets the relative bounds of the Render Tree Node.
final Rect padding = renderTreeNode.getResolvedPadding();
int left = bounds.left;
int top = bounds.top;
int right = bounds.right;
int bottom = bounds.bottom;
if (padding != null) {
left += padding.left;
top += padding.top;
right -= padding.right;
bottom -= padding.bottom;
}
((Drawable) content).setBounds(left, top, right, bottom);
} else {
throw new IllegalStateException("Unsupported mounted content " + content);
}
}
/**
* Sets the bounds on the given view if the view doesn't already have those bounds (or if 'force'
* is supplied).
*/
private static void applyBoundsToView(View view, RenderTreeNode renderTreeNode, boolean force) {
final Rect bounds = new Rect();
renderTreeNode.getMountBounds(bounds); // Gets the relative bounds of the Render Tree Node.
final int width = bounds.right - bounds.left;
final int height = bounds.bottom - bounds.top;
final Rect padding = renderTreeNode.getResolvedPadding();
if (padding != null && !(view instanceof Host)) {
view.setPadding(padding.left, padding.top, padding.right, padding.bottom);
}
if (force || view.getMeasuredHeight() != height || view.getMeasuredWidth() != width) {
view.measure(
makeMeasureSpec(width, View.MeasureSpec.EXACTLY),
makeMeasureSpec(height, View.MeasureSpec.EXACTLY));
}
if (force
|| view.getLeft() != bounds.left
|| view.getTop() != bounds.top
|| view.getRight() != bounds.right
|| view.getBottom() != bounds.bottom) {
view.layout(bounds.left, bounds.top, bounds.right, bounds.bottom);
}
}
private static void mountRenderUnitToContent(
final Context context,
final RenderTreeNode node,
final RenderUnit unit,
final Object content) {
final List<RenderUnit.Binder> mountUnmountFunctions = unit.mountUnmountFunctions();
if (mountUnmountFunctions != null) {
for (RenderUnit.Binder binder : mountUnmountFunctions) {
binder.bind(context, content, unit, node.getLayoutData());
}
}
}
private static void unmountRenderUnitFromContent(
final Context context,
final RenderTreeNode node,
final RenderUnit unit,
final Object content) {
final List<RenderUnit.Binder> mountUnmountFunctions = unit.mountUnmountFunctions();
if (mountUnmountFunctions != null) {
int size = mountUnmountFunctions.size();
for (int i = size - 1; i >= 0; i--) {
RenderUnit.Binder binder = mountUnmountFunctions.get(i);
binder.unbind(context, content, unit, node.getLayoutData());
}
}
}
private static void bindRenderUnitToContent(Context context, MountItem item) {
final RenderUnit renderUnit = item.getRenderUnit();
final List<RenderUnit.Binder> bindingFunctions = renderUnit.attachDetachFunctions();
if (bindingFunctions != null) {
for (RenderUnit.Binder binder : bindingFunctions) {
binder.bind(
context, item.getContent(), renderUnit, item.getRenderTreeNode().getLayoutData());
}
}
item.setIsBound(true);
}
private static void unbindRenderUnitFromContent(Context context, MountItem item) {
final RenderUnit renderUnit = item.getRenderUnit();
final List<RenderUnit.Binder> bindingFunctions = renderUnit.attachDetachFunctions();
if (bindingFunctions != null) {
int size = bindingFunctions.size();
for (int i = size - 1; i >= 0; i--) {
RenderUnit.Binder binder = bindingFunctions.get(i);
binder.unbind(
context, item.getContent(), renderUnit, item.getRenderTreeNode().getLayoutData());
}
}
item.setIsBound(false);
}
private void updateMountItemIfNeeded(
Context context, RenderTreeNode renderTreeNode, MountItem currentMountItem) {
final RenderUnit renderUnit = renderTreeNode.getRenderUnit();
final RenderTreeNode currentNode = currentMountItem.getRenderTreeNode();
final RenderUnit currentRenderUnit = currentNode.getRenderUnit();
final Object content = currentMountItem.getContent();
// Re-initialize the MountItem's internal state with the new attributes from the RenderTreeNode.
currentMountItem.update(renderTreeNode);
if (currentRenderUnit != renderUnit) {
final List<RenderUnit.Binder> attachBinders = renderUnit.attachDetachFunctions();
final List<RenderUnit.Binder> mountBinders = renderUnit.mountUnmountFunctions();
final int numberOfAttachBinders;
final int numberOfMountBinders;
if (attachBinders != null) {
numberOfAttachBinders = attachBinders.size();
if (mTempUnboundAttachBinders.length < numberOfAttachBinders) {
mTempUnboundAttachBinders = new boolean[numberOfAttachBinders];
}
} else {
numberOfAttachBinders = 0;
}
if (mountBinders != null) {
numberOfMountBinders = mountBinders.size();
if (mTempUnboundMountBinders.length < numberOfMountBinders) {
mTempUnboundMountBinders = new boolean[numberOfMountBinders];
}
} else {
numberOfMountBinders = 0;
}
// 1. unbind all attach binders which should update.
unbind(
context,
content,
currentNode,
renderTreeNode,
attachBinders,
mTempUnboundAttachBinders,
numberOfAttachBinders);
// 2. unbind all mount binders which should update.
unbind(
context,
content,
currentNode,
renderTreeNode,
mountBinders,
mTempUnboundMountBinders,
numberOfMountBinders);
// 3. rebind all mount binders which did update.
rebind(
context,
content,
renderTreeNode,
mountBinders,
mTempUnboundMountBinders,
numberOfMountBinders);
// 4. rebind all attach binders which did update.
rebind(
context,
content,
renderTreeNode,
attachBinders,
mTempUnboundAttachBinders,
numberOfAttachBinders);
}
// Update the bounds of the mounted content. This needs to be done regardless of whether
// the RenderUnit has been updated or not since the mounted item might have the same
// size and content but a different position.
updateBoundsForMountedRenderTreeNode(renderTreeNode, currentMountItem);
}
private static void unbind(
final Context context,
final Object content,
final RenderTreeNode currentNode,
final RenderTreeNode newNode,
final List<RenderUnit.Binder> binders,
final boolean[] unbound,
final int size) {
if (size == 0) {
return;
}
final RenderUnit currentRenderUnit = currentNode.getRenderUnit();
final Object currentLayoutData = currentNode.getLayoutData();
final RenderUnit newRenderUnit = newNode.getRenderUnit();
final Object newLayoutData = newNode.getLayoutData();
// unbind should be called in the reverse order.
for (int i = size - 1; i >= 0; i--) {
final RenderUnit.Binder binder = binders.get(i);
final boolean shouldUpdate =
binder.shouldUpdate(currentRenderUnit, newRenderUnit, currentLayoutData, newLayoutData);
if (shouldUpdate) {
binder.unbind(context, content, currentRenderUnit, currentLayoutData);
unbound[i] = true;
} else {
unbound[i] = false;
}
}
}
private static void rebind(
final Context context,
final Object content,
final RenderTreeNode newNode,
final List<RenderUnit.Binder> binders,
final boolean[] toRebind,
final int size) {
if (size == 0) {
return;
}
final RenderUnit newRenderUnit = newNode.getRenderUnit();
final Object newLayoutData = newNode.getLayoutData();
for (int i = 0; i < size; i++) {
final boolean shouldUpdate = toRebind[i];
if (shouldUpdate) {
final RenderUnit.Binder binder = binders.get(i);
binder.bind(context, content, newRenderUnit, newLayoutData);
}
}
}
}
|
litho-rendercore/src/main/java/com/facebook/rendercore/MountState.java
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.rendercore;
import static android.view.View.MeasureSpec.makeMeasureSpec;
import android.content.Context;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.view.View;
import androidx.annotation.Nullable;
import androidx.collection.LongSparseArray;
import com.facebook.rendercore.MountDelegate.MountDelegateTarget;
import java.util.ArrayList;
import java.util.List;
public class MountState implements MountDelegateTarget {
public static final long ROOT_HOST_ID = 0L;
private final LongSparseArray<MountItem> mIndexToMountedItemMap;
private final Context mContext;
private final Host mRootHost;
// Updated in prepareMount(), thus during mounting they hold the information
// about the LayoutState that is being mounted, not mLastMountedLayoutState
@Nullable private long[] mRenderUnitIds;
private boolean mIsMounting;
private boolean mNeedsRemount;
private RenderTree mRenderTree;
private @Nullable MountDelegate mMountDelegate;
/**
* This boolean array is used to record which attach {@link RenderUnit.Binder}s were unbound in
* {@link #updateMountItemIfNeeded(Context, RenderTreeNode, MountItem)}.
*/
private boolean[] mTempUnboundAttachBinders = new boolean[4];
/**
* This boolean array is used to record which mount {@link RenderUnit.Binder}s were unbound in
* {@link #updateMountItemIfNeeded(Context, RenderTreeNode, MountItem)}.
*/
private boolean[] mTempUnboundMountBinders = new boolean[4];
public MountState(Host rootHost) {
mIndexToMountedItemMap = new LongSparseArray<>();
mContext = rootHost.getContext();
mRootHost = rootHost;
}
public @Nullable Object findMountContentById(long id) {
if (mIndexToMountedItemMap == null) {
return null;
}
final MountItem item = mIndexToMountedItemMap.get(id);
if (item != null) {
return item.getContent();
}
return null;
}
/**
* True if we have manually unmounted content (e.g. via unmountAllItems), which means that even
* though we may not have a new RenderTree, the mounted content may no longer match what should
* be visible in the LithoView's viewport.
*/
@Override
public boolean needsRemount() {
return mNeedsRemount;
}
@Override
public void notifyMount(
MountDelegate.MountDelegateInput input, RenderTreeNode renderTreeNode, int position) {
if (getItemAt(position) != null) {
return;
}
mountRenderUnit(position, renderTreeNode);
}
@Override
public void notifyUnmount(int position) {
final MountItem mountItem = getItemAt(position);
if (mountItem != null) {
unmountItemRecursively(mountItem.getRenderTreeNode());
}
}
/**
* Mount the given RenderTree on the pre-set root Host.
*
* @param renderTree a new {@link RenderTree} to mount
*/
@Override
public void mount(RenderTree renderTree) {
if (renderTree == null) {
throw new IllegalStateException("Trying to mount a null RenderTreeNode");
}
if (mIsMounting) {
throw new IllegalStateException("Trying to mount while already mounting!");
}
if (renderTree == mRenderTree && !mNeedsRemount) {
return;
}
mRenderTree = renderTree;
mIsMounting = true;
prepareMount();
// Let's start from 1 as the RenderTreeNode in position 0 always represents the root.
for (int i = 1, size = renderTree.getMountableOutputCount(); i < size; i++) {
final RenderTreeNode renderTreeNode = renderTree.getRenderTreeNodeAtIndex(i);
final MountItem currentMountItem = getItemAt(i);
final boolean isMounted = currentMountItem != null;
if (!isMounted) {
mountRenderUnit(i, renderTreeNode);
} else {
updateMountItemIfNeeded(mContext, renderTreeNode, currentMountItem);
}
}
mNeedsRemount = false;
mIsMounting = false;
}
@Override
public void unmountAllItems() {
if (mRenderUnitIds == null) {
return;
}
// Let's unmount all the content from the Root host. Everything else will be recursively
// unmounted from there.
final RenderTreeNode rootRenderTreeNode = mRenderTree.getRoot();
for (int i = 0; i < rootRenderTreeNode.getChildrenCount(); i++) {
unmountItemRecursively(rootRenderTreeNode.getChildAt(i));
}
// Let's unbind and unmount the root host.
MountItem item = mIndexToMountedItemMap.get(ROOT_HOST_ID);
if (item != null) {
if (item.isBound()) {
unbindRenderUnitFromContent(mContext, item);
}
mIndexToMountedItemMap.remove(ROOT_HOST_ID);
unmountRenderUnitFromContent(
mContext, rootRenderTreeNode, rootRenderTreeNode.getRenderUnit(), item.getContent());
}
mNeedsRemount = true;
}
@Override
public boolean isRootItem(int position) {
final MountItem mountItem = getItemAt(position);
if (mountItem == null) {
return false;
}
return mountItem == mIndexToMountedItemMap.get(ROOT_HOST_ID);
}
@Override
public Object getContentAt(int position) {
final MountItem mountItem = getItemAt(position);
if (mountItem == null) {
return null;
}
return mountItem.getContent();
}
@Override
public int getContentCount() {
return mRenderUnitIds == null ? 0 : mRenderUnitIds.length;
}
@Override
public void registerMountDelegateExtension(MountDelegateExtension mountDelegateExtension) {
if (mMountDelegate == null) {
mMountDelegate = new MountDelegate(this);
}
mMountDelegate.addExtension(mountDelegateExtension);
}
@Override
public ArrayList<Host> getHosts() {
final ArrayList<Host> hosts = new ArrayList<>();
for (int i = 0, size = mIndexToMountedItemMap.size(); i < size; i++) {
final MountItem item = mIndexToMountedItemMap.valueAt(i);
final Object content = item.getContent();
if (content instanceof Host) {
hosts.add((Host) content);
}
}
return hosts;
}
@Override
public @Nullable MountItem getMountItemAt(int position) {
return getItemAt(position);
}
@Override
public int getMountItemCount() {
return mRenderUnitIds != null ? mRenderUnitIds.length : 0;
}
/**
* This is called when the {@link MountItem}s mounted on this {@link MountState} need to be
* re-bound with the same RenderUnit. This happens when a detach/attach happens on the root {@link
* Host} that owns the MountState.
*/
@Override
public void attach() {
if (mRenderUnitIds == null) {
return;
}
for (int i = 0, size = mRenderUnitIds.length; i < size; i++) {
final MountItem mountItem = getItemAt(i);
if (mountItem == null || mountItem.isBound()) {
continue;
}
final Object content = mountItem.getContent();
bindRenderUnitToContent(mContext, mountItem);
if (content instanceof View
&& !(content instanceof Host)
&& ((View) content).isLayoutRequested()) {
final View view = (View) content;
applyBoundsToMountContent(mountItem.getRenderTreeNode(), view, true);
}
}
}
/** Unbinds all the MountItems currently mounted on this MountState. */
@Override
public void detach() {
if (mRenderUnitIds == null) {
return;
}
for (int i = 0, size = mRenderUnitIds.length; i < size; i++) {
MountItem mountItem = getItemAt(i);
if (mountItem == null || !mountItem.isBound()) {
continue;
}
unbindRenderUnitFromContent(mContext, mountItem);
}
}
private static void updateBoundsForMountedRenderTreeNode(
RenderTreeNode renderTreeNode, MountItem item) {
// MountState should never update the bounds of the top-level host as this
// should be done by the ViewGroup containing the LithoView.
if (renderTreeNode.getRenderUnit().getId() == ROOT_HOST_ID) {
return;
}
final Object content = item.getContent();
final boolean forceTraversal = content instanceof View && ((View) content).isLayoutRequested();
applyBoundsToMountContent(
item.getRenderTreeNode(), item.getContent(), forceTraversal /* force */);
}
/** Prepare the {@link MountState} to mount a new {@link RenderTree}. */
private void prepareMount() {
unmountOrMoveOldItems();
final MountItem rootItem = mIndexToMountedItemMap.get(ROOT_HOST_ID);
final RenderTreeNode rootNode = mRenderTree.getRenderTreeNodeAtIndex(0);
final RenderUnit rootRenderUnit = rootNode.getRenderUnit();
// If root mount item is null then mounting root node for the first time.
if (rootItem == null) {
// Run mount callbacks.
mountRenderUnitToContent(mContext, rootNode, rootRenderUnit, mRootHost);
// Create root mount item.
final MountItem item = new MountItem(rootNode, null, mRootHost);
// Adds root mount item to map.
mIndexToMountedItemMap.put(ROOT_HOST_ID, item);
// Run binder callbacks
bindRenderUnitToContent(mContext, item);
} else {
// If root mount item is present then update it.
updateMountItemIfNeeded(mContext, rootNode, rootItem);
}
final int outputCount = mRenderTree.getMountableOutputCount();
if (mRenderUnitIds == null || outputCount != mRenderUnitIds.length) {
mRenderUnitIds = new long[outputCount];
}
for (int i = 0; i < outputCount; i++) {
mRenderUnitIds[i] = mRenderTree.getRenderTreeNodeAtIndex(i).getRenderUnit().getId();
}
}
/**
* Go over all the mounted items from the leaves to the root and unmount only the items that are
* not present in the new LayoutOutputs. If an item is still present but in a new position, move
* the item inside its host. The case where an item changed host doesn't need any special
* treatment here since such items are marked as removed and re-added when calculating the new
* LayoutOutputs.
*/
private void unmountOrMoveOldItems() {
if (mRenderUnitIds == null) {
return;
}
// Traverse from the beginning since unmounting won't remove entries from mRenderUnitIds,
// only from mIndexToMountedItemMap. If a host changes we're going to unmount it and,
// recursively, all its mounted children.
for (int i = 0; i < mRenderUnitIds.length; i++) {
final int newPosition = mRenderTree.getRenderTreeNodeIndex(mRenderUnitIds[i]);
final RenderTreeNode renderTreeNode =
newPosition > -1 ? mRenderTree.getRenderTreeNodeAtIndex(newPosition) : null;
final MountItem oldItem = getItemAt(i);
if (newPosition == -1) {
// if oldItem is null it was previously unmounted so there is nothing we need to do.
if (oldItem != null) {
unmountItemRecursively(oldItem.getRenderTreeNode());
}
} else {
final long newHostMarker =
renderTreeNode.getParent() == null
? 0L
: renderTreeNode.getParent().getRenderUnit().getId();
final Host newHost =
mIndexToMountedItemMap.get(newHostMarker) == null
? null
: (Host) mIndexToMountedItemMap.get(newHostMarker).getContent();
if (oldItem == null) {
// This was previously unmounted.
} else if (oldItem.getHost() != newHost) {
// If the id is the same but the parent host is different we simply unmount the item and
// re-mount it later. If the item to unmount is a ComponentHost, all the children will be
// recursively unmounted.
unmountItemRecursively(oldItem.getRenderTreeNode());
} else if (newPosition != i) {
// If a MountItem for this id exists and the hostMarker has not changed but its position
// in the outputs array has changed we need to update the position in the Host to ensure
// the z-ordering.
oldItem
.getHost()
.moveItem(
oldItem,
oldItem.getRenderTreeNode().getPositionInParent(),
renderTreeNode.getPositionInParent());
}
}
}
}
// The content might be null because it's the LayoutSpec for the root host
// (the very first RenderTreeNode).
private MountItem mountContentInHost(
int index, Object content, Host host, RenderTreeNode renderTreeNode) {
final MountItem item = new MountItem(renderTreeNode, host, content);
// Create and keep a MountItem even for the layoutSpec with null content
// that sets the root host interactions.
mIndexToMountedItemMap.put(mRenderUnitIds[index], item);
host.mount(renderTreeNode.getPositionInParent(), item);
return item;
}
private void mountRenderUnit(int index, RenderTreeNode renderTreeNode) {
// 1. Resolve the correct host to mount our content to.
final RenderTreeNode hostTreeNode = renderTreeNode.getParent();
final Host host =
(Host) mIndexToMountedItemMap.get(hostTreeNode.getRenderUnit().getId()).getContent();
if (host == null) {
throw new RuntimeException("Trying to mount a RenderTreeNode but its host is not mounted.");
}
// 2. call the RenderUnit's Mount bindings.
final RenderUnit renderUnit = renderTreeNode.getRenderUnit();
final Object content = MountItemsPool.acquireMountContent(mContext, renderUnit);
mountRenderUnitToContent(mContext, renderTreeNode, renderUnit, content);
// 3. Mount the content into the selected host.
final MountItem item = mountContentInHost(index, content, host, renderTreeNode);
// 4. Call attach binding functions
bindRenderUnitToContent(mContext, item);
// 5. Apply the bounds to the Mount content now. It's important to do so after bind as calling
// bind might have triggered a layout request within a View.
applyBoundsToMountContent(renderTreeNode, item.getContent(), true /* force */);
}
private void unmountItemRecursively(RenderTreeNode node) {
final RenderUnit unit = node.getRenderUnit();
final MountItem item = mIndexToMountedItemMap.get(unit.getId());
// Already has been unmounted.
if (item == null) {
return;
}
final Object content = item.getContent();
// The root host item should never be unmounted as it's a reference
// to the top-level LithoView.
if (unit.getId() == ROOT_HOST_ID) {
return;
}
mIndexToMountedItemMap.remove(unit.getId());
// Recursively unmount mounted children items.
// This is the case when mountDiffing is enabled and unmountOrMoveOldItems() has a matching
// sub tree. However, when traversing the tree bottom-up, it needs to unmount a node holding
// that sub tree which will still have mounted items (they have a different sequence number in
// their RenderTreeNode id).
if (node.getChildrenCount() > 0) {
final Host host = (Host) content;
// Recursively unmount all children. The node's children list is not modified while unmounting,
// so a forward traversal is safe.
for (int i = 0; i < node.getChildrenCount(); i++) {
unmountItemRecursively(node.getChildAt(i));
}
if (host.getMountItemCount() > 0) {
throw new IllegalStateException(
"Recursively unmounting items from a ComponentHost, left"
+ " some items behind maybe because not tracked by its MountState");
}
}
final Host host = item.getHost();
host.unmount(node.getPositionInParent(), item);
if (content instanceof View) {
((View) content).setPadding(0, 0, 0, 0);
}
if (item.isBound()) {
unbindRenderUnitFromContent(mContext, item);
}
unmountRenderUnitFromContent(mContext, node, unit, content);
item.releaseMountContent(mContext);
}
private @Nullable MountItem getItemAt(int i) {
if (mIndexToMountedItemMap == null || mRenderUnitIds == null) {
return null;
}
if (i >= mRenderUnitIds.length) {
return null;
}
return mIndexToMountedItemMap.get(mRenderUnitIds[i]);
}
private static void applyBoundsToMountContent(
RenderTreeNode renderTreeNode, Object content, boolean force) {
if (content instanceof View) {
applyBoundsToView((View) content, renderTreeNode, force);
} else if (content instanceof Drawable) {
final Rect bounds = new Rect();
renderTreeNode.getMountBounds(bounds); // Gets the relative bounds of the Render Tree Node.
final Rect padding = renderTreeNode.getResolvedPadding();
int left = bounds.left;
int top = bounds.top;
int right = bounds.right;
int bottom = bounds.bottom;
if (padding != null) {
left += padding.left;
top += padding.top;
right -= padding.right;
bottom -= padding.bottom;
}
((Drawable) content).setBounds(left, top, right, bottom);
} else {
throw new IllegalStateException("Unsupported mounted content " + content);
}
}
/**
* Sets the bounds on the given view if the view doesn't already have those bounds (or if 'force'
* is supplied).
*/
private static void applyBoundsToView(View view, RenderTreeNode renderTreeNode, boolean force) {
final Rect bounds = new Rect();
renderTreeNode.getMountBounds(bounds); // Gets the relative bounds of the Render Tree Node.
final int width = bounds.right - bounds.left;
final int height = bounds.bottom - bounds.top;
final Rect padding = renderTreeNode.getResolvedPadding();
if (padding != null && !(view instanceof Host)) {
view.setPadding(padding.left, padding.top, padding.right, padding.bottom);
}
if (force || view.getMeasuredHeight() != height || view.getMeasuredWidth() != width) {
view.measure(
makeMeasureSpec(width, View.MeasureSpec.EXACTLY),
makeMeasureSpec(height, View.MeasureSpec.EXACTLY));
}
if (force
|| view.getLeft() != bounds.left
|| view.getTop() != bounds.top
|| view.getRight() != bounds.right
|| view.getBottom() != bounds.bottom) {
view.layout(bounds.left, bounds.top, bounds.right, bounds.bottom);
}
}
private static void mountRenderUnitToContent(
final Context context,
final RenderTreeNode node,
final RenderUnit unit,
final Object content) {
final List<RenderUnit.Binder> mountUnmountFunctions = unit.mountUnmountFunctions();
if (mountUnmountFunctions != null) {
for (RenderUnit.Binder binder : mountUnmountFunctions) {
binder.bind(context, content, unit, node.getLayoutData());
}
}
}
private static void unmountRenderUnitFromContent(
final Context context,
final RenderTreeNode node,
final RenderUnit unit,
final Object content) {
final List<RenderUnit.Binder> mountUnmountFunctions = unit.mountUnmountFunctions();
if (mountUnmountFunctions != null) {
int size = mountUnmountFunctions.size();
for (int i = size - 1; i >= 0; i--) {
RenderUnit.Binder binder = mountUnmountFunctions.get(i);
binder.unbind(context, content, unit, node.getLayoutData());
}
}
}
private static void bindRenderUnitToContent(Context context, MountItem item) {
final RenderUnit renderUnit = item.getRenderUnit();
final List<RenderUnit.Binder> bindingFunctions = renderUnit.attachDetachFunctions();
if (bindingFunctions != null) {
for (RenderUnit.Binder binder : bindingFunctions) {
binder.bind(
context, item.getContent(), renderUnit, item.getRenderTreeNode().getLayoutData());
}
}
item.setIsBound(true);
}
private static void unbindRenderUnitFromContent(Context context, MountItem item) {
final RenderUnit renderUnit = item.getRenderUnit();
final List<RenderUnit.Binder> bindingFunctions = renderUnit.attachDetachFunctions();
if (bindingFunctions != null) {
int size = bindingFunctions.size();
for (int i = size - 1; i >= 0; i--) {
RenderUnit.Binder binder = bindingFunctions.get(i);
binder.unbind(
context, item.getContent(), renderUnit, item.getRenderTreeNode().getLayoutData());
}
}
item.setIsBound(false);
}
private void updateMountItemIfNeeded(
Context context, RenderTreeNode renderTreeNode, MountItem currentMountItem) {
final RenderUnit renderUnit = renderTreeNode.getRenderUnit();
final RenderTreeNode currentNode = currentMountItem.getRenderTreeNode();
final RenderUnit currentRenderUnit = currentNode.getRenderUnit();
final Object content = currentMountItem.getContent();
// Re-initialize the MountItem's internal state with the new attributes from the RenderTreeNode.
currentMountItem.update(renderTreeNode);
if (currentRenderUnit != renderUnit) {
final List<RenderUnit.Binder> attachBinders = renderUnit.attachDetachFunctions();
final List<RenderUnit.Binder> mountBinders = renderUnit.mountUnmountFunctions();
final int numberOfAttachBinders;
final int numberOfMountBinders;
if (attachBinders != null) {
numberOfAttachBinders = attachBinders.size();
if (mTempUnboundAttachBinders.length < numberOfAttachBinders) {
mTempUnboundAttachBinders = new boolean[numberOfAttachBinders];
}
} else {
numberOfAttachBinders = 0;
}
if (mountBinders != null) {
numberOfMountBinders = mountBinders.size();
if (mTempUnboundMountBinders.length < numberOfMountBinders) {
mTempUnboundMountBinders = new boolean[numberOfMountBinders];
}
} else {
numberOfMountBinders = 0;
}
// 1. unbind all attach binders which should update.
unbind(
context,
content,
currentNode,
renderTreeNode,
attachBinders,
mTempUnboundAttachBinders,
numberOfAttachBinders);
// 2. unbind all mount binders which should update.
unbind(
context,
content,
currentNode,
renderTreeNode,
mountBinders,
mTempUnboundMountBinders,
numberOfMountBinders);
// 3. rebind all mount binders which did update.
rebind(
context,
content,
renderTreeNode,
mountBinders,
mTempUnboundMountBinders,
numberOfMountBinders);
// 4. rebind all attach binders which did update.
rebind(
context,
content,
renderTreeNode,
attachBinders,
mTempUnboundAttachBinders,
numberOfAttachBinders);
}
// Update the bounds of the mounted content. This needs to be done regardless of whether
// the RenderUnit has been updated or not since the mounted item might have the same
// size and content but a different position.
updateBoundsForMountedRenderTreeNode(renderTreeNode, currentMountItem);
}
private static void unbind(
final Context context,
final Object content,
final RenderTreeNode currentNode,
final RenderTreeNode newNode,
final List<RenderUnit.Binder> binders,
final boolean[] unbound,
final int size) {
if (size == 0) {
return;
}
final RenderUnit currentRenderUnit = currentNode.getRenderUnit();
final Object currentLayoutData = currentNode.getLayoutData();
final RenderUnit newRenderUnit = newNode.getRenderUnit();
final Object newLayoutData = newNode.getLayoutData();
// unbind should be called in the reverse order.
for (int i = size - 1; i >= 0; i--) {
final RenderUnit.Binder binder = binders.get(i);
final boolean shouldUpdate =
binder.shouldUpdate(currentRenderUnit, newRenderUnit, currentLayoutData, newLayoutData);
if (shouldUpdate) {
binder.unbind(context, content, currentRenderUnit, currentLayoutData);
unbound[i] = true;
} else {
unbound[i] = false;
}
}
}
private static void rebind(
final Context context,
final Object content,
final RenderTreeNode newNode,
final List<RenderUnit.Binder> binders,
final boolean[] toRebind,
final int size) {
if (size == 0) {
return;
}
final RenderUnit newRenderUnit = newNode.getRenderUnit();
final Object newLayoutData = newNode.getLayoutData();
for (int i = 0; i < size; i++) {
final boolean shouldUpdate = toRebind[i];
if (shouldUpdate) {
final RenderUnit.Binder binder = binders.get(i);
binder.bind(context, content, newRenderUnit, newLayoutData);
}
}
}
}
|
Reorders Host#unmount and attach binder unbind calls in RenderCore's MountState.
Summary:
Reorders `Host#unmount` and attach binder unbind calls in RenderCore's MountState. This changes the effective order of unmount
from:
```
Host.unmount()
AttachBinders()
MountBinders()
```
to:
```
AttachBinders()
Host.unmount()
MountBinders()
```
This makes it consistent with (the reverse of) mount call order.
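For reference, the reordered tail of `unmountItemRecursively` in the new contents above now reads as follows (excerpt only, identifiers exactly as in the file; the inline comments are added here for illustration):
```java
// Attach binders are unbound first, while the content is still mounted in its host.
if (item.isBound()) {
  unbindRenderUnitFromContent(mContext, item);
}
// The item is then detached from its host.
final Host host = item.getHost();
host.unmount(node.getPositionInParent(), item);
if (content instanceof View) {
  ((View) content).setPadding(0, 0, 0, 0);
}
// Only afterwards are the mount binders unbound and the content released back to the pool.
unmountRenderUnitFromContent(mContext, node, unit, content);
item.releaseMountContent(mContext);
```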
Reviewed By: astreet
Differential Revision: D21662092
fbshipit-source-id: 7b5d83656f745b2519c9926071f85d491f584e88
|
litho-rendercore/src/main/java/com/facebook/rendercore/MountState.java
|
Reorders Host#unmount and attach binder unbind calls in RenderCore's MountState.
|
|
Java
|
apache-2.0
|
1866a190a0f9601968439c2843833b3baabfdcb1
| 0
|
axonivy/project-build-plugin,axonivy/project-build-plugin
|
/*
* Copyright (C) 2015 AXON IVY AG
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.ivyteam.ivy.maven;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.Scanner;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.SystemUtils;
import org.apache.maven.artifact.versioning.ArtifactVersion;
import org.apache.maven.artifact.versioning.DefaultArtifactVersion;
import org.apache.maven.artifact.versioning.VersionRange;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import ch.ivyteam.ivy.maven.engine.EngineVersionEvaluator;
import ch.ivyteam.ivy.maven.util.UrlRedirectionResolver;
import net.lingala.zip4j.core.ZipFile;
import net.lingala.zip4j.exception.ZipException;
/**
* Downloads an AXON.IVY Engine from the NET if it does not yet exist in the correct version.
*
* <p>Command line invocation is supported. E.g.</p>
* <pre>mvn com.axonivy.ivy.ci:project-build-plugin:6.2.0:installEngine
* -Divy.engine.directory=c:/axonviy/engine
* -Divy.engine.version=6.2.0
* -Divy.engine.os.arch=Linux_x64</pre>
*
* @author Reguel Wermelinger
* @since 6.0.0
*/
@Mojo(name=InstallEngineMojo.GOAL, requiresProject=false)
public class InstallEngineMojo extends AbstractEngineMojo
{
public static final String GOAL = "installEngine";
public static final String ENGINE_LIST_URL_PROPERTY = "ivy.engine.list.url";
/**
* URL where a packed ivy Engine can be downloaded. E.g.
* <code>https://developer.axonivy.com/download/6.0.10/AxonIvyEngine6.0.10.55478_Windows_x64.zip</code>
*/
@Parameter(property="ivy.engine.download.url")
URL engineDownloadUrl;
/**
* URL where a link to the ivy Engine in the expected {@link #ivyVersion} exists.
* The URL will be used to download the required engine if it does not yet exist.
* The URL should point to a site providing HTML content with a link to the engine <br>e.g.
* <code><a href="https://developer.axonivy.com/download/6.0.10/AxonIvyEngine6.0.10.55478_Windows_x64.zip"> the engine</a></code>
*/
@Parameter(defaultValue="https://developer.axonivy.com/download/maven.html", property=ENGINE_LIST_URL_PROPERTY)
URL engineListPageUrl;
/**
* Engine type that will be downloaded if {@link #autoInstallEngine} is set and the engine must be
* retrieved from the {@link #engineListPageUrl}.
* Possible values are:
* <ul>
* <li>All_x64</li>
* <li>Windows_x64</li>
* </ul>
* All_x64 supports Linux and Windows.
*/
@Parameter(defaultValue="All_x64", property="ivy.engine.os.arch")
String osArchitecture;
/**
* Enables the automatic installation of an ivy Engine in the {@link #engineDirectory}.
* If there is yet no engine installed, or the {@link #ivyVersion} does not match, the
* engine will be downloaded from the {@link #engineDownloadUrl} and unpacked into the
* {@link #engineDirectory}.
*/
@Parameter(defaultValue="true", property="ivy.engine.auto.install")
boolean autoInstallEngine;
@Override
public void execute() throws MojoExecutionException
{
getLog().info("Provide engine for ivy version " + ivyVersion);
ensureEngineIsInstalled();
}
private void ensureEngineIsInstalled() throws MojoExecutionException
{
VersionRange ivyVersionRange = getIvyVersionRange();
if (identifyAndGetEngineDirectory() == null)
{
handleNoInstalledEngine();
}
else
{
if (engineDirectoryIsEmpty())
{
getRawEngineDirectory().mkdirs();
}
ArtifactVersion installedEngineVersion = getInstalledEngineVersion(getRawEngineDirectory());
if (installedEngineVersion == null ||
!ivyVersionRange.containsVersion(installedEngineVersion))
{
handleWrongIvyVersion(installedEngineVersion);
}
}
}
private void handleNoInstalledEngine() throws MojoExecutionException
{
getLog().info("No installed engine found for version '"+ivyVersion+"'");
boolean cleanEngineDir = false;
downloadAndInstallEngine(cleanEngineDir);
}
private void handleWrongIvyVersion(ArtifactVersion installedEngineVersion) throws MojoExecutionException
{
getLog().info("Installed engine has version '"+installedEngineVersion+"' instead of expected '"+ivyVersion+"'");
boolean cleanEngineDir = installedEngineVersion != null;
downloadAndInstallEngine(cleanEngineDir);
}
private void downloadAndInstallEngine(boolean cleanEngineDir) throws MojoExecutionException
{
if (autoInstallEngine)
{
getLog().info("Will automatically download Engine now.");
EngineDownloader engineDownloader = new EngineDownloader();
File downloadZip = engineDownloader.downloadEngine();
if (cleanEngineDir)
{
removeOldEngineContent();
}
if (!isEngineDirectoryIdentified())
{
String engineZipFileName = engineDownloader.getZipFileNameFromDownloadUrl();
engineDirectory = new File(engineCacheDirectory, ivyEngineVersionOfZip(engineZipFileName));
engineDirectory.mkdirs();
}
unpackEngine(downloadZip);
downloadZip.delete();
ArtifactVersion installedEngineVersion = getInstalledEngineVersion(getRawEngineDirectory());
if (installedEngineVersion == null || !getIvyVersionRange().containsVersion(installedEngineVersion))
{
throw new MojoExecutionException("Automatic installation of an ivyEngine failed. "
+ "Downloaded version is '"+installedEngineVersion+"' but expecting '"+ivyVersion+"'.");
}
}
else
{
throw new MojoExecutionException("Aborting class generation as no valid ivy Engine is available! "
+ "Use the 'autoInstallEngine' parameter for an automatic installation.");
}
}
static String ivyEngineVersionOfZip(String engineZipFileName)
{
Matcher matcher = Pattern.compile("[a-zA-Z]*(([\\d]+\\.?)+)*").matcher(engineZipFileName);
if (matcher.find())
{
String version = matcher.group(1);
if (version != null)
{
return EngineVersionEvaluator.toReleaseVersion(matcher.group(1));
}
}
return engineZipFileName; // fallback: no version in file name
}
private void removeOldEngineContent() throws MojoExecutionException
{
try
{
FileUtils.cleanDirectory(getRawEngineDirectory());
}
catch (IOException ex)
{
throw new MojoExecutionException("Failed to clean outdated ivy Engine directory '"+getRawEngineDirectory()+"'.", ex);
}
}
private boolean engineDirectoryIsEmpty()
{
return !getRawEngineDirectory().isDirectory() || ArrayUtils.isEmpty(getRawEngineDirectory().listFiles());
}
private void unpackEngine(File downloadZip) throws MojoExecutionException
{
try
{
String targetLocation = getRawEngineDirectory().getAbsolutePath();
getLog().info("Unpacking engine " + downloadZip.getAbsolutePath() + " to " + targetLocation);
ZipFile engineZip = new ZipFile(downloadZip);
engineZip.extractAll(targetLocation);
}
catch (ZipException ex)
{
throw new MojoExecutionException("Failed to unpack downloaded engine '" + downloadZip + "'.", ex);
}
}
File getDownloadDirectory()
{
return SystemUtils.getJavaIoTmpDir();
}
class EngineDownloader
{
private String zipFileName = null;
private File downloadEngine() throws MojoExecutionException
{
URL downloadUrlToUse = (engineDownloadUrl != null) ? engineDownloadUrl : findEngineDownloadUrlFromListPage();
return downloadEngineFromUrl(downloadUrlToUse);
}
private URL findEngineDownloadUrlFromListPage() throws MojoExecutionException
{
try (InputStream pageStream = new UrlRedirectionResolver().followRedirections(engineListPageUrl))
{
return findEngineDownloadUrl(pageStream);
}
catch (IOException ex)
{
throw new MojoExecutionException("Failed to find engine download link in list page "+engineListPageUrl, ex);
}
}
URL findEngineDownloadUrl(InputStream htmlStream) throws MojoExecutionException, MalformedURLException
{
String engineFileNameRegex = "AxonIvyEngine[^.]+?\\.[^.]+?\\.+[^_]*?_"+osArchitecture+"\\.zip";
Pattern enginePattern = Pattern.compile("href=[\"|'][^\"']*?"+engineFileNameRegex+"[\"|']");
try(Scanner scanner = new Scanner(htmlStream))
{
String engineLink = null;
while (StringUtils.isBlank(engineLink))
{
String engineLinkMatch = scanner.findWithinHorizon(enginePattern, 0);
if (engineLinkMatch == null)
{
throw new MojoExecutionException("Could not find a link to engine for version '"+ivyVersion+"' on site '"+engineListPageUrl+"'");
}
String versionString = StringUtils.substringBetween(engineLinkMatch, "AxonIvyEngine", "_"+osArchitecture);
ArtifactVersion version = new DefaultArtifactVersion(EngineVersionEvaluator.toReleaseVersion(versionString));
if (getIvyVersionRange().containsVersion(version))
{
engineLink = StringUtils.replace(engineLinkMatch, "\"", "'");
engineLink = StringUtils.substringBetween(engineLink, "href='", "'");
}
}
return toAbsoluteLink(engineListPageUrl, engineLink);
}
}
private URL toAbsoluteLink(URL baseUrl, String parsedEngineArchivLink) throws MalformedURLException
{
boolean isAbsoluteLink = StringUtils.startsWithAny(parsedEngineArchivLink, "http://", "https://");
if (isAbsoluteLink)
{
return new URL(parsedEngineArchivLink);
}
return new URL(baseUrl, parsedEngineArchivLink);
}
private File downloadEngineFromUrl(URL engineUrl) throws MojoExecutionException
{
try
{
File downloadZip = evaluateTargetFile(engineUrl);
getLog().info("Starting engine download from "+engineUrl);
Files.copy(engineUrl.openStream(), downloadZip.toPath(), StandardCopyOption.REPLACE_EXISTING);
return downloadZip;
}
catch (IOException ex)
{
throw new MojoExecutionException("Failed to download engine from '" + engineUrl + "' to '"
+ getDownloadDirectory() + "'", ex);
}
}
private File evaluateTargetFile(URL engineUrl)
{
zipFileName = StringUtils.substringAfterLast(engineUrl.toExternalForm(), "/");
File downloadZip = new File(getDownloadDirectory(), zipFileName);
int tempFileSuffix = 0;
while (downloadZip.exists())
{
String suffixedZipFileName = zipFileName + "." + tempFileSuffix;
downloadZip = new File(getDownloadDirectory(), suffixedZipFileName);
tempFileSuffix++;
}
return downloadZip;
}
/**
* Extracts the name of the engine zip-file from the url used to download the engine.
* <br/>
* The zip-file name is only known <i>after</i> downloading the engine, since the download-url
* might be extracted from an engine list-page.
* <br/>
* The returned zip-file name is not necessarily equal to the name of the downloaded zip-file, since the
* downloaded file could have been renamed to avoid name conflicts.
*
* @return engine zip file-name
*/
private String getZipFileNameFromDownloadUrl()
{
if (zipFileName == null)
{
throw new IllegalStateException("Engine zip file name is not set up.");
}
return zipFileName;
}
}
}
|
src/main/java/ch/ivyteam/ivy/maven/InstallEngineMojo.java
|
/*
* Copyright (C) 2015 AXON IVY AG
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.ivyteam.ivy.maven;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.util.Scanner;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.SystemUtils;
import org.apache.maven.artifact.versioning.ArtifactVersion;
import org.apache.maven.artifact.versioning.DefaultArtifactVersion;
import org.apache.maven.artifact.versioning.VersionRange;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import ch.ivyteam.ivy.maven.engine.EngineVersionEvaluator;
import ch.ivyteam.ivy.maven.util.UrlRedirectionResolver;
import net.lingala.zip4j.core.ZipFile;
import net.lingala.zip4j.exception.ZipException;
/**
* Downloads an AXON.IVY Engine from the NET if it does not yet exist in the correct version.
*
* <p>Command line invocation is supported. E.g.</p>
* <pre>mvn com.axonivy.ivy.ci:project-build-plugin:6.2.0:installEngine
* -Divy.engine.directory=c:/axonviy/engine
* -Divy.engine.version=6.2.0
* -Divy.engine.os.arch=Linux_x64</pre>
*
* @author Reguel Wermelinger
* @since 6.0.0
*/
@Mojo(name=InstallEngineMojo.GOAL, requiresProject=false)
public class InstallEngineMojo extends AbstractEngineMojo
{
public static final String GOAL = "installEngine";
public static final String ENGINE_LIST_URL_PROPERTY = "ivy.engine.list.url";
/**
* URL where a packed ivy Engine can be downloaded. E.g.
* <code>https://developer.axonivy.com/download/6.0.10/AxonIvyEngine6.0.10.55478_Windows_x64.zip</code>
*/
@Parameter(property="ivy.engine.download.url")
URL engineDownloadUrl;
/**
* URL where a link to the ivy Engine in the expected {@link #ivyVersion} exists.
* The URL will be used to download the required engine if it does not yet exist.
* The URL should point to a site providing HTML content with a link to the engine <br>e.g.
* <code><a href="https://developer.axonivy.com/download/6.0.10/AxonIvyEngine6.0.10.55478_Windows_x64.zip"> the engine</a></code>
*/
@Parameter(defaultValue="https://developer.axonivy.com/download/maven.html", property=ENGINE_LIST_URL_PROPERTY)
URL engineListPageUrl;
/**
* Engine type that will be downloaded if {@link #autoInstallEngine} is set and the engine must be
* retrieved from the {@link #engineListPageUrl}.
* Possible values are:
* <ul>
* <li>Linux_x64</li>
* <li>Windows_x64</li>
* <li>Slim_All_x64</li>
* <li>OSGi_All_x64</li>
* <li>OSGi_Slim_All_x64</li>
* </ul>
*/
@Parameter(defaultValue="OSGi_All_x64", property="ivy.engine.os.arch")
String osArchitecture;
/**
* Enables the automatic installation of an ivy Engine in the {@link #engineDirectory}.
* If there is yet no engine installed, or the {@link #ivyVersion} does not match, the
* engine will be downloaded from the {@link #engineDownloadUrl} and unpacked into the
* {@link #engineDirectory}.
*/
@Parameter(defaultValue="true", property="ivy.engine.auto.install")
boolean autoInstallEngine;
@Override
public void execute() throws MojoExecutionException
{
getLog().info("Provide engine for ivy version " + ivyVersion);
ensureEngineIsInstalled();
}
private void ensureEngineIsInstalled() throws MojoExecutionException
{
VersionRange ivyVersionRange = getIvyVersionRange();
if (identifyAndGetEngineDirectory() == null)
{
handleNoInstalledEngine();
}
else
{
if (engineDirectoryIsEmpty())
{
getRawEngineDirectory().mkdirs();
}
ArtifactVersion installedEngineVersion = getInstalledEngineVersion(getRawEngineDirectory());
if (installedEngineVersion == null ||
!ivyVersionRange.containsVersion(installedEngineVersion))
{
handleWrongIvyVersion(installedEngineVersion);
}
}
}
private void handleNoInstalledEngine() throws MojoExecutionException
{
getLog().info("No installed engine found for version '"+ivyVersion+"'");
boolean cleanEngineDir = false;
downloadAndInstallEngine(cleanEngineDir);
}
private void handleWrongIvyVersion(ArtifactVersion installedEngineVersion) throws MojoExecutionException
{
getLog().info("Installed engine has version '"+installedEngineVersion+"' instead of expected '"+ivyVersion+"'");
boolean cleanEngineDir = installedEngineVersion != null;
downloadAndInstallEngine(cleanEngineDir);
}
private void downloadAndInstallEngine(boolean cleanEngineDir) throws MojoExecutionException
{
if (autoInstallEngine)
{
getLog().info("Will automatically download Engine now.");
EngineDownloader engineDownloader = new EngineDownloader();
File downloadZip = engineDownloader.downloadEngine();
if (cleanEngineDir)
{
removeOldEngineContent();
}
if (!isEngineDirectoryIdentified())
{
String engineZipFileName = engineDownloader.getZipFileNameFromDownloadUrl();
engineDirectory = new File(engineCacheDirectory, ivyEngineVersionOfZip(engineZipFileName));
engineDirectory.mkdirs();
}
unpackEngine(downloadZip);
downloadZip.delete();
ArtifactVersion installedEngineVersion = getInstalledEngineVersion(getRawEngineDirectory());
if (installedEngineVersion == null || !getIvyVersionRange().containsVersion(installedEngineVersion))
{
throw new MojoExecutionException("Automatic installation of an ivyEngine failed. "
+ "Downloaded version is '"+installedEngineVersion+"' but expecting '"+ivyVersion+"'.");
}
}
else
{
throw new MojoExecutionException("Aborting class generation as no valid ivy Engine is available! "
+ "Use the 'autoInstallEngine' parameter for an automatic installation.");
}
}
static String ivyEngineVersionOfZip(String engineZipFileName)
{
Matcher matcher = Pattern.compile("[a-zA-Z]*(([\\d]+\\.?)+)*").matcher(engineZipFileName);
if (matcher.find())
{
String version = matcher.group(1);
if (version != null)
{
return EngineVersionEvaluator.toReleaseVersion(matcher.group(1));
}
}
return engineZipFileName; // fallback: no version in file name
}
private void removeOldEngineContent() throws MojoExecutionException
{
try
{
FileUtils.cleanDirectory(getRawEngineDirectory());
}
catch (IOException ex)
{
throw new MojoExecutionException("Failed to clean outdated ivy Engine directory '"+getRawEngineDirectory()+"'.", ex);
}
}
private boolean engineDirectoryIsEmpty()
{
return !getRawEngineDirectory().isDirectory() || ArrayUtils.isEmpty(getRawEngineDirectory().listFiles());
}
private void unpackEngine(File downloadZip) throws MojoExecutionException
{
try
{
String targetLocation = getRawEngineDirectory().getAbsolutePath();
getLog().info("Unpacking engine " + downloadZip.getAbsolutePath() + " to " + targetLocation);
ZipFile engineZip = new ZipFile(downloadZip);
engineZip.extractAll(targetLocation);
}
catch (ZipException ex)
{
throw new MojoExecutionException("Failed to unpack downloaded engine '" + downloadZip + "'.", ex);
}
}
File getDownloadDirectory()
{
return SystemUtils.getJavaIoTmpDir();
}
class EngineDownloader
{
private String zipFileName = null;
private File downloadEngine() throws MojoExecutionException
{
URL downloadUrlToUse = (engineDownloadUrl != null) ? engineDownloadUrl : findEngineDownloadUrlFromListPage();
return downloadEngineFromUrl(downloadUrlToUse);
}
private URL findEngineDownloadUrlFromListPage() throws MojoExecutionException
{
try (InputStream pageStream = new UrlRedirectionResolver().followRedirections(engineListPageUrl))
{
return findEngineDownloadUrl(pageStream);
}
catch (IOException ex)
{
throw new MojoExecutionException("Failed to find engine download link in list page "+engineListPageUrl, ex);
}
}
URL findEngineDownloadUrl(InputStream htmlStream) throws MojoExecutionException, MalformedURLException
{
String engineFileNameRegex = "AxonIvyEngine[^.]+?\\.[^.]+?\\.+[^_]*?_"+osArchitecture+"\\.zip";
Pattern enginePattern = Pattern.compile("href=[\"|'][^\"']*?"+engineFileNameRegex+"[\"|']");
try(Scanner scanner = new Scanner(htmlStream))
{
String engineLink = null;
while (StringUtils.isBlank(engineLink))
{
String engineLinkMatch = scanner.findWithinHorizon(enginePattern, 0);
if (engineLinkMatch == null)
{
throw new MojoExecutionException("Could not find a link to engine for version '"+ivyVersion+"' on site '"+engineListPageUrl+"'");
}
String versionString = StringUtils.substringBetween(engineLinkMatch, "AxonIvyEngine", "_"+osArchitecture);
ArtifactVersion version = new DefaultArtifactVersion(EngineVersionEvaluator.toReleaseVersion(versionString));
if (getIvyVersionRange().containsVersion(version))
{
engineLink = StringUtils.replace(engineLinkMatch, "\"", "'");
engineLink = StringUtils.substringBetween(engineLink, "href='", "'");
}
}
return toAbsoluteLink(engineListPageUrl, engineLink);
}
}
private URL toAbsoluteLink(URL baseUrl, String parsedEngineArchivLink) throws MalformedURLException
{
boolean isAbsoluteLink = StringUtils.startsWithAny(parsedEngineArchivLink, "http://", "https://");
if (isAbsoluteLink)
{
return new URL(parsedEngineArchivLink);
}
return new URL(baseUrl, parsedEngineArchivLink);
}
private File downloadEngineFromUrl(URL engineUrl) throws MojoExecutionException
{
try
{
File downloadZip = evaluateTargetFile(engineUrl);
getLog().info("Starting engine download from "+engineUrl);
Files.copy(engineUrl.openStream(), downloadZip.toPath(), StandardCopyOption.REPLACE_EXISTING);
return downloadZip;
}
catch (IOException ex)
{
throw new MojoExecutionException("Failed to download engine from '" + engineUrl + "' to '"
+ getDownloadDirectory() + "'", ex);
}
}
private File evaluateTargetFile(URL engineUrl)
{
zipFileName = StringUtils.substringAfterLast(engineUrl.toExternalForm(), "/");
File downloadZip = new File(getDownloadDirectory(), zipFileName);
int tempFileSuffix = 0;
while (downloadZip.exists())
{
String suffixedZipFileName = zipFileName + "." + tempFileSuffix;
downloadZip = new File(getDownloadDirectory(), suffixedZipFileName);
tempFileSuffix++;
}
return downloadZip;
}
/**
* Extracts the name of the engine zip-file from the url used to download the engine.
* <br/>
* The zip-file name is only known <i>after</i> downloading the engine, since the download-url
* might be extracted from an engine list-page.
* <br/>
* The returned zip-file name is not necessarily equal to the name of the downloaded zip-file, since the
* downloaded file could have been renamed to avoid name conflicts.
*
* @return engine zip file-name
*/
private String getZipFileNameFromDownloadUrl()
{
if (zipFileName == null)
{
throw new IllegalStateException("Engine zip file name is not set up.");
}
return zipFileName;
}
}
}
|
Story XIVY-2025: Remove legacy engine build
- Remove OSGi prefix from osArchitecture
Reviewed by rew
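As a rough usage sketch (not part of this commit; the directory and version values below are illustrative), the goal is now invoked with one of the remaining architecture values, e.g. the new `All_x64` default:
```
mvn com.axonivy.ivy.ci:project-build-plugin:6.2.0:installEngine
  -Divy.engine.directory=/path/to/engine
  -Divy.engine.version=6.2.0
  -Divy.engine.os.arch=All_x64
```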
|
src/main/java/ch/ivyteam/ivy/maven/InstallEngineMojo.java
|
Story XIVY-2025: Remove legacy engine build - Remove OSGi prefix from osArchitecture Reviewed by rew
|
|
Java
|
apache-2.0
|
dc9137f29418b7ee55491fc43501130f94e44c6c
| 0
|
networknt/json-schema-validator
|
/*
* Copyright (c) 2016 Network New Technologies Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.networknt.schema;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.MessageFormat;
import java.util.Collections;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.JsonNode;
import com.networknt.schema.url.URLFactory;
public class RefValidator extends BaseJsonValidator implements JsonValidator {
private static final Logger logger = LoggerFactory.getLogger(RefValidator.class);
protected JsonSchema schema;
private static final String REF_CURRENT = "#";
public RefValidator(String schemaPath, JsonNode schemaNode, JsonSchema parentSchema, ValidationContext validationContext) {
super(schemaPath, schemaNode, parentSchema, ValidatorTypeCode.REF, validationContext);
String refValue = schemaNode.asText();
schema = getRefSchema(parentSchema, validationContext, refValue);
if (schema == null) {
throw new JsonSchemaException(ValidationMessage.of(ValidatorTypeCode.REF.getValue(), CustomErrorMessageType.of("internal.unresolvedRef", new MessageFormat("{0}: Reference {1} cannot be resolved")), schemaPath, refValue));
}
}
static JsonSchema getRefSchema(JsonSchema parentSchema, ValidationContext validationContext, String refValue) {
if (!refValue.startsWith(REF_CURRENT)) {
// This will be the url extracted from the refValue (this may be a relative or absolute Url).
final String refUrl;
final int index = refValue.indexOf(REF_CURRENT);
if (index > 0) {
refUrl = refValue.substring(0, index);
} else {
refUrl = refValue;
}
// This will determine the correct absolute url for the refUrl. This decision will take into
// account the current url of the parent schema.
URL schemaUrl = determineSchemaUrl(parentSchema, refUrl);
if (schemaUrl == null) {
return null;
}
// This should retrieve schemas regardless of the protocol that is in the url.
parentSchema = validationContext.getJsonSchemaFactory().getSchema(schemaUrl, validationContext.getConfig());
if (index < 0) {
return parentSchema.findAncestor();
} else {
refValue = refValue.substring(index);
}
}
if (refValue.equals(REF_CURRENT)) {
return parentSchema.findAncestor();
} else {
JsonNode node = parentSchema.getRefSchemaNode(refValue);
if (node != null) {
return new JsonSchema(validationContext, refValue, parentSchema.getCurrentUrl(), node, parentSchema);
}
}
return null;
}
private static URL determineSchemaUrl(JsonSchema parentSchema, String refUrl) {
URL schemaUrl;
try {
// If the refUrl is an absolute url, then this will succeed.
schemaUrl = URLFactory.toURL(refUrl);
} catch (MalformedURLException e) {
try {
// If the refUrl is a valid relative url in the context of the parent schema's url,
// then this will succeed.
schemaUrl = URLFactory.toURL(parentSchema.getCurrentUrl(), refUrl);
} catch (MalformedURLException e2) {
// We are unable to resolve the reference at this point.
schemaUrl = null;
}
}
return schemaUrl;
}
public Set<ValidationMessage> validate(JsonNode node, JsonNode rootNode, String at) {
debug(logger, node, rootNode, at);
if (schema != null) {
return schema.validate(node, rootNode, at);
} else {
return Collections.emptySet();
}
}
}
|
src/main/java/com/networknt/schema/RefValidator.java
|
/*
* Copyright (c) 2016 Network New Technologies Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.networknt.schema;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.MessageFormat;
import java.util.Collections;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.JsonNode;
import com.networknt.schema.url.URLFactory;
public class RefValidator extends BaseJsonValidator implements JsonValidator {
private static final Logger logger = LoggerFactory.getLogger(RefValidator.class);
protected JsonSchema schema;
private static final String REF_CURRENT = "#";
public RefValidator(String schemaPath, JsonNode schemaNode, JsonSchema parentSchema, ValidationContext validationContext) {
super(schemaPath, schemaNode, parentSchema, ValidatorTypeCode.REF, validationContext);
String refValue = schemaNode.asText();
schema = getRefSchema(parentSchema, validationContext, refValue);
if (schema == null) {
throw new JsonSchemaException(ValidationMessage.of(ValidatorTypeCode.REF.getValue(), CustomErrorMessageType.of("internal.unresolvedRef", new MessageFormat("{0}: Reference {1} cannot be resolved")), schemaPath, refValue));
}
}
static JsonSchema getRefSchema(JsonSchema parentSchema, ValidationContext validationContext, String refValue) {
if (!refValue.startsWith(REF_CURRENT)) {
// This will be the url extracted from the refValue (this may be a relative or absolute Url).
final String refUrl;
final int index = refValue.indexOf(REF_CURRENT);
if (index > 0) {
refUrl = refValue.substring(0, index);
} else {
refUrl = refValue;
}
// This will determine the correct absolute url for the refUrl. This decision will take into
// account the current url of the parent schema.
URL schemaUrl = determineSchemaUrl(parentSchema, refUrl);
if (schemaUrl == null) {
return null;
}
// This should retrieve schemas regardless of the protocol that is in the url.
parentSchema = validationContext.getJsonSchemaFactory().getSchema(schemaUrl);
if (index < 0) {
return parentSchema.findAncestor();
} else {
refValue = refValue.substring(index);
}
}
if (refValue.equals(REF_CURRENT)) {
return parentSchema.findAncestor();
} else {
JsonNode node = parentSchema.getRefSchemaNode(refValue);
if (node != null) {
return new JsonSchema(validationContext, refValue, parentSchema.getCurrentUrl(), node, parentSchema);
}
}
return null;
}
private static URL determineSchemaUrl(JsonSchema parentSchema, String refUrl) {
URL schemaUrl;
try {
// If the refUrl is an absolute url, then this will succeed.
schemaUrl = URLFactory.toURL(refUrl);
} catch (MalformedURLException e) {
try {
// If the refUrl is a valid relative url in the context of the parent schema's url,
// then this will succeed.
schemaUrl = URLFactory.toURL(parentSchema.getCurrentUrl(), refUrl);
} catch (MalformedURLException e2) {
// We are unable to resolve the reference at this point.
schemaUrl = null;
}
}
return schemaUrl;
}
public Set<ValidationMessage> validate(JsonNode node, JsonNode rootNode, String at) {
debug(logger, node, rootNode, at);
if (schema != null) {
return schema.validate(node, rootNode, at);
} else {
return Collections.emptySet();
}
}
}
|
Updated my RefValidator implementation to utilize the recently introduced URL mapping changes.
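For illustration only, here is a minimal, self-contained sketch of the "#"-splitting step that getRefSchema performs above; the $ref value is hypothetical and the URL does not need to exist.
public class RefSplitSketch {
    public static void main(String[] args) {
        // Hypothetical $ref value: an absolute URL followed by a JSON Pointer fragment.
        String refValue = "http://example.com/schemas/address.json#/definitions/street";
        int index = refValue.indexOf("#");
        // URL part that determineSchemaUrl()/JsonSchemaFactory.getSchema() would load.
        String refUrl = index > 0 ? refValue.substring(0, index) : refValue;
        // Fragment part that getRefSchemaNode() resolves inside the loaded schema.
        String fragment = index >= 0 ? refValue.substring(index) : "#";
        System.out.println(refUrl + "  ->  " + fragment);
    }
}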
|
src/main/java/com/networknt/schema/RefValidator.java
|
Updated my RefValidator implementation to utilize the recently introduced URL mapping changes.
|
|
Java
|
apache-2.0
|
2434e63187475e416ee0e2e8fd5cc51ec5e5bc19
| 0
|
sdinot/hipparchus,sdinot/hipparchus,apache/commons-math,sdinot/hipparchus,sdinot/hipparchus,apache/commons-math,Hipparchus-Math/hipparchus,apache/commons-math,Hipparchus-Math/hipparchus,Hipparchus-Math/hipparchus,apache/commons-math,Hipparchus-Math/hipparchus
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math.linear;
import java.io.Serializable;
import org.apache.commons.math.exception.DimensionMismatchException;
import org.apache.commons.math.exception.NullArgumentException;
import org.apache.commons.math.exception.NoDataException;
import org.apache.commons.math.exception.MathIllegalStateException;
import org.apache.commons.math.exception.util.LocalizedFormats;
import org.apache.commons.math.util.MathUtils;
/**
* Implementation of {@link RealMatrix} using a {@code double[][]} array to
* store entries.
*
* @version $Id$
*/
public class Array2DRowRealMatrix extends AbstractRealMatrix implements Serializable {
/** Serializable version identifier. */
private static final long serialVersionUID = -1067294169172445528L;
/** Entries of the matrix. */
protected double data[][];
/**
* Creates a matrix with no data
*/
public Array2DRowRealMatrix() {}
/**
* Create a new RealMatrix with the supplied row and column dimensions.
*
* @param rowDimension Number of rows in the new matrix.
* @param columnDimension Number of columns in the new matrix.
* @throws org.apache.commons.math.exception.NotStrictlyPositiveException
* if the row or column dimension is not positive.
*/
public Array2DRowRealMatrix(final int rowDimension, final int columnDimension) {
super(rowDimension, columnDimension);
data = new double[rowDimension][columnDimension];
}
/**
* Create a new {@code RealMatrix} using the input array as the underlying
* data array.
* <p>The input array is copied, not referenced. This constructor has
* the same effect as calling {@link #Array2DRowRealMatrix(double[][], boolean)}
* with the second argument set to {@code true}.</p>
*
* @param d Data for the new matrix.
* @throws DimensionMismatchException if {@code d} is not rectangular.
* @throws NoDataException if {@code d} row or column dimension is zero.
* @throws NullArgumentException if {@code d} is {@code null}.
* @see #Array2DRowRealMatrix(double[][], boolean)
*/
public Array2DRowRealMatrix(final double[][] d)
throws DimensionMismatchException, NoDataException, NullArgumentException {
copyIn(d);
}
/**
* Create a new RealMatrix using the input array as the underlying
* data array.
* If an array is built specially in order to be embedded in a
* RealMatrix and not used directly, the {@code copyArray} may be
* set to {@code false}. This will prevent the copying and improve
* performance as no new array will be built and no data will be copied.
*
* @param d Data for new matrix.
* @param copyArray if {@code true}, the input array will be copied,
* otherwise it will be referenced.
* @throws DimensionMismatchException if {@code d} is not rectangular
* (not all rows have the same length) or empty.
* @throws NullArgumentException if {@code d} is {@code null}.
* @throws NoDataException if there are not at least one row and one column.
* @see #Array2DRowRealMatrix(double[][])
*/
public Array2DRowRealMatrix(final double[][] d, final boolean copyArray) {
if (copyArray) {
copyIn(d);
} else {
if (d == null) {
throw new NullArgumentException();
}
final int nRows = d.length;
if (nRows == 0) {
throw new NoDataException(LocalizedFormats.AT_LEAST_ONE_ROW);
}
final int nCols = d[0].length;
if (nCols == 0) {
throw new NoDataException(LocalizedFormats.AT_LEAST_ONE_COLUMN);
}
for (int r = 1; r < nRows; r++) {
if (d[r].length != nCols) {
throw new DimensionMismatchException(d[r].length, nCols);
}
}
data = d;
}
}
/**
* Create a new (column) RealMatrix using {@code v} as the
* data for the unique column of the created matrix.
* The input array is copied.
*
* @param v Column vector holding data for new matrix.
*/
public Array2DRowRealMatrix(final double[] v) {
final int nRows = v.length;
data = new double[nRows][1];
for (int row = 0; row < nRows; row++) {
data[row][0] = v[row];
}
}
/** {@inheritDoc} */
@Override
public RealMatrix createMatrix(final int rowDimension,
final int columnDimension) {
return new Array2DRowRealMatrix(rowDimension, columnDimension);
}
/** {@inheritDoc} */
@Override
public RealMatrix copy() {
return new Array2DRowRealMatrix(copyOut(), false);
}
/**
* Compute the sum of this matrix with {@code m}.
*
* @param m Matrix to be added.
* @return {@code this} + m.
* @throws MatrixDimensionMismatchException
* if {@code m} is not the same size as this matrix.
*/
public Array2DRowRealMatrix add(final Array2DRowRealMatrix m) {
// Safety check.
MatrixUtils.checkAdditionCompatible(this, m);
final int rowCount = getRowDimension();
final int columnCount = getColumnDimension();
final double[][] outData = new double[rowCount][columnCount];
for (int row = 0; row < rowCount; row++) {
final double[] dataRow = data[row];
final double[] mRow = m.data[row];
final double[] outDataRow = outData[row];
for (int col = 0; col < columnCount; col++) {
outDataRow[col] = dataRow[col] + mRow[col];
}
}
return new Array2DRowRealMatrix(outData, false);
}
/**
* Subtract {@code m} from this matrix.
*
* @param m Matrix to be subtracted.
* @return {@code this} - m.
* @throws MatrixDimensionMismatchException
* if {@code m} is not the same size as this matrix.
*/
public Array2DRowRealMatrix subtract(final Array2DRowRealMatrix m) {
// Safety check.
MatrixUtils.checkSubtractionCompatible(this, m);
final int rowCount = getRowDimension();
final int columnCount = getColumnDimension();
final double[][] outData = new double[rowCount][columnCount];
for (int row = 0; row < rowCount; row++) {
final double[] dataRow = data[row];
final double[] mRow = m.data[row];
final double[] outDataRow = outData[row];
for (int col = 0; col < columnCount; col++) {
outDataRow[col] = dataRow[col] - mRow[col];
}
}
return new Array2DRowRealMatrix(outData, false);
}
/**
* Returns the result of postmultiplying this matrix by {@code m}.
*
* @param m Matrix to postmultiply by.
* @return {@code this} * m.
* @throws DimensionMismatchException if the number of columns of this
* matrix is not equal to the number of rows of {@code m}.
*/
public Array2DRowRealMatrix multiply(final Array2DRowRealMatrix m) {
// Safety check.
MatrixUtils.checkMultiplicationCompatible(this, m);
final int nRows = this.getRowDimension();
final int nCols = m.getColumnDimension();
final int nSum = this.getColumnDimension();
final double[][] outData = new double[nRows][nCols];
for (int row = 0; row < nRows; row++) {
final double[] dataRow = data[row];
final double[] outDataRow = outData[row];
for (int col = 0; col < nCols; col++) {
double sum = 0;
for (int i = 0; i < nSum; i++) {
sum += dataRow[i] * m.data[i][col];
}
outDataRow[col] = sum;
}
}
return new Array2DRowRealMatrix(outData, false);
}
/** {@inheritDoc} */
@Override
public double[][] getData() {
return copyOut();
}
/**
* Get a reference to the underlying data array.
*
* @return 2-dimensional array of entries.
*/
public double[][] getDataRef() {
return data;
}
/** {@inheritDoc} */
@Override
public void setSubMatrix(final double[][] subMatrix,
final int row, final int column) {
if (data == null) {
if (row > 0) {
throw new MathIllegalStateException(LocalizedFormats.FIRST_ROWS_NOT_INITIALIZED_YET, row);
}
if (column > 0) {
throw new MathIllegalStateException(LocalizedFormats.FIRST_COLUMNS_NOT_INITIALIZED_YET, column);
}
MathUtils.checkNotNull(subMatrix);
final int nRows = subMatrix.length;
if (nRows == 0) {
throw new NoDataException(LocalizedFormats.AT_LEAST_ONE_ROW);
}
final int nCols = subMatrix[0].length;
if (nCols == 0) {
throw new NoDataException(LocalizedFormats.AT_LEAST_ONE_COLUMN);
}
data = new double[subMatrix.length][nCols];
for (int i = 0; i < data.length; ++i) {
if (subMatrix[i].length != nCols) {
throw new DimensionMismatchException(subMatrix[i].length, nCols);
}
System.arraycopy(subMatrix[i], 0, data[i + row], column, nCols);
}
} else {
super.setSubMatrix(subMatrix, row, column);
}
}
/** {@inheritDoc} */
@Override
public double getEntry(final int row, final int column) {
MatrixUtils.checkMatrixIndex(this, row, column);
return data[row][column];
}
/** {@inheritDoc} */
@Override
public void setEntry(final int row, final int column, final double value) {
MatrixUtils.checkMatrixIndex(this, row, column);
data[row][column] = value;
}
/** {@inheritDoc} */
@Override
public void addToEntry(final int row, final int column, final double increment) {
MatrixUtils.checkMatrixIndex(this, row, column);
data[row][column] += increment;
}
/** {@inheritDoc} */
@Override
public void multiplyEntry(final int row, final int column, final double factor) {
MatrixUtils.checkMatrixIndex(this, row, column);
data[row][column] *= factor;
}
/** {@inheritDoc} */
@Override
public int getRowDimension() {
return (data == null) ? 0 : data.length;
}
/** {@inheritDoc} */
@Override
public int getColumnDimension() {
return ((data == null) || (data[0] == null)) ? 0 : data[0].length;
}
/** {@inheritDoc} */
@Override
public double[] operate(final double[] v) {
final int nRows = this.getRowDimension();
final int nCols = this.getColumnDimension();
if (v.length != nCols) {
throw new DimensionMismatchException(v.length, nCols);
}
final double[] out = new double[nRows];
for (int row = 0; row < nRows; row++) {
final double[] dataRow = data[row];
double sum = 0;
for (int i = 0; i < nCols; i++) {
sum += dataRow[i] * v[i];
}
out[row] = sum;
}
return out;
}
/** {@inheritDoc} */
@Override
public double[] preMultiply(final double[] v) {
final int nRows = getRowDimension();
final int nCols = getColumnDimension();
if (v.length != nRows) {
throw new DimensionMismatchException(v.length, nRows);
}
final double[] out = new double[nCols];
for (int col = 0; col < nCols; ++col) {
double sum = 0;
for (int i = 0; i < nRows; ++i) {
sum += data[i][col] * v[i];
}
out[col] = sum;
}
return out;
}
/** {@inheritDoc} */
@Override
public double walkInRowOrder(final RealMatrixChangingVisitor visitor) {
final int rows = getRowDimension();
final int columns = getColumnDimension();
visitor.start(rows, columns, 0, rows - 1, 0, columns - 1);
for (int i = 0; i < rows; ++i) {
final double[] rowI = data[i];
for (int j = 0; j < columns; ++j) {
rowI[j] = visitor.visit(i, j, rowI[j]);
}
}
return visitor.end();
}
/** {@inheritDoc} */
@Override
public double walkInRowOrder(final RealMatrixPreservingVisitor visitor) {
final int rows = getRowDimension();
final int columns = getColumnDimension();
visitor.start(rows, columns, 0, rows - 1, 0, columns - 1);
for (int i = 0; i < rows; ++i) {
final double[] rowI = data[i];
for (int j = 0; j < columns; ++j) {
visitor.visit(i, j, rowI[j]);
}
}
return visitor.end();
}
/** {@inheritDoc} */
@Override
public double walkInRowOrder(final RealMatrixChangingVisitor visitor,
final int startRow, final int endRow,
final int startColumn, final int endColumn) {
MatrixUtils.checkSubMatrixIndex(this, startRow, endRow, startColumn, endColumn);
visitor.start(getRowDimension(), getColumnDimension(),
startRow, endRow, startColumn, endColumn);
for (int i = startRow; i <= endRow; ++i) {
final double[] rowI = data[i];
for (int j = startColumn; j <= endColumn; ++j) {
rowI[j] = visitor.visit(i, j, rowI[j]);
}
}
return visitor.end();
}
/** {@inheritDoc} */
@Override
public double walkInRowOrder(final RealMatrixPreservingVisitor visitor,
final int startRow, final int endRow,
final int startColumn, final int endColumn) {
MatrixUtils.checkSubMatrixIndex(this, startRow, endRow, startColumn, endColumn);
visitor.start(getRowDimension(), getColumnDimension(),
startRow, endRow, startColumn, endColumn);
for (int i = startRow; i <= endRow; ++i) {
final double[] rowI = data[i];
for (int j = startColumn; j <= endColumn; ++j) {
visitor.visit(i, j, rowI[j]);
}
}
return visitor.end();
}
/** {@inheritDoc} */
@Override
public double walkInColumnOrder(final RealMatrixChangingVisitor visitor) {
final int rows = getRowDimension();
final int columns = getColumnDimension();
visitor.start(rows, columns, 0, rows - 1, 0, columns - 1);
for (int j = 0; j < columns; ++j) {
for (int i = 0; i < rows; ++i) {
final double[] rowI = data[i];
rowI[j] = visitor.visit(i, j, rowI[j]);
}
}
return visitor.end();
}
/** {@inheritDoc} */
@Override
public double walkInColumnOrder(final RealMatrixPreservingVisitor visitor) {
final int rows = getRowDimension();
final int columns = getColumnDimension();
visitor.start(rows, columns, 0, rows - 1, 0, columns - 1);
for (int j = 0; j < columns; ++j) {
for (int i = 0; i < rows; ++i) {
visitor.visit(i, j, data[i][j]);
}
}
return visitor.end();
}
/** {@inheritDoc} */
@Override
public double walkInColumnOrder(final RealMatrixChangingVisitor visitor,
final int startRow, final int endRow,
final int startColumn, final int endColumn) {
MatrixUtils.checkSubMatrixIndex(this, startRow, endRow, startColumn, endColumn);
visitor.start(getRowDimension(), getColumnDimension(),
startRow, endRow, startColumn, endColumn);
for (int j = startColumn; j <= endColumn; ++j) {
for (int i = startRow; i <= endRow; ++i) {
final double[] rowI = data[i];
rowI[j] = visitor.visit(i, j, rowI[j]);
}
}
return visitor.end();
}
/** {@inheritDoc} */
@Override
public double walkInColumnOrder(final RealMatrixPreservingVisitor visitor,
final int startRow, final int endRow,
final int startColumn, final int endColumn) {
MatrixUtils.checkSubMatrixIndex(this, startRow, endRow, startColumn, endColumn);
visitor.start(getRowDimension(), getColumnDimension(),
startRow, endRow, startColumn, endColumn);
for (int j = startColumn; j <= endColumn; ++j) {
for (int i = startRow; i <= endRow; ++i) {
visitor.visit(i, j, data[i][j]);
}
}
return visitor.end();
}
/**
* Get a fresh copy of the underlying data array.
*
* @return a copy of the underlying data array.
*/
private double[][] copyOut() {
final int nRows = this.getRowDimension();
final double[][] out = new double[nRows][this.getColumnDimension()];
// can't copy 2-d array in one shot, otherwise get row references
for (int i = 0; i < nRows; i++) {
System.arraycopy(data[i], 0, out[i], 0, data[i].length);
}
return out;
}
/**
* Replace data with a fresh copy of the input array.
*
* @param in Data to copy.
* @throws NoDataException if the input array is empty.
* @throws DimensionMismatchException if the input array is not rectangular.
* @throws NullArgumentException if
* the input array is {@code null}.
*/
private void copyIn(final double[][] in)
throws DimensionMismatchException, NoDataException, NullArgumentException {
setSubMatrix(in, 0, 0);
}
}
|
src/main/java/org/apache/commons/math/linear/Array2DRowRealMatrix.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math.linear;
import java.io.Serializable;
import org.apache.commons.math.exception.DimensionMismatchException;
import org.apache.commons.math.exception.NullArgumentException;
import org.apache.commons.math.exception.NoDataException;
import org.apache.commons.math.exception.MathIllegalStateException;
import org.apache.commons.math.exception.util.LocalizedFormats;
import org.apache.commons.math.util.MathUtils;
/**
* Implementation of RealMatrix using a double[][] array to store entries and
* <a href="http://www.math.gatech.edu/~bourbaki/math2601/Web-notes/2num.pdf">
* LU decomposition</a> to support linear system
* solution and inverse.
* <p>
* The LU decomposition is performed as needed, to support the following operations: <ul>
* <li>solve</li>
* <li>isSingular</li>
* <li>getDeterminant</li>
* <li>inverse</li> </ul></p>
* <p>
* <strong>Usage notes</strong>:<br>
* <ul><li>
* The LU decomposition is cached and reused on subsequent calls.
* If data are modified via references to the underlying array obtained using
* <code>getDataRef()</code>, then the stored LU decomposition will not be
* discarded. In this case, you need to explicitly invoke
* <code>LUDecompose()</code> to recompute the decomposition
* before using any of the methods above.</li>
* <li>
* As specified in the {@link RealMatrix} interface, matrix element indexing
* is 0-based -- e.g., <code>getEntry(0, 0)</code>
* returns the element in the first row, first column of the matrix.</li></ul>
* </p>
*
* @version $Id$
*/
public class Array2DRowRealMatrix extends AbstractRealMatrix implements Serializable {
/** Serializable version identifier. */
private static final long serialVersionUID = -1067294169172445528L;
/** Entries of the matrix. */
protected double data[][];
/**
* Creates a matrix with no data
*/
public Array2DRowRealMatrix() {}
/**
* Create a new RealMatrix with the supplied row and column dimensions.
*
* @param rowDimension Number of rows in the new matrix.
* @param columnDimension Number of columns in the new matrix.
* @throws org.apache.commons.math.exception.NotStrictlyPositiveException
* if the row or column dimension is not positive.
*/
public Array2DRowRealMatrix(final int rowDimension, final int columnDimension) {
super(rowDimension, columnDimension);
data = new double[rowDimension][columnDimension];
}
/**
* Create a new {@code RealMatrix} using the input array as the underlying
* data array.
* <p>The input array is copied, not referenced. This constructor has
* the same effect as calling {@link #Array2DRowRealMatrix(double[][], boolean)}
* with the second argument set to {@code true}.</p>
*
* @param d Data for the new matrix.
* @throws DimensionMismatchException if {@code d} is not rectangular.
* @throws NoDataException if {@code d} row or column dimension is zero.
* @throws NullArgumentException if {@code d} is {@code null}.
* @see #Array2DRowRealMatrix(double[][], boolean)
*/
public Array2DRowRealMatrix(final double[][] d)
throws DimensionMismatchException, NoDataException, NullArgumentException {
copyIn(d);
}
/**
* Create a new RealMatrix using the input array as the underlying
* data array.
* If an array is built specially in order to be embedded in a
* RealMatrix and not used directly, the {@code copyArray} may be
* set to {@code false}. This will prevent the copying and improve
* performance as no new array will be built and no data will be copied.
*
* @param d Data for new matrix.
* @param copyArray if {@code true}, the input array will be copied,
* otherwise it will be referenced.
* @throws DimensionMismatchException if {@code d} is not rectangular
* (not all rows have the same length) or empty.
* @throws NullArgumentException if {@code d} is {@code null}.
* @throws NoDataException if there are not at least one row and one column.
* @see #Array2DRowRealMatrix(double[][])
*/
public Array2DRowRealMatrix(final double[][] d, final boolean copyArray) {
if (copyArray) {
copyIn(d);
} else {
if (d == null) {
throw new NullArgumentException();
}
final int nRows = d.length;
if (nRows == 0) {
throw new NoDataException(LocalizedFormats.AT_LEAST_ONE_ROW);
}
final int nCols = d[0].length;
if (nCols == 0) {
throw new NoDataException(LocalizedFormats.AT_LEAST_ONE_COLUMN);
}
for (int r = 1; r < nRows; r++) {
if (d[r].length != nCols) {
throw new DimensionMismatchException(d[r].length, nCols);
}
}
data = d;
}
}
/**
* Create a new (column) RealMatrix using {@code v} as the
* data for the unique column of the created matrix.
* The input array is copied.
*
* @param v Column vector holding data for new matrix.
*/
public Array2DRowRealMatrix(final double[] v) {
final int nRows = v.length;
data = new double[nRows][1];
for (int row = 0; row < nRows; row++) {
data[row][0] = v[row];
}
}
/** {@inheritDoc} */
@Override
public RealMatrix createMatrix(final int rowDimension,
final int columnDimension) {
return new Array2DRowRealMatrix(rowDimension, columnDimension);
}
/** {@inheritDoc} */
@Override
public RealMatrix copy() {
return new Array2DRowRealMatrix(copyOut(), false);
}
/**
* Compute the sum of this matrix with {@code m}.
*
* @param m Matrix to be added.
* @return {@code this} + m.
* @throws MatrixDimensionMismatchException
* if {@code m} is not the same size as this matrix.
*/
public Array2DRowRealMatrix add(final Array2DRowRealMatrix m) {
// Safety check.
MatrixUtils.checkAdditionCompatible(this, m);
final int rowCount = getRowDimension();
final int columnCount = getColumnDimension();
final double[][] outData = new double[rowCount][columnCount];
for (int row = 0; row < rowCount; row++) {
final double[] dataRow = data[row];
final double[] mRow = m.data[row];
final double[] outDataRow = outData[row];
for (int col = 0; col < columnCount; col++) {
outDataRow[col] = dataRow[col] + mRow[col];
}
}
return new Array2DRowRealMatrix(outData, false);
}
/**
* Subtract {@code m} from this matrix.
*
* @param m Matrix to be subtracted.
* @return {@code this} - m.
* @throws MatrixDimensionMismatchException
* if {@code m} is not the same size as this matrix.
*/
public Array2DRowRealMatrix subtract(final Array2DRowRealMatrix m) {
// Safety check.
MatrixUtils.checkSubtractionCompatible(this, m);
final int rowCount = getRowDimension();
final int columnCount = getColumnDimension();
final double[][] outData = new double[rowCount][columnCount];
for (int row = 0; row < rowCount; row++) {
final double[] dataRow = data[row];
final double[] mRow = m.data[row];
final double[] outDataRow = outData[row];
for (int col = 0; col < columnCount; col++) {
outDataRow[col] = dataRow[col] - mRow[col];
}
}
return new Array2DRowRealMatrix(outData, false);
}
/**
* Returns the result of postmultiplying this matrix by {@code m}.
*
* @param m Matrix to postmultiply by.
* @return {@code this} * m.
* @throws DimensionMismatchException if the number of columns of this
* matrix is not equal to the number of rows of {@code m}.
*/
public Array2DRowRealMatrix multiply(final Array2DRowRealMatrix m) {
// Safety check.
MatrixUtils.checkMultiplicationCompatible(this, m);
final int nRows = this.getRowDimension();
final int nCols = m.getColumnDimension();
final int nSum = this.getColumnDimension();
final double[][] outData = new double[nRows][nCols];
for (int row = 0; row < nRows; row++) {
final double[] dataRow = data[row];
final double[] outDataRow = outData[row];
for (int col = 0; col < nCols; col++) {
double sum = 0;
for (int i = 0; i < nSum; i++) {
sum += dataRow[i] * m.data[i][col];
}
outDataRow[col] = sum;
}
}
return new Array2DRowRealMatrix(outData, false);
}
/** {@inheritDoc} */
@Override
public double[][] getData() {
return copyOut();
}
/**
* Get a reference to the underlying data array.
*
* @return 2-dimensional array of entries.
*/
public double[][] getDataRef() {
return data;
}
/** {@inheritDoc} */
@Override
public void setSubMatrix(final double[][] subMatrix,
final int row, final int column) {
if (data == null) {
if (row > 0) {
throw new MathIllegalStateException(LocalizedFormats.FIRST_ROWS_NOT_INITIALIZED_YET, row);
}
if (column > 0) {
throw new MathIllegalStateException(LocalizedFormats.FIRST_COLUMNS_NOT_INITIALIZED_YET, column);
}
MathUtils.checkNotNull(subMatrix);
final int nRows = subMatrix.length;
if (nRows == 0) {
throw new NoDataException(LocalizedFormats.AT_LEAST_ONE_ROW);
}
final int nCols = subMatrix[0].length;
if (nCols == 0) {
throw new NoDataException(LocalizedFormats.AT_LEAST_ONE_COLUMN);
}
data = new double[subMatrix.length][nCols];
for (int i = 0; i < data.length; ++i) {
if (subMatrix[i].length != nCols) {
throw new DimensionMismatchException(subMatrix[i].length, nCols);
}
System.arraycopy(subMatrix[i], 0, data[i + row], column, nCols);
}
} else {
super.setSubMatrix(subMatrix, row, column);
}
}
/** {@inheritDoc} */
@Override
public double getEntry(final int row, final int column) {
MatrixUtils.checkMatrixIndex(this, row, column);
return data[row][column];
}
/** {@inheritDoc} */
@Override
public void setEntry(final int row, final int column, final double value) {
MatrixUtils.checkMatrixIndex(this, row, column);
data[row][column] = value;
}
/** {@inheritDoc} */
@Override
public void addToEntry(final int row, final int column, final double increment) {
MatrixUtils.checkMatrixIndex(this, row, column);
data[row][column] += increment;
}
/** {@inheritDoc} */
@Override
public void multiplyEntry(final int row, final int column, final double factor) {
MatrixUtils.checkMatrixIndex(this, row, column);
data[row][column] *= factor;
}
/** {@inheritDoc} */
@Override
public int getRowDimension() {
return (data == null) ? 0 : data.length;
}
/** {@inheritDoc} */
@Override
public int getColumnDimension() {
return ((data == null) || (data[0] == null)) ? 0 : data[0].length;
}
/** {@inheritDoc} */
@Override
public double[] operate(final double[] v) {
final int nRows = this.getRowDimension();
final int nCols = this.getColumnDimension();
if (v.length != nCols) {
throw new DimensionMismatchException(v.length, nCols);
}
final double[] out = new double[nRows];
for (int row = 0; row < nRows; row++) {
final double[] dataRow = data[row];
double sum = 0;
for (int i = 0; i < nCols; i++) {
sum += dataRow[i] * v[i];
}
out[row] = sum;
}
return out;
}
/** {@inheritDoc} */
@Override
public double[] preMultiply(final double[] v) {
final int nRows = getRowDimension();
final int nCols = getColumnDimension();
if (v.length != nRows) {
throw new DimensionMismatchException(v.length, nRows);
}
final double[] out = new double[nCols];
for (int col = 0; col < nCols; ++col) {
double sum = 0;
for (int i = 0; i < nRows; ++i) {
sum += data[i][col] * v[i];
}
out[col] = sum;
}
return out;
}
/** {@inheritDoc} */
@Override
public double walkInRowOrder(final RealMatrixChangingVisitor visitor) {
final int rows = getRowDimension();
final int columns = getColumnDimension();
visitor.start(rows, columns, 0, rows - 1, 0, columns - 1);
for (int i = 0; i < rows; ++i) {
final double[] rowI = data[i];
for (int j = 0; j < columns; ++j) {
rowI[j] = visitor.visit(i, j, rowI[j]);
}
}
return visitor.end();
}
/** {@inheritDoc} */
@Override
public double walkInRowOrder(final RealMatrixPreservingVisitor visitor) {
final int rows = getRowDimension();
final int columns = getColumnDimension();
visitor.start(rows, columns, 0, rows - 1, 0, columns - 1);
for (int i = 0; i < rows; ++i) {
final double[] rowI = data[i];
for (int j = 0; j < columns; ++j) {
visitor.visit(i, j, rowI[j]);
}
}
return visitor.end();
}
/** {@inheritDoc} */
@Override
public double walkInRowOrder(final RealMatrixChangingVisitor visitor,
final int startRow, final int endRow,
final int startColumn, final int endColumn) {
MatrixUtils.checkSubMatrixIndex(this, startRow, endRow, startColumn, endColumn);
visitor.start(getRowDimension(), getColumnDimension(),
startRow, endRow, startColumn, endColumn);
for (int i = startRow; i <= endRow; ++i) {
final double[] rowI = data[i];
for (int j = startColumn; j <= endColumn; ++j) {
rowI[j] = visitor.visit(i, j, rowI[j]);
}
}
return visitor.end();
}
/** {@inheritDoc} */
@Override
public double walkInRowOrder(final RealMatrixPreservingVisitor visitor,
final int startRow, final int endRow,
final int startColumn, final int endColumn) {
MatrixUtils.checkSubMatrixIndex(this, startRow, endRow, startColumn, endColumn);
visitor.start(getRowDimension(), getColumnDimension(),
startRow, endRow, startColumn, endColumn);
for (int i = startRow; i <= endRow; ++i) {
final double[] rowI = data[i];
for (int j = startColumn; j <= endColumn; ++j) {
visitor.visit(i, j, rowI[j]);
}
}
return visitor.end();
}
/** {@inheritDoc} */
@Override
public double walkInColumnOrder(final RealMatrixChangingVisitor visitor) {
final int rows = getRowDimension();
final int columns = getColumnDimension();
visitor.start(rows, columns, 0, rows - 1, 0, columns - 1);
for (int j = 0; j < columns; ++j) {
for (int i = 0; i < rows; ++i) {
final double[] rowI = data[i];
rowI[j] = visitor.visit(i, j, rowI[j]);
}
}
return visitor.end();
}
/** {@inheritDoc} */
@Override
public double walkInColumnOrder(final RealMatrixPreservingVisitor visitor) {
final int rows = getRowDimension();
final int columns = getColumnDimension();
visitor.start(rows, columns, 0, rows - 1, 0, columns - 1);
for (int j = 0; j < columns; ++j) {
for (int i = 0; i < rows; ++i) {
visitor.visit(i, j, data[i][j]);
}
}
return visitor.end();
}
/** {@inheritDoc} */
@Override
public double walkInColumnOrder(final RealMatrixChangingVisitor visitor,
final int startRow, final int endRow,
final int startColumn, final int endColumn) {
MatrixUtils.checkSubMatrixIndex(this, startRow, endRow, startColumn, endColumn);
visitor.start(getRowDimension(), getColumnDimension(),
startRow, endRow, startColumn, endColumn);
for (int j = startColumn; j <= endColumn; ++j) {
for (int i = startRow; i <= endRow; ++i) {
final double[] rowI = data[i];
rowI[j] = visitor.visit(i, j, rowI[j]);
}
}
return visitor.end();
}
/** {@inheritDoc} */
@Override
public double walkInColumnOrder(final RealMatrixPreservingVisitor visitor,
final int startRow, final int endRow,
final int startColumn, final int endColumn) {
MatrixUtils.checkSubMatrixIndex(this, startRow, endRow, startColumn, endColumn);
visitor.start(getRowDimension(), getColumnDimension(),
startRow, endRow, startColumn, endColumn);
for (int j = startColumn; j <= endColumn; ++j) {
for (int i = startRow; i <= endRow; ++i) {
visitor.visit(i, j, data[i][j]);
}
}
return visitor.end();
}
/**
* Get a fresh copy of the underlying data array.
*
* @return a copy of the underlying data array.
*/
private double[][] copyOut() {
final int nRows = this.getRowDimension();
final double[][] out = new double[nRows][this.getColumnDimension()];
// can't copy 2-d array in one shot, otherwise get row references
for (int i = 0; i < nRows; i++) {
System.arraycopy(data[i], 0, out[i], 0, data[i].length);
}
return out;
}
/**
* Replace data with a fresh copy of the input array.
*
* @param in Data to copy.
* @throws NoDataException if the input array is empty.
* @throws DimensionMismatchException if the input array is not rectangular.
* @throws NullArgumentException if
* the input array is {@code null}.
*/
private void copyIn(final double[][] in)
throws DimensionMismatchException, NoDataException, NullArgumentException {
setSubMatrix(in, 0, 0);
}
}
|
Deleted obsolete Javadoc.
git-svn-id: 80d496c472b8b763a5e941dba212da9bf48aeceb@1166751 13f79535-47bb-0310-9956-ffa450edef68
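As a usage note, a small self-contained sketch exercising the constructors and arithmetic defined above; the matrix values are arbitrary.
import org.apache.commons.math.linear.Array2DRowRealMatrix;
public class Array2DRowRealMatrixDemo {
    public static void main(String[] args) {
        // The double[][] constructor copies its input (see the Javadoc above).
        Array2DRowRealMatrix a = new Array2DRowRealMatrix(new double[][] {{1, 2}, {3, 4}});
        Array2DRowRealMatrix b = new Array2DRowRealMatrix(new double[][] {{5, 6}, {7, 8}});
        Array2DRowRealMatrix sum = a.add(b);          // element-wise sum
        Array2DRowRealMatrix product = a.multiply(b); // standard matrix product
        System.out.println(sum.getEntry(0, 0));       // 6.0
        System.out.println(product.getEntry(0, 0));   // 1*5 + 2*7 = 19.0
    }
}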
|
src/main/java/org/apache/commons/math/linear/Array2DRowRealMatrix.java
|
Deleted obsolete Javadoc.
|
|
Java
|
apache-2.0
|
51f3fcfdaf12f8b4dd3ce8e0f5af9d7e80125a2c
| 0
|
Ensembl/ensj-healthcheck,thomasmaurel/ensj-healthcheck,thomasmaurel/ensj-healthcheck,thomasmaurel/ensj-healthcheck,Ensembl/ensj-healthcheck,thomasmaurel/ensj-healthcheck,Ensembl/ensj-healthcheck,Ensembl/ensj-healthcheck
|
/*
* Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
* Copyright [2016-2018] EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ensembl.healthcheck.testcase.compara;
import java.util.List;
import java.util.HashMap;
import java.util.Map;
import java.util.Vector;
import org.apache.commons.lang.StringUtils;
import org.ensembl.healthcheck.DatabaseRegistryEntry;
import org.ensembl.healthcheck.DatabaseType;
import org.ensembl.healthcheck.ReportManager;
import org.ensembl.healthcheck.Team;
import org.ensembl.healthcheck.testcase.AbstractTemplatedTestCase;
public abstract class AbstractMLSSTagStats extends AbstractTemplatedTestCase {
private final static String QUERY = "SELECT method_link_species_set_id, stats_related_tags FROM "
+ "(SELECT mlss.method_link_species_set_id, tc.stats_related_tags "
+ "FROM method_link_species_set mlss "
+ "INNER JOIN method_link ml ON mlss.method_link_id = ml.method_link_id "
+ "LEFT JOIN (SELECT count(*) stats_related_tags, method_link_species_set_id "
+ "FROM method_link_species_set_tag WHERE tag IN "
+ "(%s) "
+ "GROUP BY method_link_species_set_id) as tc "
+ "ON mlss.method_link_species_set_id = tc.method_link_species_set_id "
+ "WHERE ml.type = '%s' "
+ "HAVING (stats_related_tags != %d) OR (stats_related_tags IS NULL)) as find_missing_stats";
abstract protected HashMap<String,String[]> getMandatoryTags();
public AbstractMLSSTagStats() {
setTeamResponsible(Team.COMPARA);
appliesToType(DatabaseType.COMPARA);
setDescription("Checks whether stats have been generated for all MLSSs");
}
@Override
protected boolean runTest(DatabaseRegistryEntry dbre) {
boolean result = true;
for (Map.Entry<String, String[]> method_tags : getMandatoryTags().entrySet()) {
Vector<String> quoted_tags = new Vector<String>();
for (String t: method_tags.getValue()) {
quoted_tags.add(String.format("'%s'", t));
}
List<String> mlsss = getTemplate(dbre).queryForDefaultObjectList(String.format(QUERY, StringUtils.join(quoted_tags, ","), method_tags.getKey(), method_tags.getValue().length), String.class);
if (mlsss.size() > 0) {
ReportManager.problem( this, dbre.getConnection(), "MLSSs for " + method_tags.getKey() + " found with no statistics: " + StringUtils.join(mlsss, ","));
ReportManager.problem( this, dbre.getConnection(), "USEFUL SQL: " + String.format(QUERY, StringUtils.join(quoted_tags, ","), method_tags.getKey(), method_tags.getValue().length));
result = false;
} else {
ReportManager.correct(this, dbre.getConnection(), "PASSED ");
}
}
return result;
}
}
|
src/org/ensembl/healthcheck/testcase/compara/AbstractMLSSTagStats.java
|
/*
* Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
* Copyright [2016-2018] EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ensembl.healthcheck.testcase.compara;
import java.util.List;
import java.util.HashMap;
import java.util.Map;
import java.util.Vector;
import org.apache.commons.lang.StringUtils;
import org.ensembl.healthcheck.DatabaseRegistryEntry;
import org.ensembl.healthcheck.DatabaseType;
import org.ensembl.healthcheck.ReportManager;
import org.ensembl.healthcheck.Team;
import org.ensembl.healthcheck.testcase.AbstractTemplatedTestCase;
public abstract class AbstractMLSSTagStats extends AbstractTemplatedTestCase {
private final static String QUERY = "SELECT method_link_species_set_id FROM "
+ "(SELECT mlss.method_link_species_set_id, tc.stats_related_tags "
+ "FROM method_link_species_set mlss "
+ "INNER JOIN method_link ml ON mlss.method_link_id = ml.method_link_id "
+ "LEFT JOIN (SELECT count(*) stats_related_tags, method_link_species_set_id "
+ "FROM method_link_species_set_tag WHERE tag IN "
+ "(%s) "
+ "GROUP BY method_link_species_set_id) as tc "
+ "ON mlss.method_link_species_set_id = tc.method_link_species_set_id "
+ "WHERE ml.type = '%s' "
+ "HAVING (stats_related_tags != %d) OR (stats_related_tags IS NULL)) as find_missing_stats";
abstract protected HashMap<String,String[]> getMandatoryTags();
public AbstractMLSSTagStats() {
setTeamResponsible(Team.COMPARA);
appliesToType(DatabaseType.COMPARA);
setDescription("Checks whether stats have been generated for all MLSSs");
}
@Override
protected boolean runTest(DatabaseRegistryEntry dbre) {
boolean result = true;
for (Map.Entry<String, String[]> method_tags : getMandatoryTags().entrySet()) {
Vector<String> quoted_tags = new Vector<String>();
for (String t: method_tags.getValue()) {
quoted_tags.add(String.format("'%s'", t));
}
List<String> mlsss = getTemplate(dbre).queryForDefaultObjectList(String.format(QUERY, StringUtils.join(quoted_tags, ","), method_tags.getKey(), method_tags.getValue().length), String.class);
if (mlsss.size() > 0) {
ReportManager.problem( this, dbre.getConnection(), "MLSSs for " + method_tags.getKey() + " found with no statistics: " + StringUtils.join(mlsss, ","));
result = false;
} else {
ReportManager.correct(this, dbre.getConnection(), "PASSED ");
}
}
return result;
}
}
|
Show a useful SQL query to test the HC
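To make the logged "USEFUL SQL" concrete, a hedged sketch of the String.format step; the template here is a shortened stand-in for the QUERY constant above, and the tag names and method type are hypothetical.
public class MlssQueryFormatSketch {
    public static void main(String[] args) {
        // Shortened stand-in for QUERY; only the placeholder positions matter:
        // quoted tag list, method_link type, and expected tag count.
        String template = "... WHERE tag IN (%s) ... WHERE ml.type = '%s' "
                + "HAVING (stats_related_tags != %d) OR (stats_related_tags IS NULL) ...";
        String quotedTags = "'num_blocks','total_length'";   // hypothetical tag names
        System.out.println("USEFUL SQL: " + String.format(template, quotedTags, "LASTZ_NET", 2));
    }
}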
|
src/org/ensembl/healthcheck/testcase/compara/AbstractMLSSTagStats.java
|
Show a useful SQL query to test the HC
|
|
Java
|
apache-2.0
|
79103718a3dbb25677b3cc752bfa1199f709bd04
| 0
|
booknara/nio_playground
|
package com.github.booknara.nioexample;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.Iterator;
import java.util.Set;
/**
* Created by Daehee Han(@daniel_booknara) on 2/12/16.
*/
public class SelectorExample {
public static final int PORT = 80;
public static void main(String[] args) {
Selector selector;
try {
// Define socket channel
SocketChannel channel = SocketChannel.open();
InetSocketAddress address = new InetSocketAddress(PORT);
channel.connect(address);
selector = Selector.open();
channel.configureBlocking(false);
SelectionKey selectionKey = channel.register(selector, SelectionKey.OP_READ);
while(true) {
// Return how many channels are ready to read
int readyChannels = selector.select();
if(readyChannels == 0)
continue;
Set<SelectionKey> selectedKeys = selector.selectedKeys();
Iterator<SelectionKey> keyIterator = selectedKeys.iterator();
while(keyIterator.hasNext()) {
SelectionKey key = keyIterator.next();
if(key.isAcceptable()) {
// a connection was accepted by a ServerSocketChannel.
// ServerSocketChannel serverSocketChannel = (ServerSocketChannel) key.channel();
// SocketChannel socketChannel = (SocketChannel) key.channel();
} else if (key.isConnectable()) {
// a connection was established with a remote server.
} else if (key.isReadable()) {
// a channel is ready for reading
} else if (key.isWritable()) {
// a channel is ready for writing
}
keyIterator.remove();
}
}
} catch (IOException e) {
e.printStackTrace(); // getStackTrace() returns an array; printStackTrace() prints the full trace
}
}
}
|
src/com/github/booknara/nioexample/SelectorExample.java
|
package com.github.booknara.nioexample;
import java.io.IOException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.Iterator;
import java.util.Set;
/**
* Created by Daehee Han(@daniel_booknara) on 2/12/16.
* This example is incomplete.
*/
public class SelectorExample {
public static void main(String[] args) {
Selector selector;
// Define socket channel
// SocketChannel channel = new SocketChannel();
//
// try {
// selector = Selector.open();
// channel.configureBlocking(false);
//
// SelectionKey selectionKey = channel.register(selector, SelectionKey.OP_READ);
//
//
// while(true) {
//
// // Return how many channels are ready to read
// int readyChannels = selector.select();
//
// if(readyChannels == 0)
// continue;
//
//
// Set<SelectionKey> selectedKeys = selector.selectedKeys();
//
// Iterator<SelectionKey> keyIterator = selectedKeys.iterator();
//
// while(keyIterator.hasNext()) {
//
// SelectionKey key = keyIterator.next();
//
// if(key.isAcceptable()) {
// // a connection was accepted by a ServerSocketChannel.
// // ServerSocketChannel serverSocketChannel = (ServerSocketChannel) key.channel();
// // SocketChannel socketChannel = (SocketChannel) key.channel();
//
// } else if (key.isConnectable()) {
// // a connection was established with a remote server.
//
// } else if (key.isReadable()) {
// // a channel is ready for reading
//
// } else if (key.isWritable()) {
// // a channel is ready for writing
// }
//
// keyIterator.remove();
// }
// }
//
// } catch (IOException e) {
// System.out.println(e.getStackTrace());
// }
}
}
|
Add SocketChannel configuration
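As a side note, the new SelectorExample connects while the channel is still in blocking mode and registers only OP_READ, so the isConnectable() branch never fires for that key; a common non-blocking ordering looks like this hedged sketch (host and port are hypothetical).
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
public class NonBlockingConnectSketch {
    public static void main(String[] args) throws IOException {
        InetSocketAddress address = new InetSocketAddress("example.com", 80);
        Selector selector = Selector.open();
        SocketChannel channel = SocketChannel.open();
        channel.configureBlocking(false);                  // switch to non-blocking before connecting
        channel.register(selector, SelectionKey.OP_CONNECT);
        channel.connect(address);                          // returns immediately in non-blocking mode
        while (selector.select() > 0) {
            for (SelectionKey key : selector.selectedKeys()) {
                if (key.isConnectable() && channel.finishConnect()) {
                    key.interestOps(SelectionKey.OP_READ); // connection established, now read
                }
            }
            selector.selectedKeys().clear();
        }
    }
}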
|
src/com/github/booknara/nioexample/SelectorExample.java
|
Add SocketChannel configuration
|
|
Java
|
apache-2.0
|
ed725c5548b99a915460e9aecd9ff16796a8251e
| 0
|
ruspl-afed/dbeaver,AndrewKhitrin/dbeaver,liuyuanyuan/dbeaver,AndrewKhitrin/dbeaver,liuyuanyuan/dbeaver,liuyuanyuan/dbeaver,ruspl-afed/dbeaver,AndrewKhitrin/dbeaver,liuyuanyuan/dbeaver,AndrewKhitrin/dbeaver,ruspl-afed/dbeaver,ruspl-afed/dbeaver,liuyuanyuan/dbeaver
|
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2016 Serge Rieder (serge@jkiss.org)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License (version 2)
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package org.jkiss.dbeaver.ext.oracle.model;
import org.jkiss.code.NotNull;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCDatabaseMetaData;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCSQLDialect;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
/**
* Oracle SQL dialect
*/
class OracleSQLDialect extends JDBCSQLDialect {
private static final String[] ORACLE_LINE_COMMENTS = {
"--",
//"^rem"
};
public OracleSQLDialect(JDBCDatabaseMetaData metaData) {
super("Oracle", metaData);
addSQLKeyword("ANALYZE");
addSQLKeyword("VALIDATE");
addSQLKeyword("STRUCTURE");
addSQLKeyword("COMPUTE");
addSQLKeyword("STATISTICS");
addSQLKeyword("NULLS");
addSQLKeyword("FIRST");
addSQLKeyword("LAST");
addFunctions(
Arrays.asList(
"SUBSTR", "APPROX_COUNT_DISTINCT",
"REGEXP_SUBSTR", "REGEXP_INSTR", "REGEXP_REPLACE", "REGEXP_LIKE",
// Additions from #323
//Number Functions:
"BITAND",
"COSH",
"NANVL",
"REMAINDER",
"SINH",
"TANH",
"TRUNC",
//Character Functions Returning Character Values:
"CHR",
"INITCAP",
"LPAD",
"NLS_INITCAP",
"NLS_LOWER",
"NLSSORT",
"NLS_UPPER",
"RPAD",
// NLS Character Functions:
"NLS_CHARSET_DECL_LEN",
"NLS_CHARSET_ID",
"NLS_CHARSET_NAME",
//Character Functions Returning Number VALUES:
"INSTR",
//Datetime Functions:
"ADD_MONTHS",
"DBTIMEZONE",
"FROM_TZ",
"LAST_DAY",
"MONTHS_BETWEEN",
"NEW_TIME",
"NEXT_DAY",
"NUMTODSINTERVAL",
"NUMTOYMINTERVAL",
"SESSIONTIMEZONE",
"SYS_EXTRACT_UTC",
"SYSDATE",
"SYSTIMESTAMP",
"TO_CHAR",
"TO_TIMESTAMP",
"TO_TIMESTAMP_TZ",
"TO_DSINTERVAL",
"TO_YMINTERVAL",
"TRUNC",
"TZ_OFFSET",
//General Comparison Functions:
"GREATEST",
"LEAST",
//Conversion Functions:
"ASCIISTR",
"BIN_TO_NUM",
"CHARTOROWID",
"COMPOSE",
"DECOMPOSE",
"HEXTORAW",
"NUMTODSINTERVAL",
"NUMTOYMINTERVAL",
"RAWTOHEX",
"RAWTONHEX",
"ROWIDTOCHAR",
"ROWIDTONCHAR",
"SCN_TO_TIMESTAMP",
"TIMESTAMP_TO_SCN",
"TO_BINARY_DOUBLE",
"TO_BINARY_FLOAT",
"TO_CHAR",
"TO_CLOB",
"TO_DATE",
"TO_DSINTERVAL",
"TO_LOB",
"TO_MULTI_BYTE",
"TO_NCHAR",
"TO_NCLOB",
"TO_NUMBER",
"TO_DSINTERVAL",
"TO_SINGLE_BYTE",
"TO_TIMESTAMP",
"TO_TIMESTAMP_TZ",
"TO_YMINTERVAL",
"TO_YMINTERVAL",
"UNISTR",
//Large Object Functions:
"BFILENAME",
"EMPTY_BLOB",
"EMPTY_CLOB",
//Collection Functions:
"POWERMULTISET",
"POWERMULTISET_BY_CARDINALITY",
//Hierarchical FUNCTION:
"SYS_CONNECT_BY_PATH",
//Data Mining Functions:
"CLUSTER_ID",
"CLUSTER_PROBABILITY",
"CLUSTER_SET",
"FEATURE_ID",
"FEATURE_SET",
"FEATURE_VALUE",
"PREDICTION",
"PREDICTION_COST",
"PREDICTION_DETAILS",
"PREDICTION_PROBABILITY",
"PREDICTION_SET",
//XML Functions:
"APPENDCHILDXML",
"DELETEXML",
"DEPTH",
"EXISTSNODE",
"EXTRACTVALUE",
"INSERTCHILDXML",
"INSERTXMLBEFORE",
"PATH",
"SYS_DBURIGEN",
"SYS_XMLAGG",
"SYS_XMLGEN",
"UPDATEXML",
"XMLAGG",
"XMLCDATA",
"XMLCOLATTVAL",
"XMLCOMMENT",
"XMLCONCAT",
"XMLFOREST",
"XMLPARSE",
"XMLPI",
"XMLQUERY",
"XMLROOT",
"XMLSEQUENCE",
"XMLSERIALIZE",
"XMLTABLE",
"XMLTRANSFORM",
//Encoding and Decoding Functions:
"DECODE",
"DUMP",
"ORA_HASH",
"VSIZE",
//NULL-Related Functions:
"LNNVL",
"NVL",
"NVL2",
//Environment and Identifier Functions:
"SYS_CONTEXT",
"SYS_GUID",
"SYS_TYPEID",
"UID",
"USERENV",
//Aggregate Functions:
"CORR_S",
"CORR_K",
"FIRST",
"GROUP_ID",
"GROUPING_ID",
"LAST",
"MEDIAN",
"STATS_BINOMIAL_TEST",
"STATS_CROSSTAB",
"STATS_F_TEST",
"STATS_KS_TEST",
"STATS_MODE",
"STATS_MW_TEST",
"STATS_ONE_WAY_ANOVA",
"STATS_T_TEST_ONE",
"STATS_T_TEST_PAIRED",
"STATS_T_TEST_INDEP",
"STATS_T_TEST_INDEPU",
"STATS_WSR_TEST",
"STDDEV",
"VARIANCE",
//Analytic Functions:
"FIRST",
"FIRST_VALUE",
"LAG",
"LAST",
"LAST_VALUE",
"LEAD",
"NTILE",
"RATIO_TO_REPORT",
"STDDEV",
"VARIANCE",
//Object Reference Functions:
"MAKE_REF",
"REFTOHEX",
//Model Functions:
"CV",
"ITERATION_NUMBER",
"PRESENTNNV",
"PRESENTV",
"PREVIOUS"
));
}
@NotNull
@Override
public Collection<String> getExecuteKeywords() {
return Collections.singleton("call");
}
@NotNull
@Override
public MultiValueInsertMode getMultiValueInsertMode() {
return MultiValueInsertMode.GROUP_ROWS;
}
@Override
public String[] getSingleLineComments() {
return ORACLE_LINE_COMMENTS;
}
@Override
public boolean supportsAliasInUpdate() {
return true;
}
@Override
public boolean isDelimiterAfterBlock() {
return true;
}
}
|
plugins/org.jkiss.dbeaver.ext.oracle/src/org/jkiss/dbeaver/ext/oracle/model/OracleSQLDialect.java
|
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2016 Serge Rieder (serge@jkiss.org)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License (version 2)
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package org.jkiss.dbeaver.ext.oracle.model;
import org.jkiss.code.NotNull;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCDatabaseMetaData;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCSQLDialect;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
/**
* Oracle SQL dialect
*/
class OracleSQLDialect extends JDBCSQLDialect {
private static final String[] ORACLE_LINE_COMMENTS = {
"--",
//"^rem"
};
public OracleSQLDialect(JDBCDatabaseMetaData metaData) {
super("Oracle", metaData);
addSQLKeyword("ANALYZE");
addSQLKeyword("VALIDATE");
addSQLKeyword("STRUCTURE");
addSQLKeyword("COMPUTE");
addSQLKeyword("STATISTICS");
addFunctions(
Arrays.asList(
"SUBSTR", "APPROX_COUNT_DISTINCT",
"REGEXP_SUBSTR", "REGEXP_INSTR", "REGEXP_REPLACE", "REGEXP_LIKE"));
}
@NotNull
@Override
public Collection<String> getExecuteKeywords() {
return Collections.singleton("call");
}
@NotNull
@Override
public MultiValueInsertMode getMultiValueInsertMode() {
return MultiValueInsertMode.GROUP_ROWS;
}
@Override
public String[] getSingleLineComments() {
return ORACLE_LINE_COMMENTS;
}
@Override
public boolean supportsAliasInUpdate() {
return true;
}
@Override
public boolean isDelimiterAfterBlock() {
return true;
}
}
|
#323 Oracle functions
|
plugins/org.jkiss.dbeaver.ext.oracle/src/org/jkiss/dbeaver/ext/oracle/model/OracleSQLDialect.java
|
#323 Oracle functions
|
|
Java
|
apache-2.0
|
4be38e8bffc903dd14fab562d26c3c5fe641a1e7
| 0
|
akiellor/selenium,virajs/selenium-1,mogotest/selenium,virajs/selenium-1,mogotest/selenium,mogotest/selenium,winhamwr/selenium,akiellor/selenium,virajs/selenium-1,winhamwr/selenium,virajs/selenium-1,winhamwr/selenium,akiellor/selenium,virajs/selenium-1,mogotest/selenium,akiellor/selenium,mogotest/selenium,akiellor/selenium,akiellor/selenium,virajs/selenium-1,akiellor/selenium,winhamwr/selenium,mogotest/selenium,mogotest/selenium,winhamwr/selenium,virajs/selenium-1,winhamwr/selenium,mogotest/selenium,akiellor/selenium,winhamwr/selenium,mogotest/selenium,winhamwr/selenium,virajs/selenium-1,virajs/selenium-1
|
/*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.
Portions copyright 2007 ThoughtWorks, Inc
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium;
import java.util.List;
/**
* Represents an HTML element. Generally, all interesting operations to do with
* interacting with a page will be performed through this interface.
*
* All method calls will do a freshness check to ensure that the element
* reference is still valid. This essentially determines whether or not the
* element is still attached to the DOM. If this test fails, then an
* {@link org.openqa.selenium.StaleElementReferenceException} is thrown, and
* all future calls to this instance will fail.
*/
public interface WebElement extends SearchContext {
/**
* Click this element. If this causes a new page to load, this method will
* block until the page has loaded. At this point, you should discard all
* references to this element and any further operations performed on this
* element will have undefined behaviour unless you know that the element
* and the page will still be present.
* If click() causes a new page to be loaded via an event or is done by
* sending a native event (which is a common case on Firefox, IE on Windows)
* then the method will *not* wait for it to be loaded and the caller should
* verify that a new page has been loaded.
* <p/>
* If this element is not clickable, then this operation is a no-op since
* it's pretty common for someone to accidentally miss the target when
* clicking in Real Life
*/
void click();
/**
* If this current element is a form, or an element within a form, then this
* will be submitted to the remote server. If this causes the current page
* to change, then this method will block until the new page is loaded.
*
* @throws NoSuchElementException If the given element is not within a form
*/
void submit();
/**
* Get the value of the element's "value" attribute. If this value has been
* modified after the page has loaded (for example, through javascript) then
* this will reflect the current value of the "value" attribute.
*
* @return The value of the element's "value" attribute.
* @see WebElement#getAttribute(String)
*/
String getValue();
/**
* Use this method to simulate typing into an element, which may set its value.
*/
void sendKeys(CharSequence... keysToSend);
/**
* If this element is a text entry element, this will clear the value. Has no
* effect on other elements. Text entry elements are INPUT and TEXTAREA elements.
*/
void clear();
/**
* Get the tag name of this element. <b>Not</b> the value of the name attribute:
* will return <code>"input"</code> for the element <code><input name="foo" /></code>.
*
* @return The tag name of this element.
*/
String getTagName();
/**
* Get the value of the given attribute of the element. Will return the
* current value, even if this has been modified after the page has been
* loaded. Note that the value of the attribute "checked" will return
* "checked" if the element is a input of type checkbox and there is no
* explicit "checked" attribute, and will also return "selected" for an
* option that is selected even if there is no explicit "selected"
* attribute. The expected value of "disabled" is also returned.
*
* @param name The name of the attribute.
* @return The attribute's current value or null if the value is not set.
*/
String getAttribute(String name);
/**
* If the element is a checkbox, this will toggle the element's state from
* selected to not selected, or from not selected to selected.
*
* @return Whether the toggled element is selected (true) or not (false)
* after this toggle is complete
*/
boolean toggle();
/**
* Determine whether or not this element is selected. This operation
* only applies to input elements such as checkboxes, options in a select
* and radio buttons.
*
* @return True if the element is currently selected or checked, false otherwise.
*/
boolean isSelected();
/**
* Select an element. This method will work against radio buttons, "option"
* elements within a "select" and checkboxes
*/
void setSelected();
/**
* Is the element currently enabled or not? This will generally return true
* for everything but disabled input elements.
*
* @return True if the element is enabled, false otherwise.
*/
boolean isEnabled();
/**
* Get the visible (i.e. not hidden by CSS) innerText of this element,
* including sub-elements, without any leading or trailing whitespace.
*
* @return The innerText of this element.
*/
String getText();
/**
* Find all elements within the current context using the given mechanism.
*
* @param by The locating mechanism to use
* @return A list of all {@link WebElement}s, or an empty list if nothing matches
* @see org.openqa.selenium.By
*/
List<WebElement> findElements(By by);
/**
* Find the first {@link WebElement} using the given method.
*
* @param by The locating mechanism
* @return The first matching element on the current context
* @throws NoSuchElementException If no matching elements are found
*/
WebElement findElement(By by);
}
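A brief usage sketch may help illustrate the freshness check described in the javadoc above. It is an editorial illustration only, not part of the committed file; the concrete driver (FirefoxDriver), the example URL, and the field name "q" are assumptions made purely for the sake of the example.

import org.openqa.selenium.By;
import org.openqa.selenium.StaleElementReferenceException;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.firefox.FirefoxDriver;

class WebElementUsageSketch {
    public static void main(String[] args) {
        WebDriver driver = new FirefoxDriver();               // any WebDriver implementation would do
        driver.get("http://www.example.com/");                // hypothetical page
        WebElement field = driver.findElement(By.name("q"));  // hypothetical element name
        field.sendKeys("selenium");
        field.submit();                                        // may cause a new page to load
        try {
            // Every call re-checks that the element is still attached to the DOM.
            field.getValue();
        } catch (StaleElementReferenceException e) {
            // Re-locate the element instead of reusing the stale reference.
            field = driver.findElement(By.name("q"));
        }
        driver.quit();
    }
}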
|
common/src/java/org/openqa/selenium/WebElement.java
|
/*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.
Portions copyright 2007 ThoughtWorks, Inc
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium;
import java.util.List;
/**
* Represents an HTML element. Generally, all interesting operations to do with
* interacting with a page will be performed through this interface.
*/
public interface WebElement extends SearchContext {
/**
* Click this element. If this causes a new page to load, this method will
* block until the page has loaded. At this point, you should discard all
* references to this element and any further operations performed on this
* element will have undefined behaviour unless you know that the element
* and the page will still be present.
* If click() causes a new page to be loaded via an event or is done by
* sending a native event (which is a common case on Firefox, IE on Windows)
* then the method will *not* wait for it to be loaded and the caller should
* verify that a new page has been loaded.
* <p/>
* If this element is not clickable, then this operation is a no-op since
* it's pretty common for someone to accidentally miss the target when
* clicking in Real Life
*/
void click();
/**
* If this current element is a form, or an element within a form, then this
* will be submitted to the remote server. If this causes the current page
* to change, then this method will block until the new page is loaded.
*
* @throws NoSuchElementException If the given element is not within a form
*/
void submit();
/**
* Get the value of the element's "value" attribute. If this value has been
* modified after the page has loaded (for example, through javascript) then
* this will reflect the current value of the "value" attribute.
*
* @return The value of the element's "value" attribute.
* @see WebElement#getAttribute(String)
*/
String getValue();
/**
* Use this method to simulate typing into an element, which may set its value.
*/
void sendKeys(CharSequence... keysToSend);
/**
* If this element is a text entry element, this will clear the value. Has no
* effect on other elements. Text entry elements are INPUT and TEXTAREA elements.
*/
void clear();
/**
* Get the tag name of this element. <b>Not</b> the value of the name attribute:
* will return <code>"input"</code> for the element <code><input name="foo" /></code>.
*
* @return The tag name of this element.
*/
String getTagName();
/**
* Get the value of the given attribute of the element. Will return the
* current value, even if this has been modified after the page has been
* loaded. Note that the value of the attribute "checked" will return
* "checked" if the element is a input of type checkbox and there is no
* explicit "checked" attribute, and will also return "selected" for an
* option that is selected even if there is no explicit "selected"
* attribute. The expected value of "disabled" is also returned.
*
* @param name The name of the attribute.
* @return The attribute's current value or null if the value is not set.
*/
String getAttribute(String name);
/**
* If the element is a checkbox, this will toggle the element's state from
* selected to not selected, or from not selected to selected.
*
* @return Whether the toggled element is selected (true) or not (false)
* after this toggle is complete
*/
boolean toggle();
/**
* Determine whether or not this element is selected. This operation
* only applies to input elements such as checkboxes, options in a select
* and radio buttons.
*
* @return True if the element is currently selected or checked, false otherwise.
*/
boolean isSelected();
/**
* Select an element. This method will work against radio buttons, "option"
* elements within a "select" and checkboxes
*/
void setSelected();
/**
* Is the element currently enabled or not? This will generally return true
* for everything but disabled input elements.
*
* @return True if the element is enabled, false otherwise.
*/
boolean isEnabled();
/**
* Get the visible (i.e. not hidden by CSS) innerText of this element,
* including sub-elements, without any leading or trailing whitespace.
*
* @return The innerText of this element.
*/
String getText();
/**
* Find all elements within the current context using the given mechanism.
*
* @param by The locating mechanism to use
* @return A list of all {@link WebElement}s, or an empty list if nothing matches
* @see org.openqa.selenium.By
*/
List<WebElement> findElements(By by);
/**
* Find the first {@link WebElement} using the given method.
*
* @param by The locating mechanism
* @return The first matching element on the current context
* @throws NoSuchElementException If no matching elements are found
*/
WebElement findElement(By by);
}
|
SimonStewart: Updating the docs on the web element to make it clear that a freshness check is performed.
git-svn-id: 4179480af2c2519a5eb5e1e9b541cbdf5cf27696@8673 07704840-8298-11de-bf8c-fd130f914ac9
|
common/src/java/org/openqa/selenium/WebElement.java
|
SimonStewart: Updating the docs on the web element to make it clear that a freshness check is performed.
|
|
Java
|
bsd-2-clause
|
51c5f8dfe820eeb589214db75a6f4d449e34f067
| 0
|
ratan12/Atarashii,AnimeNeko/Atarashii,ratan12/Atarashii,AnimeNeko/Atarashii
|
package net.somethingdreadful.MAL;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.SearchManager;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.support.design.widget.NavigationView;
import android.support.v4.app.NotificationCompat;
import android.support.v4.view.MenuItemCompat;
import android.support.v4.view.ViewPager;
import android.support.v4.widget.DrawerLayout;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.SearchView;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ImageView;
import com.freshdesk.mobihelp.Mobihelp;
import com.squareup.picasso.Picasso;
import net.somethingdreadful.MAL.account.AccountService;
import net.somethingdreadful.MAL.adapters.IGFPagerAdapter;
import net.somethingdreadful.MAL.api.APIHelper;
import net.somethingdreadful.MAL.api.MALApi;
import net.somethingdreadful.MAL.dialog.ChooseDialogFragment;
import net.somethingdreadful.MAL.dialog.InputDialogFragment;
import net.somethingdreadful.MAL.tasks.TaskJob;
import butterknife.BindView;
import butterknife.ButterKnife;
public class Home extends AppCompatActivity implements ChooseDialogFragment.onClickListener, SwipeRefreshLayout.OnRefreshListener, IGF.IGFCallbackListener, View.OnClickListener, ViewPager.OnPageChangeListener, NavigationView.OnNavigationItemSelectedListener, InputDialogFragment.onClickListener {
private IGF af;
private IGF mf;
private Menu menu;
private BroadcastReceiver networkReceiver;
private String username;
private boolean networkAvailable = true;
private boolean myList = true; //tracks if the user is on 'My List' or not
private int callbackCounter = 0;
@BindView(R.id.navigationView)
NavigationView navigationView;
@BindView(R.id.drawerLayout)
DrawerLayout drawerLayout;
@Override
public void onCreate(Bundle state) {
super.onCreate(state);
//Initializing activity and application
Theme.context = getApplicationContext();
if (AccountService.AccountExists(this)) {
//The following is state handling code
if (state != null) {
myList = state.getBoolean("myList");
networkAvailable = state.getBoolean("networkAvailable", true);
}
//Initializing
Theme.setTheme(this, R.layout.activity_home, false);
Theme.setActionBar(this, new IGFPagerAdapter(getFragmentManager()));
ButterKnife.bind(this);
username = AccountService.getUsername();
//Initializing NavigationView
navigationView.setNavigationItemSelectedListener(this);
navigationView.getMenu().findItem(R.id.nav_list).setChecked(true);
Theme.setNavDrawer(navigationView, this, this);
//Initializing navigation toggle button
ActionBarDrawerToggle drawerToggle = new ActionBarDrawerToggle(this, drawerLayout, (Toolbar) findViewById(R.id.actionbar), R.string.drawer_open, R.string.drawer_close) {
};
drawerLayout.addDrawerListener(drawerToggle);
drawerToggle.syncState();
networkReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
checkNetworkAndDisplayCrouton();
myListChanged();
}
};
} else {
Intent firstRunInit = new Intent(this, FirstTimeInit.class);
startActivity(firstRunInit);
finish();
}
NfcHelper.disableBeam(this);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.activity_home, menu);
SearchManager searchManager = (SearchManager) getSystemService(Context.SEARCH_SERVICE);
MenuItem searchItem = menu.findItem(R.id.action_search);
SearchView searchView = (SearchView) MenuItemCompat.getActionView(searchItem);
ComponentName cn = new ComponentName(this, SearchActivity.class);
searchView.setSearchableInfo(searchManager.getSearchableInfo(cn));
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.listType_all:
getRecords(true, TaskJob.GETLIST, 0);
setChecked(item);
break;
case R.id.listType_inprogress:
getRecords(true, TaskJob.GETLIST, 1);
setChecked(item);
break;
case R.id.listType_completed:
getRecords(true, TaskJob.GETLIST, 2);
setChecked(item);
break;
case R.id.listType_onhold:
getRecords(true, TaskJob.GETLIST, 3);
setChecked(item);
break;
case R.id.listType_dropped:
getRecords(true, TaskJob.GETLIST, 4);
setChecked(item);
break;
case R.id.listType_planned:
getRecords(true, TaskJob.GETLIST, 5);
setChecked(item);
break;
case R.id.listType_rewatching:
getRecords(true, TaskJob.GETLIST, 6);
setChecked(item);
break;
case R.id.forceSync:
synctask(true);
break;
case R.id.sort_title:
sortRecords(1, item);
break;
case R.id.sort_score:
sortRecords(2, item);
break;
case R.id.sort_type:
sortRecords(3, item);
break;
case R.id.sort_status:
sortRecords(4, item);
break;
case R.id.sort_progress:
sortRecords(5, item);
break;
case R.id.menu_details:
item.setChecked(!item.isChecked());
if (af != null && mf != null) {
af.details();
mf.details();
}
break;
case R.id.menu_inverse:
item.setChecked(!item.isChecked());
if (af != null && mf != null) {
af.inverse();
mf.inverse();
}
break;
}
return super.onOptionsItemSelected(item);
}
private void sortRecords(int sortType, MenuItem item) {
setChecked(item);
if (af != null && mf != null) {
af.sort(sortType);
mf.sort(sortType);
}
}
private void getRecords(boolean clear, TaskJob task, int list) {
if (af != null && mf != null) {
af.getRecords(clear, task, list);
mf.getRecords(clear, task, list);
if (task == TaskJob.FORCESYNC)
syncNotify();
}
}
@Override
public void onResume() {
super.onResume();
checkNetworkAndDisplayCrouton();
registerReceiver(networkReceiver, new IntentFilter("android.net.conn.CONNECTIVITY_CHANGE"));
}
@Override
public void onPause() {
super.onPause();
if (menu != null)
menu.findItem(R.id.action_search).collapseActionView();
unregisterReceiver(networkReceiver);
}
private void synctask(boolean clear) {
getRecords(clear, TaskJob.FORCESYNC, af.list);
}
@Override
public void onSaveInstanceState(Bundle state) {
//This is telling our future selves that we already have some things and not to do them
state.putBoolean("networkAvailable", networkAvailable);
state.putBoolean("myList", myList);
super.onSaveInstanceState(state);
}
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
this.menu = menu;
if (af != null) {
//All this is handling the ticks in the switch list menu
switch (af.list) {
case 0:
setChecked(menu.findItem(R.id.listType_all));
break;
case 1:
setChecked(menu.findItem(R.id.listType_inprogress));
break;
case 2:
setChecked(menu.findItem(R.id.listType_completed));
break;
case 3:
setChecked(menu.findItem(R.id.listType_onhold));
break;
case 4:
setChecked(menu.findItem(R.id.listType_dropped));
break;
case 5:
setChecked(menu.findItem(R.id.listType_planned));
break;
case 6:
setChecked(menu.findItem(R.id.listType_rewatching));
break;
}
}
menu.findItem(R.id.sort_title).setChecked(true);
myListChanged();
return true;
}
private void setChecked(MenuItem item) {
if (item != null)
item.setChecked(true);
}
private void myListChanged() {
if (menu != null) {
if (af != null && mf != null)
menu.findItem(R.id.menu_details).setChecked(myList && af.getDetails());
menu.findItem(R.id.menu_listType).setVisible(myList);
menu.findItem(R.id.menu_sort).setVisible(myList);
menu.findItem(R.id.menu_inverse).setVisible(myList || (!AccountService.isMAL() && af.taskjob == TaskJob.GETMOSTPOPULAR));
menu.findItem(R.id.forceSync).setVisible(myList && networkAvailable);
menu.findItem(R.id.action_search).setVisible(networkAvailable);
}
}
/**
* Creates the sync notification.
*/
private void syncNotify() {
Intent notificationIntent = new Intent(this, Home.class);
PendingIntent contentIntent = PendingIntent.getActivity(this, 1, notificationIntent, PendingIntent.FLAG_CANCEL_CURRENT);
NotificationCompat.Builder mBuilder = new NotificationCompat.Builder(this)
.setOngoing(true)
.setSmallIcon(R.drawable.notification_icon)
.setContentTitle(getString(R.string.app_name))
.setContentText(getString(R.string.toast_info_SyncMessage))
.setContentIntent(contentIntent);
NotificationManager notificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
notificationManager.notify(R.id.notification_sync, mBuilder.build());
}
private void showLogoutDialog() {
ChooseDialogFragment lcdf = new ChooseDialogFragment();
Bundle bundle = new Bundle();
bundle.putString("title", getString(R.string.dialog_label_logout));
bundle.putString("message", getString(R.string.dialog_message_logout));
bundle.putString("positive", getString(R.string.dialog_label_logout));
lcdf.setArguments(bundle);
lcdf.setCallback(this);
lcdf.show(getFragmentManager(), "fragment_LogoutConfirmationDialog");
}
private void checkNetworkAndDisplayCrouton() {
if (APIHelper.isNetworkAvailable(this) && !networkAvailable)
synctask(false);
networkAvailable = APIHelper.isNetworkAvailable(this);
}
@Override
public void onRefresh() {
if (networkAvailable)
synctask(false);
else {
if (af != null && mf != null) {
af.toggleSwipeRefreshAnimation(false);
mf.toggleSwipeRefreshAnimation(false);
}
Theme.Snackbar(this, R.string.toast_error_noConnectivity);
}
}
@Override
public void onIGFReady(IGF igf) {
igf.setUsername(AccountService.getUsername());
if (igf.isAnime())
af = igf;
else
mf = igf;
// do forced sync after FirstInit
if (PrefManager.getForceSync()) {
if (af != null && mf != null) {
PrefManager.setForceSync(false);
PrefManager.commitChanges();
synctask(true);
}
} else {
if (igf.taskjob == null) {
igf.getRecords(true, TaskJob.GETLIST, PrefManager.getDefaultList());
}
}
}
@Override
public void onRecordsLoadingFinished(TaskJob job) {
if (!job.equals(TaskJob.FORCESYNC)) {
return;
}
callbackCounter++;
if (callbackCounter >= 2) {
callbackCounter = 0;
if (job.equals(TaskJob.FORCESYNC)) {
NotificationManager nm = (NotificationManager) getApplicationContext().getSystemService(Context.NOTIFICATION_SERVICE);
nm.cancel(R.id.notification_sync);
}
}
}
@Override
public void onItemClick(int id, MALApi.ListType listType, String username) {
Intent startDetails = new Intent(this, DetailView.class);
startDetails.putExtra("recordID", id);
startDetails.putExtra("recordType", listType);
startDetails.putExtra("username", username);
startActivity(startDetails);
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.Image:
Intent Profile = new Intent(this, ProfileActivity.class);
Profile.putExtra("username", username);
startActivity(Profile);
break;
case R.id.NDimage:
InputDialogFragment lcdf = new InputDialogFragment();
Bundle bundle = new Bundle();
bundle.putInt("id", R.id.NDimage);
bundle.putString("title", getString(R.string.dialog_title_update_navigation));
bundle.putString("hint", getString(R.string.dialog_message_update_navigation));
bundle.putString("message", PrefManager.getNavigationBackground());
lcdf.setArguments(bundle);
lcdf.setCallback(this);
lcdf.show(getFragmentManager(), "fragment_InputDialogFragment");
break;
}
}
@Override
public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
if (menu != null)
menu.findItem(R.id.listType_rewatching).setTitle(getString(position == 0 ? R.string.listType_rewatching : R.string.listType_rereading));
}
@Override
public void onPageSelected(int position) {
}
@Override
public void onPageScrollStateChanged(int state) {
}
@Override
public void onPositiveButtonClicked() {
AccountService.clearData();
startActivity(new Intent(this, FirstTimeInit.class));
System.exit(0);
}
@Override
public boolean onNavigationItemSelected(MenuItem item) {
//Checking if the item should be checked & if the list status has been changed
switch (item.getItemId()) {
case R.id.nav_profile:
case R.id.nav_friends:
case R.id.nav_forum:
case R.id.nav_schedule:
case R.id.nav_charts:
case R.id.nav_settings:
case R.id.nav_support:
case R.id.nav_about:
break;
default:
// Set the list tracker to false. It will be updated later in the code.
myList = false;
if (item.isChecked())
item.setChecked(false);
else
item.setChecked(true);
break;
}
// disable swipeRefresh for other lists
af.setSwipeRefreshEnabled(myList);
mf.setSwipeRefreshEnabled(myList);
//Closing drawer on item click
drawerLayout.closeDrawers();
//Performing the action
switch (item.getItemId()) {
case R.id.nav_list:
getRecords(true, TaskJob.GETLIST, af.list);
myList = true;
break;
case R.id.nav_profile:
Intent Profile = new Intent(this, ProfileActivity.class);
Profile.putExtra("username", username);
startActivity(Profile);
break;
case R.id.nav_friends:
Intent Friends = new Intent(this, ProfileActivity.class);
Friends.putExtra("username", username);
Friends.putExtra("friends", username);
startActivity(Friends);
break;
case R.id.nav_forum:
if (networkAvailable)
startActivity(new Intent(this, ForumActivity.class));
else
Theme.Snackbar(this, R.string.toast_error_noConnectivity);
break;
case R.id.nav_schedule:
startActivity(new Intent(this, ScheduleActivity.class));
break;
case R.id.nav_charts:
startActivity(new Intent(this, ChartActivity.class));
break;
case R.id.nav_browse:
if (AccountService.isMAL())
startActivity(new Intent(this, BrowseActivity.class));
else
Theme.Snackbar(this, R.string.toast_info_disabled);
break;
case R.id.nav_logout: // Others subgroup
showLogoutDialog();
break;
case R.id.nav_settings:
startActivity(new Intent(this, Settings.class));
break;
case R.id.nav_support:
Mobihelp.showSupport(this);
break;
case R.id.nav_about:
startActivity(new Intent(this, AboutActivity.class));
break;
}
myListChanged();
return false;
}
@Override
public void onPosInputButtonClicked(String text, int id) {
Picasso.with(this)
.load(text)
.placeholder(R.drawable.atarashii_background)
.error(R.drawable.atarashii_background)
.into((ImageView) findViewById(R.id.NDimage));
PrefManager.setNavigationBackground(text);
PrefManager.commitChanges();
}
@Override
public void onNegInputButtonClicked(int id) {
Picasso.with(this)
.load(R.drawable.atarashii_background)
.placeholder(R.drawable.atarashii_background)
.error(R.drawable.atarashii_background)
.into((ImageView) findViewById(R.id.NDimage));
PrefManager.setNavigationBackground(null);
PrefManager.commitChanges();
}
}
|
Atarashii/src/net/somethingdreadful/MAL/Home.java
|
package net.somethingdreadful.MAL;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.SearchManager;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.support.design.widget.NavigationView;
import android.support.v4.app.NotificationCompat;
import android.support.v4.view.MenuItemCompat;
import android.support.v4.view.ViewPager;
import android.support.v4.widget.DrawerLayout;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.SearchView;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ImageView;
import com.freshdesk.mobihelp.Mobihelp;
import com.squareup.picasso.Picasso;
import net.somethingdreadful.MAL.account.AccountService;
import net.somethingdreadful.MAL.adapters.IGFPagerAdapter;
import net.somethingdreadful.MAL.api.APIHelper;
import net.somethingdreadful.MAL.api.MALApi;
import net.somethingdreadful.MAL.dialog.ChooseDialogFragment;
import net.somethingdreadful.MAL.dialog.InputDialogFragment;
import net.somethingdreadful.MAL.tasks.TaskJob;
import butterknife.BindView;
import butterknife.ButterKnife;
public class Home extends AppCompatActivity implements ChooseDialogFragment.onClickListener, SwipeRefreshLayout.OnRefreshListener, IGF.IGFCallbackListener, View.OnClickListener, ViewPager.OnPageChangeListener, NavigationView.OnNavigationItemSelectedListener, InputDialogFragment.onClickListener {
private IGF af;
private IGF mf;
private Menu menu;
private BroadcastReceiver networkReceiver;
private String username;
private boolean networkAvailable = true;
private boolean myList = true; //tracks if the user is on 'My List' or not
private int callbackCounter = 0;
@BindView(R.id.navigationView)
NavigationView navigationView;
@BindView(R.id.drawerLayout)
DrawerLayout drawerLayout;
@Override
public void onCreate(Bundle state) {
super.onCreate(state);
//Initializing activity and application
Theme.context = getApplicationContext();
if (AccountService.AccountExists(this)) {
//The following is state handling code
if (state != null) {
myList = state.getBoolean("myList");
networkAvailable = state.getBoolean("networkAvailable", true);
}
//Initializing
Theme.setTheme(this, R.layout.activity_home, false);
Theme.setActionBar(this, new IGFPagerAdapter(getFragmentManager()));
ButterKnife.bind(this);
username = AccountService.getUsername();
//Initializing NavigationView
navigationView.setNavigationItemSelectedListener(this);
navigationView.getMenu().findItem(R.id.nav_list).setChecked(true);
Theme.setNavDrawer(navigationView, this, this);
//Initializing navigation toggle button
ActionBarDrawerToggle drawerToggle = new ActionBarDrawerToggle(this, drawerLayout, (Toolbar) findViewById(R.id.actionbar), R.string.drawer_open, R.string.drawer_close) {
};
drawerLayout.addDrawerListener(drawerToggle);
drawerToggle.syncState();
networkReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
checkNetworkAndDisplayCrouton();
myListChanged();
}
};
} else {
Intent firstRunInit = new Intent(this, FirstTimeInit.class);
startActivity(firstRunInit);
finish();
}
NfcHelper.disableBeam(this);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.activity_home, menu);
SearchManager searchManager = (SearchManager) getSystemService(Context.SEARCH_SERVICE);
MenuItem searchItem = menu.findItem(R.id.action_search);
SearchView searchView = (SearchView) MenuItemCompat.getActionView(searchItem);
ComponentName cn = new ComponentName(this, SearchActivity.class);
searchView.setSearchableInfo(searchManager.getSearchableInfo(cn));
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.listType_all:
getRecords(true, TaskJob.GETLIST, 0);
setChecked(item);
break;
case R.id.listType_inprogress:
getRecords(true, TaskJob.GETLIST, 1);
setChecked(item);
break;
case R.id.listType_completed:
getRecords(true, TaskJob.GETLIST, 2);
setChecked(item);
break;
case R.id.listType_onhold:
getRecords(true, TaskJob.GETLIST, 3);
setChecked(item);
break;
case R.id.listType_dropped:
getRecords(true, TaskJob.GETLIST, 4);
setChecked(item);
break;
case R.id.listType_planned:
getRecords(true, TaskJob.GETLIST, 5);
setChecked(item);
break;
case R.id.listType_rewatching:
getRecords(true, TaskJob.GETLIST, 6);
setChecked(item);
break;
case R.id.forceSync:
synctask(true);
break;
case R.id.sort_title:
sortRecords(1, item);
break;
case R.id.sort_score:
sortRecords(2, item);
break;
case R.id.sort_type:
sortRecords(3, item);
break;
case R.id.sort_status:
sortRecords(4, item);
break;
case R.id.sort_progress:
sortRecords(5, item);
break;
case R.id.menu_details:
item.setChecked(!item.isChecked());
if (af != null && mf != null) {
af.details();
mf.details();
}
break;
case R.id.menu_inverse:
item.setChecked(!item.isChecked());
if (af != null && mf != null) {
af.inverse();
mf.inverse();
}
break;
}
return super.onOptionsItemSelected(item);
}
private void sortRecords(int sortType, MenuItem item) {
setChecked(item);
if (af != null && mf != null) {
af.sort(sortType);
mf.sort(sortType);
}
}
private void getRecords(boolean clear, TaskJob task, int list) {
if (af != null && mf != null) {
af.getRecords(clear, task, list);
mf.getRecords(clear, task, list);
if (task == TaskJob.FORCESYNC)
syncNotify();
}
}
@Override
public void onResume() {
super.onResume();
checkNetworkAndDisplayCrouton();
registerReceiver(networkReceiver, new IntentFilter("android.net.conn.CONNECTIVITY_CHANGE"));
}
@Override
public void onPause() {
super.onPause();
if (menu != null)
menu.findItem(R.id.action_search).collapseActionView();
unregisterReceiver(networkReceiver);
}
private void synctask(boolean clear) {
getRecords(clear, TaskJob.FORCESYNC, af.list);
}
@Override
public void onSaveInstanceState(Bundle state) {
//This is telling our future selves that we already have some things and not to do them
state.putBoolean("networkAvailable", networkAvailable);
state.putBoolean("myList", myList);
super.onSaveInstanceState(state);
}
@Override
public boolean onPrepareOptionsMenu(Menu menu) {
this.menu = menu;
if (af != null) {
//All this is handling the ticks in the switch list menu
switch (af.list) {
case 0:
setChecked(menu.findItem(R.id.listType_all));
break;
case 1:
setChecked(menu.findItem(R.id.listType_inprogress));
break;
case 2:
setChecked(menu.findItem(R.id.listType_completed));
break;
case 3:
setChecked(menu.findItem(R.id.listType_onhold));
break;
case 4:
setChecked(menu.findItem(R.id.listType_dropped));
break;
case 5:
setChecked(menu.findItem(R.id.listType_planned));
break;
case 6:
setChecked(menu.findItem(R.id.listType_rewatching));
break;
}
}
menu.findItem(R.id.sort_title).setChecked(true);
myListChanged();
return true;
}
private void setChecked(MenuItem item) {
if (item != null)
item.setChecked(true);
}
private void myListChanged() {
if (menu != null) {
if (af != null && mf != null)
menu.findItem(R.id.menu_details).setChecked(myList && af.getDetails());
menu.findItem(R.id.menu_listType).setVisible(myList);
menu.findItem(R.id.menu_sort).setVisible(myList);
menu.findItem(R.id.menu_inverse).setVisible(myList || (!AccountService.isMAL() && af.taskjob == TaskJob.GETMOSTPOPULAR));
menu.findItem(R.id.forceSync).setVisible(myList && networkAvailable);
menu.findItem(R.id.action_search).setVisible(networkAvailable);
}
}
/**
* Creates the sync notification.
*/
private void syncNotify() {
Intent notificationIntent = new Intent(this, Home.class);
PendingIntent contentIntent = PendingIntent.getActivity(this, 1, notificationIntent, PendingIntent.FLAG_CANCEL_CURRENT);
NotificationCompat.Builder mBuilder = new NotificationCompat.Builder(this)
.setOngoing(true)
.setSmallIcon(R.drawable.notification_icon)
.setContentTitle(getString(R.string.app_name))
.setContentText(getString(R.string.toast_info_SyncMessage))
.setContentIntent(contentIntent);
NotificationManager notificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
notificationManager.notify(R.id.notification_sync, mBuilder.build());
}
private void showLogoutDialog() {
ChooseDialogFragment lcdf = new ChooseDialogFragment();
Bundle bundle = new Bundle();
bundle.putString("title", getString(R.string.dialog_label_logout));
bundle.putString("message", getString(R.string.dialog_message_logout));
bundle.putString("positive", getString(R.string.dialog_label_logout));
lcdf.setArguments(bundle);
lcdf.setCallback(this);
lcdf.show(getFragmentManager(), "fragment_LogoutConfirmationDialog");
}
private void checkNetworkAndDisplayCrouton() {
if (APIHelper.isNetworkAvailable(this) && !networkAvailable)
synctask(false);
networkAvailable = APIHelper.isNetworkAvailable(this);
}
@Override
public void onRefresh() {
if (networkAvailable)
synctask(false);
else {
if (af != null && mf != null) {
af.toggleSwipeRefreshAnimation(false);
mf.toggleSwipeRefreshAnimation(false);
}
Theme.Snackbar(this, R.string.toast_error_noConnectivity);
}
}
@Override
public void onIGFReady(IGF igf) {
igf.setUsername(AccountService.getUsername());
if (igf.isAnime())
af = igf;
else
mf = igf;
// do forced sync after FirstInit
if (PrefManager.getForceSync()) {
if (af != null && mf != null) {
PrefManager.setForceSync(false);
PrefManager.commitChanges();
synctask(true);
}
} else {
if (igf.taskjob == null) {
igf.getRecords(true, TaskJob.GETLIST, PrefManager.getDefaultList());
}
}
}
@Override
public void onRecordsLoadingFinished(TaskJob job) {
if (!job.equals(TaskJob.FORCESYNC)) {
return;
}
callbackCounter++;
if (callbackCounter >= 2) {
callbackCounter = 0;
if (job.equals(TaskJob.FORCESYNC)) {
NotificationManager nm = (NotificationManager) getApplicationContext().getSystemService(Context.NOTIFICATION_SERVICE);
nm.cancel(R.id.notification_sync);
}
}
}
@Override
public void onItemClick(int id, MALApi.ListType listType, String username) {
Intent startDetails = new Intent(this, DetailView.class);
startDetails.putExtra("recordID", id);
startDetails.putExtra("recordType", listType);
startDetails.putExtra("username", username);
startActivity(startDetails);
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.Image:
Intent Profile = new Intent(this, ProfileActivity.class);
Profile.putExtra("username", username);
startActivity(Profile);
break;
case R.id.NDimage:
InputDialogFragment lcdf = new InputDialogFragment();
Bundle bundle = new Bundle();
bundle.putInt("id", R.id.NDimage);
bundle.putString("title", getString(R.string.dialog_title_update_navigation));
bundle.putString("hint", getString(R.string.dialog_message_update_navigation));
bundle.putString("message", PrefManager.getNavigationBackground());
lcdf.setArguments(bundle);
lcdf.setCallback(this);
lcdf.show(getFragmentManager(), "fragment_InputDialogFragment");
break;
}
}
@Override
public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
if (menu != null)
menu.findItem(R.id.listType_rewatching).setTitle(getString(position == 0 ? R.string.listType_rewatching : R.string.listType_rereading));
}
@Override
public void onPageSelected(int position) {
}
@Override
public void onPageScrollStateChanged(int state) {
}
@Override
public void onPositiveButtonClicked() {
AccountService.clearData();
startActivity(new Intent(this, FirstTimeInit.class));
System.exit(0);
}
@Override
public boolean onNavigationItemSelected(MenuItem item) {
//Checking if the item should be checked & if the list status has been changed
switch (item.getItemId()) {
case R.id.nav_profile:
case R.id.nav_friends:
case R.id.nav_forum:
case R.id.nav_schedule:
case R.id.nav_charts:
case R.id.nav_settings:
case R.id.nav_support:
case R.id.nav_about:
break;
default:
// Set the list tracker to false. It will be updated later in the code.
myList = false;
if (item.isChecked())
item.setChecked(false);
else
item.setChecked(true);
break;
}
// disable swipeRefresh for other lists
af.setSwipeRefreshEnabled(myList);
mf.setSwipeRefreshEnabled(myList);
//Closing drawer on item click
drawerLayout.closeDrawers();
//Performing the action
switch (item.getItemId()) {
case R.id.nav_list:
getRecords(true, TaskJob.GETLIST, af.list);
myList = true;
break;
case R.id.nav_profile:
Intent Profile = new Intent(this, ProfileActivity.class);
Profile.putExtra("username", username);
startActivity(Profile);
break;
case R.id.nav_friends:
Intent Friends = new Intent(this, ProfileActivity.class);
Friends.putExtra("username", username);
Friends.putExtra("friends", username);
startActivity(Friends);
break;
case R.id.nav_forum:
if (networkAvailable)
startActivity(new Intent(this, ForumActivity.class));
else
Theme.Snackbar(this, R.string.toast_error_noConnectivity);
break;
case R.id.nav_schedule:
startActivity(new Intent(this, ScheduleActivity.class));
break;
case R.id.nav_charts:
startActivity(new Intent(this, ChartActivity.class));
break;
case R.id.nav_browse:
startActivity(new Intent(this, BrowseActivity.class));
break;
case R.id.nav_logout: // Others subgroup
showLogoutDialog();
break;
case R.id.nav_settings:
startActivity(new Intent(this, Settings.class));
break;
case R.id.nav_support:
Mobihelp.showSupport(this);
break;
case R.id.nav_about:
startActivity(new Intent(this, AboutActivity.class));
break;
}
myListChanged();
return false;
}
@Override
public void onPosInputButtonClicked(String text, int id) {
Picasso.with(this)
.load(text)
.placeholder(R.drawable.atarashii_background)
.error(R.drawable.atarashii_background)
.into((ImageView) findViewById(R.id.NDimage));
PrefManager.setNavigationBackground(text);
PrefManager.commitChanges();
}
@Override
public void onNegInputButtonClicked(int id) {
Picasso.with(this)
.load(R.drawable.atarashii_background)
.placeholder(R.drawable.atarashii_background)
.error(R.drawable.atarashii_background)
.into((ImageView) findViewById(R.id.NDimage));
PrefManager.setNavigationBackground(null);
PrefManager.commitChanges();
}
}
|
Disable browse for AL
|
Atarashii/src/net/somethingdreadful/MAL/Home.java
|
Disable browse for AL
|
|
Java
|
isc
|
409cb23d06300ee6f4472f7e16acdc8eef907e0e
| 0
|
Mitsugaru/EntityManager
|
/*
*
*/
package net.milkycraft.Listeners;
import java.util.HashMap;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import net.milkycraft.Spawnegg;
import net.milkycraft.ASEConfiguration.Settings;
import org.bukkit.ChatColor;
import org.bukkit.Difficulty;
import org.bukkit.entity.Enderman;
import org.bukkit.entity.Entity;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.EntityBreakDoorEvent;
import org.bukkit.event.entity.EntityChangeBlockEvent;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.entity.EntityDamageEvent.DamageCause;
import org.bukkit.event.entity.EntityShootBowEvent;
import org.bukkit.event.entity.FoodLevelChangeEvent;
import org.bukkit.event.entity.PigZapEvent;
import org.bukkit.event.painting.PaintingPlaceEvent;
import org.bukkit.event.player.PlayerFishEvent;
import org.bukkit.event.world.PortalCreateEvent;
import org.bukkit.event.world.PortalCreateEvent.CreateReason;
// TODO: Auto-generated Javadoc
/**
* The listener interface for receiving entities events.
* The class that is interested in processing an entities
* event implements this interface, and the object created
* with that class is registered with a component using the
* component's <code>addEntitiesListener</code> method. When
* the entities event occurs, that object's appropriate
* method is invoked.
*
* @see EntitiesEvent
*/
public class EntitiesListener implements Listener {
/** The alerters. */
public HashMap<Integer, Player> alerters = new HashMap<Integer, Player>();
/** The log. */
private static Logger log = Logger.getLogger("Minecraft");
/**
* Door break.
*
* @param e the e
*/
@EventHandler(priority = EventPriority.LOWEST)
public void DoorBreak(EntityBreakDoorEvent e) {
final List<String> worldz = Settings.worlds;
for (String worldname : worldz) {
if (e.getBlock().getWorld().getName().equals(worldname)) {
if (e.getBlock().getWorld().getDifficulty() == Difficulty.HARD) {
if (Settings.doorBreak) {
e.setCancelled(true);
}
if (Settings.logging) {
log.info("A door somewhere was not broken by a zombie!");
return;
}
}
}
}
}
/**
* Player damage.
*
* @param e the e
*/
@EventHandler(priority = EventPriority.HIGHEST)
public void PlayerDamage(EntityDamageEvent e) {
final List<String> worldz = Settings.worlds;
for (String worldname : worldz) {
if (e.getEntity().getWorld().getName().equals(worldname)) {
if (e.getEntity() instanceof Player) {
if (e.getCause() == DamageCause.FALL) {
final Player p = (Player) e.getEntity();
if (p.hasPermission("entitymanager.nofall")) {
e.setDamage(0);
return;
}
}
}
}
}
}
/**
* Enderblock.
*
* @param e the e
*/
@EventHandler
public void Enderblock(EntityChangeBlockEvent e) {
if (e.getEntity() instanceof Enderman) {
if (Settings.enderPickup) {
e.setCancelled(true);
return;
}
}
}
/**
* Damage.
*
* @param e the e
*/
@EventHandler(priority = EventPriority.LOWEST)
public void Damage(EntityDamageByEntityEvent e) {
final Entity Damaged = e.getEntity();
final List<String> worldz = Settings.worlds;
for (String worldname : worldz) {
if (Damaged.getWorld().getName().equals(worldname)) {
if (Damaged instanceof LivingEntity) {
if (!(Damaged instanceof Player)) {
if (e.getDamager() instanceof Player) {
final Player p = (Player) e.getDamager();
if (Settings.mobdmg
&& !p.hasPermission("entitymanager.mob-damage")) {
e.setCancelled(true);
return;
}
}
} else if (e.getDamager() instanceof Player) {
if (e.getEntity() instanceof Player) {
if (Settings.pvp) {
if (e.getDamager() instanceof Player) {
final Player p = (Player) e.getDamager();
if (!p.hasPermission("entitymanager.pvp")) {
e.setCancelled(true);
p.sendMessage(ChatColor.GREEN
+ "[EM]"
+ ChatColor.RED
+ "PVP is disabled in the world: "
+ ChatColor.YELLOW
+ e.getEntity().getWorld()
.getName() + ".");
return;
}
}
}
}
}
}
}
}
}
/**
* Hunger.
*
* @param e the e
*/
@EventHandler(priority = EventPriority.LOW)
public void hunger(FoodLevelChangeEvent e) {
final List<String> worldz = Settings.worlds;
for (String worldname : worldz) {
if (e.getEntity().getWorld().getName().equals(worldname)) {
if (e.getEntity().hasPermission("entitymanager.nohunger")) {
e.setCancelled(true);
return;
}
}
}
}
/**
* On portal create.
*
* @param e the e
*/
@EventHandler
public void onPortalCreate(PortalCreateEvent e) {
final List<String> worldz = Settings.worlds;
for (String worldname : worldz) {
if (e.getWorld().getName().equals(worldname)) {
if (e.getReason() == CreateReason.FIRE) {
if (Settings.portals) {
e.setCancelled(true);
return;
}
}
}
}
}
/**
* On painting place.
*
* @param e the e
*/
@EventHandler(priority = EventPriority.LOWEST)
public void onPaintingPlace(PaintingPlaceEvent e) {
final List<String> worldz = Settings.worlds;
for (String worldname : worldz) {
if (e.getBlock().getWorld().getName().equals(worldname)) {
if (Settings.paintz
&& !e.getPlayer().hasPermission(
"entitymanager.painting")) {
e.setCancelled(true);
e.getPlayer()
.sendMessage(
ChatColor.GREEN
+ "[EM] "
+ ChatColor.RED
+ "You dont have permission to place paintings");
if (Settings.logging) {
log.info(e.getPlayer().getDisplayName()
+ " tried to place a painting");
return;
}
}
}
}
}
/**
* On pig zap.
*
* @param e the e
*/
@EventHandler(priority = EventPriority.LOWEST)
public void onPigZap(PigZapEvent e) {
final List<String> worldz = Settings.worlds;
for (String worldname : worldz) {
if (e.getPigZombie().getWorld().getName().equals(worldname)) {
if (Settings.getConfig().getBoolean("disabled.mobs.pig_zombie")) {
e.setCancelled(true);
e.getEntity().remove();
if (Settings.logging) {
Spawnegg.log
.log(Level.WARNING,
"[EM] A pig was zapped but pigmans are disabled! Removing pig!");
}
}
}
}
}
/**
* On fishing attempt.
*
* @param e the e
*/
@EventHandler(priority = EventPriority.LOWEST)
public void onFishingAttempt(PlayerFishEvent e) {
final Player player = e.getPlayer();
final List<String> worldz = Settings.worlds;
for (String worldname : worldz) {
if (player.getWorld().getName().equals(worldname)) {
if (Settings.fishing
&& !player.hasPermission("entitymanager.fishing")) {
e.setCancelled(true);
player.sendMessage(ChatColor.GREEN + "[EM] "
+ ChatColor.RED
+ "You dont have permission to fish");
if (Settings.logging) {
log.info(e.getPlayer().getDisplayName()
+ " tried to fish");
}
}
}
}
}
/**
* Onarrowshoot.
*
* @param e the e
*/
@EventHandler(priority = EventPriority.NORMAL)
public void onarrowshoot(EntityShootBowEvent e) {
final List<String> worldz = Settings.worlds;
for (String worldname : worldz) {
if (e.getEntity().getWorld().getName().equals(worldname)) {
if (e.getEntity() instanceof Player) {
final Player p = (Player) e.getEntity();
if (Settings.arrowz
&& !p.hasPermission("entitymanager.arrows")) {
e.setCancelled(true);
p.sendMessage(ChatColor.GREEN + "[EM] " + ChatColor.RED
+ "You dont have permission to shoot arrows");
return;
}
}
}
}
}
}
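For context, the listener above only takes effect once it has been registered with Bukkit's plugin manager; the sketch below shows the usual registration pattern from a plugin's onEnable. It is illustrative only and not part of the committed file: ExamplePlugin is a hypothetical main class standing in for the project's Spawnegg plugin.

import net.milkycraft.Listeners.EntitiesListener;
import org.bukkit.plugin.java.JavaPlugin;

// Hypothetical plugin main class; the real project registers its listeners from net.milkycraft.Spawnegg.
public class ExamplePlugin extends JavaPlugin {
    @Override
    public void onEnable() {
        // Once registered, Bukkit dispatches matching events to the @EventHandler methods above.
        getServer().getPluginManager().registerEvents(new EntitiesListener(), this);
    }
}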
|
src/net/milkycraft/Listeners/EntitiesListener.java
|
package net.milkycraft.Listeners;
import java.util.HashMap;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import net.milkycraft.Spawnegg;
import org.bukkit.ChatColor;
import org.bukkit.Difficulty;
import org.bukkit.Location;
import org.bukkit.entity.Arrow;
import org.bukkit.entity.Enderman;
import org.bukkit.entity.Entity;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.EntityBreakDoorEvent;
import org.bukkit.event.entity.EntityChangeBlockEvent;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.entity.EntityDamageEvent.DamageCause;
import org.bukkit.event.entity.EntityShootBowEvent;
import org.bukkit.event.entity.FoodLevelChangeEvent;
import org.bukkit.event.entity.PigZapEvent;
import org.bukkit.event.entity.ProjectileHitEvent;
import org.bukkit.event.painting.PaintingPlaceEvent;
import org.bukkit.event.player.PlayerFishEvent;
import org.bukkit.event.world.PortalCreateEvent;
import org.bukkit.event.world.PortalCreateEvent.CreateReason;
public class EntitiesListener implements Listener {
Spawnegg plugin;
public HashMap<Integer, Player> alerters = new HashMap<Integer, Player>();
public EntitiesListener(Spawnegg instance) {
plugin = instance;
}
private static Logger log = Logger.getLogger("Minecraft");
@EventHandler(priority = EventPriority.LOW)
public void DoorBreak(EntityBreakDoorEvent e) {
List<String> worldz = plugin.getConfig().getStringList(
"World.Worldname");
for (String worldname : worldz) {
if (e.getBlock().getWorld().getName().equals(worldname)) {
if (e.getBlock().getWorld().getDifficulty() == Difficulty.HARD) {
if (plugin.getConfig().getBoolean(
"block.Actions.zombie-door-break")) {
e.setCancelled(true);
}
if (plugin.getConfig().getBoolean("EntityManager.Logging")) {
log.info("A door somewhere was not broken by a zombie!");
return;
}
}
}
}
}
@EventHandler(priority = EventPriority.HIGH)
public void PlayerDamage(EntityDamageEvent e) {
List<String> worldz = plugin.getConfig().getStringList(
"World.Worldname");
for (String worldname : worldz) {
if (e.getEntity().getWorld().getName().equals(worldname)) {
if (e.getEntity() instanceof Player) {
if (e.getCause() == DamageCause.FALL) {
final Player p = (Player) e.getEntity();
if (p.hasPermission("entitymanager.nofall")) {
e.setDamage(0);
return;
}
}
}
}
}
}
@EventHandler
public void Enderblock(EntityChangeBlockEvent e) {
List<String> worldz = plugin.getConfig().getStringList(
"World.Worldname");
for (String worldname : worldz) {
if (e.getBlock().getWorld().getName().equals(worldname)) {
if (e.getEntity() instanceof Enderman) {
if (plugin.getConfig().getBoolean(
"block.Actions.ender-pickup")) {
e.setCancelled(true);
return;
}
}
}
}
}
@EventHandler(priority = EventPriority.HIGH)
public void Damage(EntityDamageByEntityEvent e) {
List<String> worldz = plugin.getConfig().getStringList(
"World.Worldname");
for (String worldname : worldz) {
if (e.getEntity().getWorld().getName().equals(worldname)) {
if (e.getEntity() instanceof LivingEntity) {
if (!(e.getEntity() instanceof Player)) {
if (e.getDamager() instanceof Player) {
final Player p = (Player) e.getDamager();
if (plugin.getConfig().getBoolean(
"block.Actions.mob-damage")
&& !p.hasPermission("entitymanager.mob-damage")) {
e.setCancelled(true);
return;
}
}
} else if (e.getDamager() instanceof Player) {
if (e.getEntity() instanceof Player) {
if (plugin.getConfig().getBoolean(
"block.Actions.pvp")) {
if (e.getDamager() instanceof Player) {
final Player p = (Player) e.getDamager();
if (!p.hasPermission("entitymanager.pvp")) {
e.setCancelled(true);
p.sendMessage(ChatColor.GREEN
+ "[EM]"
+ ChatColor.RED
+ "PVP is disabled in the world: "
+ ChatColor.YELLOW
+ e.getEntity().getWorld()
.getName() + ".");
return;
}
}
}
}
}
}
}
}
}
@EventHandler(priority = EventPriority.LOW)
public void hunger(FoodLevelChangeEvent e) {
List<String> worldz = plugin.getConfig().getStringList(
"World.Worldname");
for (String worldname : worldz) {
if (e.getEntity().getWorld().getName().equals(worldname)) {
if (e.getEntity().hasPermission("entitymanager.nohunger")) {
e.setCancelled(true);
return;
}
}
}
}
@EventHandler(priority = EventPriority.NORMAL)
public void onArrowHit(ProjectileHitEvent e) {
List<String> worldz = plugin.getConfig().getStringList(
"World.Worldname");
for (String worldname : worldz) {
if (e.getEntity().getWorld().getName().equals(worldname)) {
if (e.getEntity() instanceof Arrow) {
Location loc = e.getEntity().getLocation();
int ep;
ep = plugin.getConfig().getInt(
"extra.explosivearrows.power");
float p;
p = (float) ep;
Entity shooter = e.getEntity().getShooter();
Entity player = (Entity) shooter;
if (shooter instanceof Player) {
Player playa = (Player) e.getEntity().getShooter();
if (playa
.hasPermission("entitymanager.explosivearrows")) {
player.getWorld().createExplosion(loc, p);
return;
}
}
}
}
}
}
@EventHandler
public void onPortalCreate(PortalCreateEvent e) {
List<String> worldz = plugin.getConfig().getStringList(
"World.Worldname");
for (String worldname : worldz) {
if (e.getWorld().getName().equals(worldname)) {
if (e.getReason() == CreateReason.FIRE) {
if (plugin.getConfig().getBoolean(
"block.Creation-of.portals")) {
e.setCancelled(true);
return;
}
}
}
}
}
@EventHandler(priority = EventPriority.LOWEST)
public void onPaintingPlace(PaintingPlaceEvent e) {
List<String> worldz = plugin.getConfig().getStringList(
"World.Worldname");
for (String worldname : worldz) {
if (e.getBlock().getWorld().getName().equals(worldname)) {
if (plugin.getConfig().getBoolean("block.Entities.Paintings")
&& !e.getPlayer().hasPermission(
"entitymanager.painting")) {
e.setCancelled(true);
e.getPlayer()
.sendMessage(
ChatColor.GREEN
+ "[EM] "
+ ChatColor.RED
+ "You dont have permission to place paintings");
if (plugin.getConfig().getBoolean("EntityManager.Logging")) {
log.info(e.getPlayer().getDisplayName()
+ " tried to place a painting");
return;
}
}
}
}
}
@EventHandler(priority = EventPriority.LOWEST)
public void onPigZap(PigZapEvent e) {
List<String> worldz = plugin.getConfig().getStringList(
"World.Worldname");
for (String worldname : worldz) {
if (e.getPigZombie().getWorld().getName().equals(worldname)) {
if (plugin.getConfig().getBoolean("disabled.mobs.pig_zombie")) {
e.setCancelled(true);
e.getEntity().remove();
if (plugin.getConfig().getBoolean("EntityManager.Logging")) {
Spawnegg.log
.log(Level.WARNING,
"[EM] A pig was zapped but pigmans are disabled! Removing pig!");
}
}
}
}
}
@EventHandler(priority = EventPriority.LOWEST)
public void onFishingAttempt(PlayerFishEvent e) {
final Player player = e.getPlayer();
List<String> worldz = plugin.getConfig().getStringList(
"World.Worldname");
for (String worldname : worldz) {
if (player.getWorld().getName().equals(worldname)) {
if (plugin.getConfig().getBoolean("block.Actions.fishing")
&& !player.hasPermission("entitymanager.fishing")) {
e.setCancelled(true);
player.sendMessage(ChatColor.GREEN + "[EM] "
+ ChatColor.RED
+ "You dont have permission to fish");
if (plugin.getConfig().getBoolean("EntityManager.Logging")) {
log.info(e.getPlayer().getDisplayName()
+ " tried to fish");
}
}
}
}
}
@EventHandler(priority = EventPriority.NORMAL)
public void onarrowshoot(EntityShootBowEvent e) {
List<String> worldz = plugin.getConfig().getStringList(
"World.Worldname");
for (String worldname : worldz) {
if (e.getEntity().getWorld().getName().equals(worldname)) {
if (e.getEntity() instanceof Player) {
final Player p = (Player) e.getEntity();
if (plugin.getConfig().getBoolean("block.Actions.arrows")
&& !p.hasPermission("entitymanager.arrows")) {
e.setCancelled(true);
p.sendMessage(ChatColor.GREEN + "[EM] " + ChatColor.RED
+ "You dont have permission to shoot arrows");
return;
}
}
}
}
}
}
|
Code enchancements
|
src/net/milkycraft/Listeners/EntitiesListener.java
|
Code enchancements
|
|
Java
|
mit
|
69f8135423094eaf1ecc23df30ce664db4fa1d30
| 0
|
Nunnery/MythicDrops
|
package net.nunnerycode.bukkit.mythicdrops;
import com.conventnunnery.libraries.config.CommentedConventYamlConfiguration;
import com.conventnunnery.libraries.config.ConventYamlConfiguration;
import com.modcrafting.diablodrops.name.NamesLoader;
import net.nunnerycode.bukkit.mythicdrops.api.MythicDrops;
import net.nunnerycode.bukkit.mythicdrops.api.enchantments.MythicEnchantment;
import net.nunnerycode.bukkit.mythicdrops.api.items.CustomItem;
import net.nunnerycode.bukkit.mythicdrops.api.names.NameType;
import net.nunnerycode.bukkit.mythicdrops.api.settings.ConfigSettings;
import net.nunnerycode.bukkit.mythicdrops.api.tiers.Tier;
import net.nunnerycode.bukkit.mythicdrops.commands.MythicDropsCommand;
import net.nunnerycode.bukkit.mythicdrops.items.CustomItemBuilder;
import net.nunnerycode.bukkit.mythicdrops.items.CustomItemMap;
import net.nunnerycode.bukkit.mythicdrops.names.NameMap;
import net.nunnerycode.bukkit.mythicdrops.settings.MythicConfigSettings;
import net.nunnerycode.bukkit.mythicdrops.spawning.ItemSpawningListener;
import net.nunnerycode.bukkit.mythicdrops.tiers.MythicTierBuilder;
import net.nunnerycode.bukkit.mythicdrops.tiers.TierMap;
import net.nunnerycode.bukkit.mythicdrops.utils.ChatColorUtil;
import net.nunnerycode.bukkit.mythicdrops.utils.TierUtil;
import net.nunnerycode.java.libraries.cannonball.DebugPrinter;
import org.bukkit.Bukkit;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.enchantments.EnchantmentWrapper;
import org.bukkit.entity.EntityType;
import org.bukkit.material.MaterialData;
import org.bukkit.plugin.java.JavaPlugin;
import se.ranzdo.bukkit.methodcommand.CommandHandler;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
public final class MythicDropsPlugin extends JavaPlugin implements MythicDrops {
private static MythicDrops _INSTANCE;
private ConfigSettings configSettings;
private DebugPrinter debugPrinter;
private CommentedConventYamlConfiguration configYAML;
private CommentedConventYamlConfiguration customItemYAML;
private CommentedConventYamlConfiguration itemGroupYAML;
private CommentedConventYamlConfiguration languageYAML;
private CommentedConventYamlConfiguration tierYAML;
private CommentedConventYamlConfiguration creatureSpawningYAML;
private NamesLoader namesLoader;
private CommandHandler commandHandler;
public static MythicDrops getInstance() {
return _INSTANCE;
}
@Override
public void onEnable() {
_INSTANCE = this;
debugPrinter = new DebugPrinter(getDataFolder().getPath(), "debug.log");
configSettings = new MythicConfigSettings();
namesLoader = new NamesLoader(this);
unpackConfigurationFiles(new String[]{"config.yml", "customItems.yml", "itemGroups.yml", "language.yml",
"tier.yml", "creatureSpawning.yml"}, false);
configYAML = new CommentedConventYamlConfiguration(new File(getDataFolder(), "config.yml"),
YamlConfiguration.loadConfiguration(getResource("config.yml")).getString("version"));
configYAML.options().backupOnUpdate(true);
configYAML.options().updateOnLoad(true);
configYAML.load();
customItemYAML = new CommentedConventYamlConfiguration(new File(getDataFolder(), "customItems.yml"),
YamlConfiguration.loadConfiguration(getResource("customItems.yml")).getString("version"));
customItemYAML.options().backupOnUpdate(true);
customItemYAML.options().updateOnLoad(true);
customItemYAML.load();
itemGroupYAML = new CommentedConventYamlConfiguration(new File(getDataFolder(), "itemGroups.yml"),
YamlConfiguration.loadConfiguration(getResource("itemGroups.yml")).getString("version"));
itemGroupYAML.options().backupOnUpdate(true);
itemGroupYAML.options().updateOnLoad(true);
itemGroupYAML.load();
languageYAML = new CommentedConventYamlConfiguration(new File(getDataFolder(), "language.yml"),
YamlConfiguration.loadConfiguration(getResource("language.yml")).getString("version"));
languageYAML.options().backupOnUpdate(true);
languageYAML.options().updateOnLoad(true);
languageYAML.load();
tierYAML = new CommentedConventYamlConfiguration(new File(getDataFolder(), "tier.yml"),
YamlConfiguration.loadConfiguration(getResource("tier.yml")).getString("version"));
tierYAML.options().backupOnUpdate(true);
tierYAML.options().updateOnLoad(true);
tierYAML.load();
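// creatureSpawning.yml uses '/' as its path separator; reloadSettings() reads keys such as "options/can-mobs-pick-up-equipment"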
creatureSpawningYAML = new CommentedConventYamlConfiguration(new File(getDataFolder(), "creatureSpawning.yml"),
YamlConfiguration.loadConfiguration(getResource("creatureSpawning.yml")).getString("version"));
creatureSpawningYAML.options().pathSeparator('/');
creatureSpawningYAML.options().backupOnUpdate(true);
creatureSpawningYAML.options().updateOnLoad(true);
creatureSpawningYAML.load();
writeResourceFiles();
debugInformation();
reloadTiers();
reloadSettings();
reloadCustomItems();
reloadNames();
commandHandler = new CommandHandler(this);
commandHandler.registerCommands(new MythicDropsCommand(this));
Bukkit.getPluginManager().registerEvents(new ItemSpawningListener(this), this);
debugPrinter.debug(Level.INFO, "v" + getDescription().getVersion() + " enabled");
}
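// logs the names of all registered enchantments and entity types to the debug log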
private void debugInformation() {
List<String> strings = new ArrayList<>();
for (Enchantment e : Enchantment.values()) {
strings.add(new EnchantmentWrapper(e.getId()).getName());
}
debugPrinter.debug(Level.INFO, "Enchantments: " + strings.toString());
strings.clear();
for (EntityType et : EntityType.values()) {
strings.add(et.name());
}
debugPrinter.debug(Level.INFO, "EntityTypes: " + strings.toString());
}
private void writeResourceFiles() {
namesLoader.writeDefault("/resources/lore/general.txt", false, true);
namesLoader.writeDefault("/resources/lore/enchantments/damage_all.txt", false, true);
namesLoader.writeDefault("/resources/lore/materials/diamond_sword.txt", false, true);
namesLoader.writeDefault("/resources/lore/tiers/legendary.txt", false, true);
namesLoader.writeDefault("/resources/prefixes/general.txt", false, true);
namesLoader.writeDefault("/resources/prefixes/enchantments/damage_all.txt", false, true);
namesLoader.writeDefault("/resources/prefixes/materials/diamond_sword.txt", false, true);
namesLoader.writeDefault("/resources/prefixes/tiers/legendary.txt", false, true);
namesLoader.writeDefault("/resources/suffixes/general.txt", false, true);
namesLoader.writeDefault("/resources/suffixes/enchantments/damage_all.txt", false, true);
namesLoader.writeDefault("/resources/suffixes/materials/diamond_sword.txt", false, true);
namesLoader.writeDefault("/resources/suffixes/tiers/legendary.txt", false, true);
}
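// saves each bundled YAML resource into the data folder, writing over an existing file only when overwrite is true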
private void unpackConfigurationFiles(String[] configurationFiles, boolean overwrite) {
for (String s : configurationFiles) {
YamlConfiguration yc = YamlConfiguration.loadConfiguration(getResource(s));
try {
File f = new File(getDataFolder(), s);
if (!f.exists()) {
yc.save(f);
continue;
}
if (overwrite) {
yc.save(f);
}
} catch (IOException e) {
getLogger().warning("Could not unpack " + s);
}
}
}
@Override
public ConfigSettings getConfigSettings() {
return configSettings;
}
@Override
public DebugPrinter getDebugPrinter() {
return debugPrinter;
}
@Override
public ConventYamlConfiguration getConfigYAML() {
return configYAML;
}
@Override
public ConventYamlConfiguration getCustomItemYAML() {
return customItemYAML;
}
@Override
public ConventYamlConfiguration getItemGroupYAML() {
return itemGroupYAML;
}
@Override
public ConventYamlConfiguration getLanguageYAML() {
return languageYAML;
}
@Override
public ConventYamlConfiguration getTierYAML() {
return tierYAML;
}
@Override
public void reloadSettings() {
MythicConfigSettings mcs = new MythicConfigSettings();
if (configYAML != null) {
mcs.setAutoUpdate(configYAML.getBoolean("options.autoUpdate", false));
mcs.setDebugMode(configYAML.getBoolean("options.debugMode", false));
mcs.setScriptsDirectory(configYAML.getString("options.scriptsDirectory", "scripts"));
mcs.setItemDisplayNameFormat(configYAML.getString("display.itemDisplayNameFormat",
"%generalprefix% %generalsuffix%"));
mcs.setRandomLoreEnabled(configYAML.getBoolean("display.tooltips.randomLoreEnabled", false));
mcs.setRandomLoreChance(configYAML.getDouble("display.tooltips.randomLoreChance", 0.25));
mcs.getTooltipFormat().addAll(configYAML.getStringList("display.tooltips.format"));
}
if (itemGroupYAML != null && itemGroupYAML.isConfigurationSection("itemGroups")) {
ConfigurationSection idCS = itemGroupYAML.getConfigurationSection("itemGroups");
if (idCS.isConfigurationSection("toolGroups")) {
List<String> toolGroupList = new ArrayList<>();
ConfigurationSection toolCS = idCS.getConfigurationSection("toolGroups");
for (String toolKind : toolCS.getKeys(false)) {
List<String> idList = toolCS.getStringList(toolKind);
toolGroupList.add(toolKind + " (" + idList.size() + ")");
mcs.getItemTypesWithIds().put(toolKind.toLowerCase(), idList);
mcs.getToolTypes().add(toolKind.toLowerCase());
}
debugPrinter.debug(Level.INFO, "Loaded tool groups: " + toolGroupList.toString());
}
if (idCS.isConfigurationSection("armorGroups")) {
List<String> armorGroupList = new ArrayList<>();
ConfigurationSection armorCS = idCS.getConfigurationSection("armorGroups");
for (String armorKind : armorCS.getKeys(false)) {
List<String> idList = armorCS.getStringList(armorKind);
armorGroupList.add(armorKind + " (" + idList.size() + ")");
mcs.getItemTypesWithIds().put(armorKind.toLowerCase(), idList);
mcs.getArmorTypes().add(armorKind.toLowerCase());
}
debugPrinter.debug(Level.INFO, "Loaded armor groups: " + armorGroupList.toString());
}
if (idCS.isConfigurationSection("materialGroups")) {
List<String> materialGroupList = new ArrayList<>();
ConfigurationSection materialCS = idCS.getConfigurationSection("materialGroups");
for (String materialKind : materialCS.getKeys(false)) {
List<String> idList = materialCS.getStringList(materialKind);
materialGroupList.add(materialKind + " (" + idList.size() + ")");
mcs.getMaterialTypesWithIds().put(materialKind.toLowerCase(), idList);
mcs.getMaterialTypes().add(materialKind.toLowerCase());
}
debugPrinter.debug(Level.INFO, "Loaded material groups: " + materialGroupList.toString());
}
}
if (languageYAML != null) {
for (String s : languageYAML.getKeys(true)) {
if (languageYAML.isConfigurationSection(s)) {
continue;
}
mcs.getLanguageMap().put(s, languageYAML.getString(s, s));
}
}
if (creatureSpawningYAML != null) {
mcs.setCanMobsPickUpEquipment(creatureSpawningYAML.getBoolean("options/can-mobs-pick-up-equipment", true));
mcs.setBlankMobSpawnEnabled(creatureSpawningYAML.getBoolean("options/blank-mob-spawn.enabled", false));
mcs.setBlankMobSpawnSkeletonsSpawnWithBows(!creatureSpawningYAML.getBoolean("options/blank-mob-spawn" +
"/skeletons-no-bow", false));
mcs.setGlobalSpawnChance(creatureSpawningYAML.getDouble("globalSpawnChance", 0.25));
mcs.setPreventCustom(creatureSpawningYAML.getBoolean("spawnPrevention/custom", true));
mcs.setPreventSpawner(creatureSpawningYAML.getBoolean("spawnPrevention/spawner", true));
mcs.setPreventSpawnEgg(creatureSpawningYAML.getBoolean("spawnPrevention/spawnEgg", true));
if (creatureSpawningYAML.isConfigurationSection("spawnPrevention/aboveY")) {
ConfigurationSection cs = creatureSpawningYAML.getConfigurationSection("spawnPrevention/aboveY");
for (String wn : cs.getKeys(false)) {
if (cs.isConfigurationSection(wn)) {
continue;
}
mcs.setSpawnHeightLimit(wn, cs.getInt(wn, 255));
}
}
mcs.setCustomItemsSpawn(creatureSpawningYAML.getBoolean("customItems/spawn", true));
mcs.setOnlyCustomItemsSpawn(creatureSpawningYAML.getBoolean("customItems/onlySpawn", false));
mcs.setCustomItemSpawnChance(creatureSpawningYAML.getDouble("customItems/chance", 0.05));
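// tierDrops maps an EntityType name to the tier names assigned to that entity type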
if (creatureSpawningYAML.isConfigurationSection("tierDrops")) {
ConfigurationSection cs = creatureSpawningYAML.getConfigurationSection("tierDrops");
for (String key : cs.getKeys(false)) {
if (cs.isConfigurationSection(key)) {
continue;
}
List<String> strings = cs.getStringList(key);
EntityType et = null;
try {
et = EntityType.valueOf(key);
} catch (Exception e) {
continue;
}
Set<Tier> tiers = new HashSet<>(TierUtil.getTiersFromStrings(strings));
debugPrinter.debug(Level.INFO, et.name() + " | " + TierUtil.getStringsFromTiers(tiers).toString());
mcs.setEntityTypeTiers(et, tiers);
}
}
if (creatureSpawningYAML.isConfigurationSection("spawnWithDropChance")) {
ConfigurationSection cs = creatureSpawningYAML.getConfigurationSection("spawnWithDropChance");
for (String key : cs.getKeys(false)) {
if (cs.isConfigurationSection(key)) {
continue;
}
EntityType et = null;
try {
et = EntityType.valueOf(key);
} catch (Exception e) {
continue;
}
double d = cs.getDouble(key, 0D);
mcs.setEntityTypeChance(et, d);
}
}
}
this.configSettings = mcs;
}
@Override
public void reloadTiers() {
TierMap.getInstance().clear();
CommentedConventYamlConfiguration c = tierYAML;
if (c == null) {
return;
}
List<String> loadedTierNames = new ArrayList<>();
for (String key : c.getKeys(false)) {
// Check if the key has other fields under it and if not, move on to the next
if (!c.isConfigurationSection(key)) {
continue;
}
ConfigurationSection cs = c.getConfigurationSection(key);
MythicTierBuilder builder = new MythicTierBuilder(key.toLowerCase());
builder.withDisplayName(cs.getString("displayName", key));
builder.withDisplayColor(ChatColorUtil.getChatColorOrFallback(cs.getString("displayColor"),
ChatColorUtil.getRandomChatColor()));
builder.withIdentificationColor(ChatColorUtil.getChatColorOrFallback(cs.getString("identifierColor")
, ChatColorUtil.getRandomChatColor()));
ConfigurationSection enchCS = cs.getConfigurationSection("enchantments");
if (enchCS != null) {
builder.withSafeBaseEnchantments(enchCS.getBoolean("safeBaseEnchantments", true));
builder.withSafeBonusEnchantments(enchCS.getBoolean("safeBonusEnchantments", true));
builder.withHighBaseEnchantments(enchCS.getBoolean("allowHighBaseEnchantments", true));
builder.withHighBonusEnchantments(enchCS.getBoolean("allowHighBonusEnchantments", true));
builder.withMinimumBonusEnchantments(enchCS.getInt("minimumBonusEnchantments", 0));
builder.withMaximumBonusEnchantments(enchCS.getInt("maximumBonusEnchantments", 0));
Set<MythicEnchantment> baseEnchantments = new HashSet<>();
List<String> baseEnchantStrings = enchCS.getStringList("baseEnchantments");
for (String s : baseEnchantStrings) {
MythicEnchantment me = MythicEnchantment.fromString(s);
if (me != null) {
baseEnchantments.add(me);
}
}
builder.withBaseEnchantments(baseEnchantments);
Set<MythicEnchantment> bonusEnchantments = new HashSet<>();
List<String> bonusEnchantStrings = enchCS.getStringList("bonusEnchantments");
for (String s : bonusEnchantStrings) {
MythicEnchantment me = MythicEnchantment.fromString(s);
if (me != null) {
bonusEnchantments.add(me);
}
}
builder.withBonusEnchantments(bonusEnchantments);
}
ConfigurationSection loreCS = cs.getConfigurationSection("lore");
if (loreCS != null) {
builder.withMinimumBonusLore(loreCS.getInt("minimumBonusLore", 0));
builder.withMaximumBonusLore(loreCS.getInt("maximumBonusLore", 0));
builder.withBaseLore(loreCS.getStringList("baseLore"));
builder.withBonusLore(loreCS.getStringList("bonusLore"));
}
builder.withMinimumDurabilityPercentage(cs.getDouble("minimumDurabilityPercentage", 1.0));
builder.withMaximumDurabilityPercentage(cs.getDouble("maximumDurabilityPercentage", 1.0));
builder.withMinimumSockets(cs.getInt("minimumSockets", 0));
builder.withMaximumSockets(cs.getInt("maximumSockets", 0));
builder.withAllowedItemGroups(cs.getStringList("itemTypes.allowedGroups"));
builder.withDisallowedItemGroups(cs.getStringList("itemTypes.disallowedGroups"));
builder.withAllowedItemIds(cs.getStringList("itemTypes.allowedItemIds"));
builder.withDisallowedItemIds(cs.getStringList("itemTypes.disallowedItemIds"));
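// chanceToSpawnOnAMonster is either a section of per-world chances or a single value stored under "default"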
if (cs.isConfigurationSection("chanceToSpawnOnAMonster")) {
Map<String, Double> chanceToSpawnMap = new HashMap<>();
for (String k : cs.getConfigurationSection("chanceToSpawnOnAMonster").getKeys(false)) {
chanceToSpawnMap.put(k, cs.getDouble("chanceToSpawnOnAMonster." + k, 0));
chanceToSpawnMap.put("default", cs.getDouble("chanceToSpawnOnAMonster"));
}
builder.withWorldSpawnChanceMap(chanceToSpawnMap);
} else if (cs.isSet("chanceToSpawnOnAMonster")) {
Map<String, Double> chanceToSpawnMap = new HashMap<>();
chanceToSpawnMap.put("default", cs.getDouble("chanceToSpawnOnAMonster"));
builder.withWorldSpawnChanceMap(chanceToSpawnMap);
}
if (cs.isConfigurationSection("chanceToDropOnMonsterDeath")) {
Map<String, Double> chanceToSpawnMap = new HashMap<>();
for (String k : cs.getConfigurationSection("chanceToDropOnMonsterDeath").getKeys(false)) {
chanceToSpawnMap.put(k, cs.getDouble("chanceToDropOnMonsterDeath." + k, 1.0));
chanceToSpawnMap.put("default", cs.getDouble("chanceToDropOnMonsterDeath", 1.0));
}
builder.withWorldDropChanceMap(chanceToSpawnMap);
} else if (cs.isSet("chanceToDropOnMonsterDeath")) {
Map<String, Double> chanceToSpawnMap = new HashMap<>();
chanceToSpawnMap.put("default", cs.getDouble("chanceToDropOnMonsterDeath", 1.0));
builder.withWorldDropChanceMap(chanceToSpawnMap);
}
Tier t = builder.build();
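// skip tiers whose displayColor and identificationColor are identical (see debug message below)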
if (t.getDisplayColor() == t.getIdentificationColor()) {
debugPrinter.debug(Level.INFO, "Cannot load " + t.getName() + " due to displayColor and " +
"identificationColor being the same");
continue;
}
TierMap.getInstance().put(key.toLowerCase(), t);
loadedTierNames.add(key.toLowerCase());
}
debugPrinter.debug(Level.INFO, "Loaded tiers: " + loadedTierNames.toString());
}
@Override
public void reloadCustomItems() {
CustomItemMap.getInstance().clear();
CommentedConventYamlConfiguration c = customItemYAML;
if (c == null) {
return;
}
List<String> loadedCustomItemsNames = new ArrayList<>();
for (String key : c.getKeys(false)) {
if (!c.isConfigurationSection(key)) {
continue;
}
ConfigurationSection cs = c.getConfigurationSection(key);
CustomItemBuilder builder = new CustomItemBuilder(key);
MaterialData materialData = new MaterialData(cs.getInt("materialID", 0), (byte) cs.getInt("materialData",
0));
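// a materialID of 0 means no valid material was configured, so skip this entry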
if (materialData.getItemTypeId() == 0) {
continue;
}
builder.withMaterialData(materialData);
builder.withDisplayName(cs.getString("displayName", key));
builder.withLore(cs.getStringList("lore"));
builder.withChanceToBeGivenToMonster(cs.getDouble("chanceToBeGivenToAMonster", 0));
builder.withChanceToDropOnDeath(cs.getDouble("chanceToDropOnDeath", 0));
Map<Enchantment, Integer> enchantments = new HashMap<>();
if (cs.isConfigurationSection("enchantments")) {
for (String ench : cs.getConfigurationSection("enchantments").getKeys(false)) {
Enchantment enchantment = Enchantment.getByName(ench);
if (enchantment == null) {
continue;
}
enchantments.put(enchantment, cs.getInt("enchantments." + ench));
}
}
builder.withEnchantments(enchantments);
CustomItem ci = builder.build();
CustomItemMap.getInstance().put(key, ci);
loadedCustomItemsNames.add(key);
}
debugPrinter.debug(Level.INFO, "Loaded custom items: " + loadedCustomItemsNames.toString());
}
@Override
public void reloadNames() {
NameMap.getInstance().clear();
loadPrefixes();
loadSuffixes();
loadLore();
}
@Override
public CommandHandler getCommandHandler() {
return commandHandler;
}
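// lore lines are keyed by NameType format plus the source file name without its ".txt" extension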
private void loadLore() {
Map<String, List<String>> lore = new HashMap<>();
File loreFolder = new File(getDataFolder(), "/resources/lore/");
if (!loreFolder.exists() && !loreFolder.mkdirs()) {
return;
}
List<String> generalLore = new ArrayList<>();
namesLoader.loadFile(generalLore, "/resources/lore/general.txt");
lore.put(NameType.GENERAL_LORE.getFormat(), generalLore);
int numOfLoadedLore = generalLore.size();
File tierLoreFolder = new File(loreFolder, "/tiers/");
if (tierLoreFolder.exists() && tierLoreFolder.isDirectory()) {
for (File f : tierLoreFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> loreList = new ArrayList<>();
namesLoader.loadFile(loreList, "/resources/lore/tiers/" + f.getName());
lore.put(NameType.TIER_LORE.getFormat() + f.getName().replace(".txt", ""), loreList);
numOfLoadedLore += loreList.size();
}
}
}
File materialLoreFolder = new File(loreFolder, "/materials/");
if (materialLoreFolder.exists() && materialLoreFolder.isDirectory()) {
for (File f : materialLoreFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> loreList = new ArrayList<>();
namesLoader.loadFile(loreList, "/resources/lore/materials/" + f.getName());
lore.put(NameType.MATERIAL_LORE.getFormat() + f.getName().replace(".txt", ""), loreList);
numOfLoadedLore += loreList.size();
}
}
}
File enchantmentLoreFolder = new File(loreFolder, "/enchantments/");
if (enchantmentLoreFolder.exists() && enchantmentLoreFolder.isDirectory()) {
for (File f : enchantmentLoreFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> loreList = new ArrayList<>();
namesLoader.loadFile(loreList, "/resources/lore/enchantments/" + f.getName());
lore.put(NameType.ENCHANTMENT_LORE.getFormat() + f.getName().replace(".txt", ""), loreList);
numOfLoadedLore += loreList.size();
}
}
}
debugPrinter.debug(Level.INFO, "Loaded lore: " + numOfLoadedLore);
NameMap.getInstance().putAll(lore);
}
private void loadSuffixes() {
Map<String, List<String>> suffixes = new HashMap<>();
File suffixFolder = new File(getDataFolder(), "/resources/suffixes/");
if (!suffixFolder.exists() && !suffixFolder.mkdirs()) {
return;
}
List<String> generalSuffixes = new ArrayList<>();
namesLoader.loadFile(generalSuffixes, "/resources/suffixes/general.txt");
suffixes.put(NameType.GENERAL_SUFFIX.getFormat(), generalSuffixes);
int numOfLoadedSuffixes = generalSuffixes.size();
File tierSuffixFolder = new File(suffixFolder, "/tiers/");
if (tierSuffixFolder.exists() && tierSuffixFolder.isDirectory()) {
for (File f : tierSuffixFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> suffixList = new ArrayList<>();
namesLoader.loadFile(suffixList, "/resources/suffixes/tiers/" + f.getName());
suffixes.put(NameType.TIER_SUFFIX.getFormat() + f.getName().replace(".txt", ""), suffixList);
numOfLoadedSuffixes += suffixList.size();
}
}
}
File materialSuffixFolder = new File(suffixFolder, "/materials/");
if (materialSuffixFolder.exists() && materialSuffixFolder.isDirectory()) {
for (File f : materialSuffixFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> suffixList = new ArrayList<>();
namesLoader.loadFile(suffixList, "/resources/suffixes/materials/" + f.getName());
suffixes.put(NameType.MATERIAL_SUFFIX.getFormat() + f.getName().replace(".txt", ""), suffixList);
numOfLoadedSuffixes += suffixList.size();
}
}
}
File enchantmentSuffixFolder = new File(suffixFolder, "/enchantments/");
if (enchantmentSuffixFolder.exists() && enchantmentSuffixFolder.isDirectory()) {
for (File f : enchantmentSuffixFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> suffixList = new ArrayList<>();
namesLoader.loadFile(suffixList, "/resources/suffixes/enchantments/" + f.getName());
suffixes.put(NameType.ENCHANTMENT_SUFFIX.getFormat() + f.getName().replace(".txt", ""), suffixList);
numOfLoadedSuffixes += suffixList.size();
}
}
}
debugPrinter.debug(Level.INFO, "Loaded suffixes: " + numOfLoadedSuffixes);
NameMap.getInstance().putAll(suffixes);
}
private void loadPrefixes() {
Map<String, List<String>> prefixes = new HashMap<>();
File prefixFolder = new File(getDataFolder(), "/resources/prefixes/");
if (!prefixFolder.exists() && !prefixFolder.mkdirs()) {
return;
}
List<String> generalPrefixes = new ArrayList<>();
namesLoader.loadFile(generalPrefixes, "/resources/prefixes/general.txt");
prefixes.put(NameType.GENERAL_PREFIX.getFormat(), generalPrefixes);
int numOfLoadedPrefixes = generalPrefixes.size();
File tierPrefixFolder = new File(prefixFolder, "/tiers/");
if (tierPrefixFolder.exists() && tierPrefixFolder.isDirectory()) {
for (File f : tierPrefixFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> prefixList = new ArrayList<>();
namesLoader.loadFile(prefixList, "/resources/prefixes/tiers/" + f.getName());
prefixes.put(NameType.TIER_PREFIX.getFormat() + f.getName().replace(".txt", ""), prefixList);
numOfLoadedPrefixes += prefixList.size();
}
}
}
File materialPrefixFolder = new File(prefixFolder, "/materials/");
if (materialPrefixFolder.exists() && materialPrefixFolder.isDirectory()) {
for (File f : materialPrefixFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> prefixList = new ArrayList<>();
namesLoader.loadFile(prefixList, "/resources/prefixes/materials/" + f.getName());
prefixes.put(NameType.MATERIAL_PREFIX.getFormat() + f.getName().replace(".txt", ""), prefixList);
numOfLoadedPrefixes += prefixList.size();
}
}
}
File enchantmentPrefixFolder = new File(prefixFolder, "/enchantments/");
if (enchantmentPrefixFolder.exists() && enchantmentPrefixFolder.isDirectory()) {
for (File f : enchantmentPrefixFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> prefixList = new ArrayList<>();
namesLoader.loadFile(prefixList, "/resources/prefixes/enchantments/" + f.getName());
prefixes.put(NameType.ENCHANTMENT_PREFIX.getFormat() + f.getName().replace(".txt", ""), prefixList);
numOfLoadedPrefixes += prefixList.size();
}
}
}
debugPrinter.debug(Level.INFO, "Loaded prefixes: " + numOfLoadedPrefixes);
NameMap.getInstance().putAll(prefixes);
}
}
|
MythicDrops/src/main/java/net/nunnerycode/bukkit/mythicdrops/MythicDropsPlugin.java
|
package net.nunnerycode.bukkit.mythicdrops;
import com.conventnunnery.libraries.config.CommentedConventYamlConfiguration;
import com.conventnunnery.libraries.config.ConventYamlConfiguration;
import com.modcrafting.diablodrops.name.NamesLoader;
import net.nunnerycode.bukkit.mythicdrops.api.MythicDrops;
import net.nunnerycode.bukkit.mythicdrops.api.enchantments.MythicEnchantment;
import net.nunnerycode.bukkit.mythicdrops.api.items.CustomItem;
import net.nunnerycode.bukkit.mythicdrops.api.names.NameType;
import net.nunnerycode.bukkit.mythicdrops.api.settings.ConfigSettings;
import net.nunnerycode.bukkit.mythicdrops.api.tiers.Tier;
import net.nunnerycode.bukkit.mythicdrops.commands.MythicDropsCommand;
import net.nunnerycode.bukkit.mythicdrops.items.CustomItemBuilder;
import net.nunnerycode.bukkit.mythicdrops.items.CustomItemMap;
import net.nunnerycode.bukkit.mythicdrops.names.NameMap;
import net.nunnerycode.bukkit.mythicdrops.settings.MythicConfigSettings;
import net.nunnerycode.bukkit.mythicdrops.spawning.ItemSpawningListener;
import net.nunnerycode.bukkit.mythicdrops.tiers.MythicTierBuilder;
import net.nunnerycode.bukkit.mythicdrops.tiers.TierMap;
import net.nunnerycode.bukkit.mythicdrops.utils.ChatColorUtil;
import net.nunnerycode.bukkit.mythicdrops.utils.TierUtil;
import net.nunnerycode.java.libraries.cannonball.DebugPrinter;
import org.bukkit.Bukkit;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.enchantments.EnchantmentWrapper;
import org.bukkit.entity.EntityType;
import org.bukkit.material.MaterialData;
import org.bukkit.plugin.java.JavaPlugin;
import se.ranzdo.bukkit.methodcommand.CommandHandler;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
public final class MythicDropsPlugin extends JavaPlugin implements MythicDrops {
private static MythicDrops _INSTANCE;
private ConfigSettings configSettings;
private DebugPrinter debugPrinter;
private CommentedConventYamlConfiguration configYAML;
private CommentedConventYamlConfiguration customItemYAML;
private CommentedConventYamlConfiguration itemGroupYAML;
private CommentedConventYamlConfiguration languageYAML;
private CommentedConventYamlConfiguration tierYAML;
private CommentedConventYamlConfiguration creatureSpawningYAML;
private NamesLoader namesLoader;
private CommandHandler commandHandler;
public static MythicDrops getInstance() {
return _INSTANCE;
}
@Override
public void onEnable() {
_INSTANCE = this;
debugPrinter = new DebugPrinter(getDataFolder().getPath(), "debug.log");
configSettings = new MythicConfigSettings();
namesLoader = new NamesLoader(this);
unpackConfigurationFiles(new String[]{"config.yml", "customItems.yml", "itemGroups.yml", "language.yml",
"tier.yml", "creatureSpawning.yml"}, false);
configYAML = new CommentedConventYamlConfiguration(new File(getDataFolder(), "config.yml"),
YamlConfiguration.loadConfiguration(getResource("config.yml")).getString("version"));
configYAML.options().backupOnUpdate(true);
configYAML.options().updateOnLoad(true);
configYAML.load();
customItemYAML = new CommentedConventYamlConfiguration(new File(getDataFolder(), "customItems.yml"),
YamlConfiguration.loadConfiguration(getResource("customItems.yml")).getString("version"));
customItemYAML.options().backupOnUpdate(true);
customItemYAML.options().updateOnLoad(true);
customItemYAML.load();
itemGroupYAML = new CommentedConventYamlConfiguration(new File(getDataFolder(), "itemGroups.yml"),
YamlConfiguration.loadConfiguration(getResource("itemGroups.yml")).getString("version"));
itemGroupYAML.options().backupOnUpdate(true);
itemGroupYAML.options().updateOnLoad(true);
itemGroupYAML.load();
languageYAML = new CommentedConventYamlConfiguration(new File(getDataFolder(), "language.yml"),
YamlConfiguration.loadConfiguration(getResource("language.yml")).getString("version"));
languageYAML.options().backupOnUpdate(true);
languageYAML.options().updateOnLoad(true);
languageYAML.load();
tierYAML = new CommentedConventYamlConfiguration(new File(getDataFolder(), "tier.yml"),
YamlConfiguration.loadConfiguration(getResource("tier.yml")).getString("version"));
tierYAML.options().backupOnUpdate(true);
tierYAML.options().updateOnLoad(true);
tierYAML.load();
creatureSpawningYAML = new CommentedConventYamlConfiguration(new File(getDataFolder(), "creatureSpawning.yml"),
YamlConfiguration.loadConfiguration(getResource("creatureSpawning.yml")).getString("version"));
creatureSpawningYAML.options().pathSeparator('/');
creatureSpawningYAML.options().backupOnUpdate(true);
creatureSpawningYAML.options().updateOnLoad(true);
creatureSpawningYAML.load();
writeResourceFiles();
debugInformation();
reloadTiers();
reloadSettings();
reloadCustomItems();
reloadNames();
commandHandler = new CommandHandler(this);
commandHandler.registerCommands(new MythicDropsCommand(this));
Bukkit.getPluginManager().registerEvents(new ItemSpawningListener(this), this);
debugPrinter.debug(Level.INFO, "v" + getDescription().getVersion() + " enabled");
}
private void debugInformation() {
List<String> strings = new ArrayList<>();
for (Enchantment e : Enchantment.values()) {
strings.add(new EnchantmentWrapper(e.getId()).getName());
}
debugPrinter.debug(Level.INFO, "Enchantments: " + strings.toString());
strings.clear();
for (EntityType et : EntityType.values()) {
strings.add(et.name());
}
debugPrinter.debug(Level.INFO, "EntityTypes: " + strings.toString());
}
private void writeResourceFiles() {
namesLoader.writeDefault("/resources/lore/general.txt", false, true);
namesLoader.writeDefault("/resources/lore/enchantments/damage_all.txt", false, true);
namesLoader.writeDefault("/resources/lore/materials/diamond_sword.txt", false, true);
namesLoader.writeDefault("/resources/lore/tiers/legendary.txt", false, true);
namesLoader.writeDefault("/resources/prefixes/general.txt", false, true);
namesLoader.writeDefault("/resources/prefixes/enchantments/damage_all.txt", false, true);
namesLoader.writeDefault("/resources/prefixes/materials/diamond_sword.txt", false, true);
namesLoader.writeDefault("/resources/prefixes/tiers/legendary.txt", false, true);
namesLoader.writeDefault("/resources/suffixes/general.txt", false, true);
namesLoader.writeDefault("/resources/suffixes/enchantments/damage_all.txt", false, true);
namesLoader.writeDefault("/resources/suffixes/materials/diamond_sword.txt", false, true);
namesLoader.writeDefault("/resources/suffixes/tiers/legendary.txt", false, true);
}
private void unpackConfigurationFiles(String[] configurationFiles, boolean overwrite) {
for (String s : configurationFiles) {
YamlConfiguration yc = YamlConfiguration.loadConfiguration(getResource(s));
try {
File f = new File(getDataFolder(), s);
if (!f.exists()) {
yc.save(f);
continue;
}
if (overwrite) {
yc.save(f);
}
} catch (IOException e) {
getLogger().warning("Could not unpack " + s);
}
}
}
@Override
public ConfigSettings getConfigSettings() {
return configSettings;
}
@Override
public DebugPrinter getDebugPrinter() {
return debugPrinter;
}
@Override
public ConventYamlConfiguration getConfigYAML() {
return configYAML;
}
@Override
public ConventYamlConfiguration getCustomItemYAML() {
return customItemYAML;
}
@Override
public ConventYamlConfiguration getItemGroupYAML() {
return itemGroupYAML;
}
@Override
public ConventYamlConfiguration getLanguageYAML() {
return languageYAML;
}
@Override
public ConventYamlConfiguration getTierYAML() {
return tierYAML;
}
@Override
public void reloadSettings() {
MythicConfigSettings mcs = new MythicConfigSettings();
if (configYAML != null) {
mcs.setAutoUpdate(configYAML.getBoolean("options.autoUpdate", false));
mcs.setDebugMode(configYAML.getBoolean("options.debugMode", false));
mcs.setScriptsDirectory(configYAML.getString("options.scriptsDirectory", "scripts"));
mcs.setItemDisplayNameFormat(configYAML.getString("display.itemDisplayNameFormat",
"%generalprefix% %generalsuffix%"));
mcs.setRandomLoreEnabled(configYAML.getBoolean("display.tooltips.randomLoreEnabled", false));
mcs.setRandomLoreChance(configYAML.getDouble("display.tooltips.randomLoreChance", 0.25));
mcs.getTooltipFormat().addAll(configYAML.getStringList("display.tooltips.format"));
}
if (itemGroupYAML != null && itemGroupYAML.isConfigurationSection("itemGroups")) {
ConfigurationSection idCS = itemGroupYAML.getConfigurationSection("itemGroups");
if (idCS.isConfigurationSection("toolGroups")) {
List<String> toolGroupList = new ArrayList<>();
ConfigurationSection toolCS = idCS.getConfigurationSection("toolGroups");
for (String toolKind : toolCS.getKeys(false)) {
List<String> idList = toolCS.getStringList(toolKind);
toolGroupList.add(toolKind + " (" + idList.size() + ")");
mcs.getItemTypesWithIds().put(toolKind.toLowerCase(), idList);
mcs.getToolTypes().add(toolKind.toLowerCase());
}
debugPrinter.debug(Level.INFO, "Loaded tool groups: " + toolGroupList.toString());
}
if (idCS.isConfigurationSection("armorGroups")) {
List<String> armorGroupList = new ArrayList<>();
ConfigurationSection armorCS = idCS.getConfigurationSection("armorGroups");
for (String armorKind : armorCS.getKeys(false)) {
List<String> idList = armorCS.getStringList(armorKind);
armorGroupList.add(armorKind + " (" + idList.size() + ")");
mcs.getItemTypesWithIds().put(armorKind.toLowerCase(), idList);
mcs.getArmorTypes().add(armorKind.toLowerCase());
}
debugPrinter.debug(Level.INFO, "Loaded armor groups: " + armorGroupList.toString());
}
if (idCS.isConfigurationSection("materialGroups")) {
List<String> materialGroupList = new ArrayList<>();
ConfigurationSection materialCS = idCS.getConfigurationSection("materialGroups");
for (String materialKind : materialCS.getKeys(false)) {
List<String> idList = materialCS.getStringList(materialKind);
materialGroupList.add(materialKind + " (" + idList.size() + ")");
mcs.getMaterialTypesWithIds().put(materialKind.toLowerCase(), idList);
mcs.getMaterialTypes().add(materialKind.toLowerCase());
}
debugPrinter.debug(Level.INFO, "Loaded material groups: " + materialGroupList.toString());
}
}
if (languageYAML != null) {
for (String s : languageYAML.getKeys(true)) {
if (languageYAML.isConfigurationSection(s)) {
continue;
}
mcs.getLanguageMap().put(s, languageYAML.getString(s, s));
}
}
if (creatureSpawningYAML != null) {
mcs.setCanMobsPickUpEquipment(creatureSpawningYAML.getBoolean("options/can-mobs-pick-up-equipment", true));
mcs.setBlankMobSpawnEnabled(creatureSpawningYAML.getBoolean("options/blank-mob-spawn.enabled", false));
mcs.setBlankMobSpawnSkeletonsSpawnWithBows(!creatureSpawningYAML.getBoolean("options/blank-mob-spawn" +
"/skeletons-no-bow", false));
mcs.setGlobalSpawnChance(creatureSpawningYAML.getDouble("globalSpawnChance", 0.25));
mcs.setPreventCustom(creatureSpawningYAML.getBoolean("spawnPrevention/custom", true));
mcs.setPreventSpawner(creatureSpawningYAML.getBoolean("spawnPrevention/spawner", true));
mcs.setPreventSpawnEgg(creatureSpawningYAML.getBoolean("spawnPrevention/spawnEgg", true));
if (creatureSpawningYAML.isConfigurationSection("spawnPrevention/aboveY")) {
ConfigurationSection cs = creatureSpawningYAML.getConfigurationSection("spawnPrevention/aboveY");
for (String wn : cs.getKeys(false)) {
if (cs.isConfigurationSection(wn)) {
continue;
}
mcs.setSpawnHeightLimit(wn, cs.getInt(wn, 255));
}
}
mcs.setCustomItemsSpawn(creatureSpawningYAML.getBoolean("customItems/spawn", true));
mcs.setOnlyCustomItemsSpawn(creatureSpawningYAML.getBoolean("customItems/onlySpawn", false));
mcs.setCustomItemSpawnChance(creatureSpawningYAML.getDouble("customItems/chance", 0.05));
if (creatureSpawningYAML.isConfigurationSection("tierDrops")) {
ConfigurationSection cs = creatureSpawningYAML.getConfigurationSection("tierDrops");
for (String key : cs.getKeys(false)) {
if (cs.isConfigurationSection(key)) {
continue;
}
List<String> strings = cs.getStringList(key);
EntityType et = null;
try {
et = EntityType.valueOf(key);
} catch (Exception e) {
continue;
}
Set<Tier> tiers = new HashSet<>(TierUtil.getTiersFromStrings(strings));
debugPrinter.debug(Level.INFO, et.name() + " | " + TierUtil.getStringsFromTiers(tiers).toString());
mcs.setEntityTypeTiers(et, tiers);
}
}
if (creatureSpawningYAML.isConfigurationSection("spawnWithDropChance")) {
ConfigurationSection cs = creatureSpawningYAML.getConfigurationSection("spawnWithDropChance");
for (String key : cs.getKeys(false)) {
if (cs.isConfigurationSection(key)) {
continue;
}
EntityType et = null;
try {
et = EntityType.valueOf(key);
} catch (Exception e) {
continue;
}
double d = cs.getDouble(key, 0D);
mcs.setEntityTypeChance(et, d);
}
}
}
this.configSettings = mcs;
}
@Override
public void reloadTiers() {
TierMap.getInstance().clear();
CommentedConventYamlConfiguration c = tierYAML;
if (c == null) {
return;
}
List<String> loadedTierNames = new ArrayList<>();
for (String key : c.getKeys(false)) {
// Check if the key has other fields under it and if not, move on to the next
if (!c.isConfigurationSection(key)) {
continue;
}
ConfigurationSection cs = c.getConfigurationSection(key);
MythicTierBuilder builder = new MythicTierBuilder(key.toLowerCase());
builder.withDisplayName(cs.getString("displayName", key));
builder.withDisplayColor(ChatColorUtil.getChatColorOrFallback(cs.getString("displayColor"),
ChatColorUtil.getRandomChatColor()));
builder.withIdentificationColor(ChatColorUtil.getChatColorOrFallback(cs.getString("identificationColor")
, ChatColorUtil.getRandomChatColor()));
ConfigurationSection enchCS = cs.getConfigurationSection("enchantments");
if (enchCS != null) {
builder.withSafeBaseEnchantments(enchCS.getBoolean("safeBaseEnchantments", true));
builder.withSafeBonusEnchantments(enchCS.getBoolean("safeBonusEnchantments", true));
builder.withHighBaseEnchantments(enchCS.getBoolean("allowHighBaseEnchantments", true));
builder.withHighBonusEnchantments(enchCS.getBoolean("allowHighBonusEnchantments", true));
builder.withMinimumBonusEnchantments(enchCS.getInt("minimumBonusEnchantments", 0));
builder.withMaximumBonusEnchantments(enchCS.getInt("maximumBonusEnchantments", 0));
Set<MythicEnchantment> baseEnchantments = new HashSet<>();
List<String> baseEnchantStrings = enchCS.getStringList("baseEnchantments");
for (String s : baseEnchantStrings) {
MythicEnchantment me = MythicEnchantment.fromString(s);
if (me != null) {
baseEnchantments.add(me);
}
}
builder.withBaseEnchantments(baseEnchantments);
Set<MythicEnchantment> bonusEnchantments = new HashSet<>();
List<String> bonusEnchantStrings = enchCS.getStringList("bonusEnchantments");
for (String s : bonusEnchantStrings) {
MythicEnchantment me = MythicEnchantment.fromString(s);
if (me != null) {
bonusEnchantments.add(me);
}
}
builder.withBonusEnchantments(bonusEnchantments);
}
ConfigurationSection loreCS = cs.getConfigurationSection("lore");
if (loreCS != null) {
builder.withMinimumBonusLore(loreCS.getInt("minimumBonusLore", 0));
builder.withMaximumBonusLore(loreCS.getInt("maximumBonusLore", 0));
builder.withBaseLore(loreCS.getStringList("baseLore"));
builder.withBonusLore(loreCS.getStringList("bonusLore"));
}
builder.withMinimumDurabilityPercentage(cs.getDouble("minimumDurabilityPercentage", 1.0));
builder.withMaximumDurabilityPercentage(cs.getDouble("maximumDurabilityPercentage", 1.0));
builder.withMinimumSockets(cs.getInt("minimumSockets", 0));
builder.withMaximumSockets(cs.getInt("maximumSockets", 0));
builder.withAllowedItemGroups(cs.getStringList("itemTypes.allowedGroups"));
builder.withDisallowedItemGroups(cs.getStringList("itemTypes.disallowedGroups"));
builder.withAllowedItemIds(cs.getStringList("itemTypes.allowedItemIds"));
builder.withDisallowedItemIds(cs.getStringList("itemTypes.disallowedItemIds"));
if (cs.isConfigurationSection("chanceToSpawnOnAMonster")) {
Map<String, Double> chanceToSpawnMap = new HashMap<>();
for (String k : cs.getConfigurationSection("chanceToSpawnOnAMonster").getKeys(false)) {
chanceToSpawnMap.put(k, cs.getDouble("chanceToSpawnOnAMonster." + k, 0));
chanceToSpawnMap.put("default", cs.getDouble("chanceToSpawnOnAMonster"));
}
builder.withWorldSpawnChanceMap(chanceToSpawnMap);
} else if (cs.isSet("chanceToSpawnOnAMonster")) {
Map<String, Double> chanceToSpawnMap = new HashMap<>();
chanceToSpawnMap.put("default", cs.getDouble("chanceToSpawnOnAMonster"));
builder.withWorldSpawnChanceMap(chanceToSpawnMap);
}
if (cs.isConfigurationSection("chanceToDropOnMonsterDeath")) {
Map<String, Double> chanceToSpawnMap = new HashMap<>();
for (String k : cs.getConfigurationSection("chanceToDropOnMonsterDeath").getKeys(false)) {
chanceToSpawnMap.put(k, cs.getDouble("chanceToDropOnMonsterDeath." + k, 1.0));
chanceToSpawnMap.put("default", cs.getDouble("chanceToDropOnMonsterDeath", 1.0));
}
builder.withWorldDropChanceMap(chanceToSpawnMap);
} else if (cs.isSet("chanceToDropOnMonsterDeath")) {
Map<String, Double> chanceToSpawnMap = new HashMap<>();
chanceToSpawnMap.put("default", cs.getDouble("chanceToDropOnMonsterDeath", 1.0));
builder.withWorldDropChanceMap(chanceToSpawnMap);
}
Tier t = builder.build();
if (t.getDisplayColor() == t.getIdentificationColor()) {
debugPrinter.debug(Level.INFO, "Cannot load " + t.getName() + " due to displayColor and " +
"identificationColor being the same");
continue;
}
TierMap.getInstance().put(key.toLowerCase(), t);
loadedTierNames.add(key.toLowerCase());
}
debugPrinter.debug(Level.INFO, "Loaded tiers: " + loadedTierNames.toString());
}
@Override
public void reloadCustomItems() {
CustomItemMap.getInstance().clear();
CommentedConventYamlConfiguration c = customItemYAML;
if (c == null) {
return;
}
List<String> loadedCustomItemsNames = new ArrayList<>();
for (String key : c.getKeys(false)) {
if (!c.isConfigurationSection(key)) {
continue;
}
ConfigurationSection cs = c.getConfigurationSection(key);
CustomItemBuilder builder = new CustomItemBuilder(key);
MaterialData materialData = new MaterialData(cs.getInt("materialID", 0), (byte) cs.getInt("materialData",
0));
if (materialData.getItemTypeId() == 0) {
continue;
}
builder.withMaterialData(materialData);
builder.withDisplayName(cs.getString("displayName", key));
builder.withLore(cs.getStringList("lore"));
builder.withChanceToBeGivenToMonster(cs.getDouble("chanceToBeGivenToAMonster", 0));
builder.withChanceToDropOnDeath(cs.getDouble("chanceToDropOnDeath", 0));
Map<Enchantment, Integer> enchantments = new HashMap<>();
if (cs.isConfigurationSection("enchantments")) {
for (String ench : cs.getConfigurationSection("enchantments").getKeys(false)) {
Enchantment enchantment = Enchantment.getByName(ench);
if (enchantment == null) {
continue;
}
enchantments.put(enchantment, cs.getInt("enchantments." + ench));
}
}
builder.withEnchantments(enchantments);
CustomItem ci = builder.build();
CustomItemMap.getInstance().put(key, ci);
loadedCustomItemsNames.add(key);
}
debugPrinter.debug(Level.INFO, "Loaded custom items: " + loadedCustomItemsNames.toString());
}
@Override
public void reloadNames() {
NameMap.getInstance().clear();
loadPrefixes();
loadSuffixes();
loadLore();
}
@Override
public CommandHandler getCommandHandler() {
return commandHandler;
}
private void loadLore() {
Map<String, List<String>> lore = new HashMap<>();
File loreFolder = new File(getDataFolder(), "/resources/lore/");
if (!loreFolder.exists() && !loreFolder.mkdirs()) {
return;
}
List<String> generalLore = new ArrayList<>();
namesLoader.loadFile(generalLore, "/resources/lore/general.txt");
lore.put(NameType.GENERAL_LORE.getFormat(), generalLore);
int numOfLoadedLore = generalLore.size();
File tierLoreFolder = new File(loreFolder, "/tiers/");
if (tierLoreFolder.exists() && tierLoreFolder.isDirectory()) {
for (File f : tierLoreFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> loreList = new ArrayList<>();
namesLoader.loadFile(loreList, "/resources/lore/tiers/" + f.getName());
lore.put(NameType.TIER_LORE.getFormat() + f.getName().replace(".txt", ""), loreList);
numOfLoadedLore += loreList.size();
}
}
}
File materialLoreFolder = new File(loreFolder, "/materials/");
if (materialLoreFolder.exists() && materialLoreFolder.isDirectory()) {
for (File f : materialLoreFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> loreList = new ArrayList<>();
namesLoader.loadFile(loreList, "/resources/lore/materials/" + f.getName());
lore.put(NameType.MATERIAL_LORE.getFormat() + f.getName().replace(".txt", ""), loreList);
numOfLoadedLore += loreList.size();
}
}
}
File enchantmentLoreFolder = new File(loreFolder, "/enchantments/");
if (enchantmentLoreFolder.exists() && enchantmentLoreFolder.isDirectory()) {
for (File f : enchantmentLoreFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> loreList = new ArrayList<>();
namesLoader.loadFile(loreList, "/resources/lore/enchantments/" + f.getName());
lore.put(NameType.ENCHANTMENT_LORE.getFormat() + f.getName().replace(".txt", ""), loreList);
numOfLoadedLore += loreList.size();
}
}
}
debugPrinter.debug(Level.INFO, "Loaded lore: " + numOfLoadedLore);
NameMap.getInstance().putAll(lore);
}
private void loadSuffixes() {
Map<String, List<String>> suffixes = new HashMap<>();
File suffixFolder = new File(getDataFolder(), "/resources/suffixes/");
if (!suffixFolder.exists() && !suffixFolder.mkdirs()) {
return;
}
List<String> generalSuffixes = new ArrayList<>();
namesLoader.loadFile(generalSuffixes, "/resources/suffixes/general.txt");
suffixes.put(NameType.GENERAL_SUFFIX.getFormat(), generalSuffixes);
int numOfLoadedSuffixes = generalSuffixes.size();
File tierSuffixFolder = new File(suffixFolder, "/tiers/");
if (tierSuffixFolder.exists() && tierSuffixFolder.isDirectory()) {
for (File f : tierSuffixFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> suffixList = new ArrayList<>();
namesLoader.loadFile(suffixList, "/resources/suffixes/tiers/" + f.getName());
suffixes.put(NameType.TIER_SUFFIX.getFormat() + f.getName().replace(".txt", ""), suffixList);
numOfLoadedSuffixes += suffixList.size();
}
}
}
File materialSuffixFolder = new File(suffixFolder, "/materials/");
if (materialSuffixFolder.exists() && materialSuffixFolder.isDirectory()) {
for (File f : materialSuffixFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> suffixList = new ArrayList<>();
namesLoader.loadFile(suffixList, "/resources/suffixes/materials/" + f.getName());
suffixes.put(NameType.MATERIAL_SUFFIX.getFormat() + f.getName().replace(".txt", ""), suffixList);
numOfLoadedSuffixes += suffixList.size();
}
}
}
File enchantmentSuffixFolder = new File(suffixFolder, "/enchantments/");
if (enchantmentSuffixFolder.exists() && enchantmentSuffixFolder.isDirectory()) {
for (File f : enchantmentSuffixFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> suffixList = new ArrayList<>();
namesLoader.loadFile(suffixList, "/resources/suffixes/enchantments/" + f.getName());
suffixes.put(NameType.ENCHANTMENT_SUFFIX.getFormat() + f.getName().replace(".txt", ""), suffixList);
numOfLoadedSuffixes += suffixList.size();
}
}
}
debugPrinter.debug(Level.INFO, "Loaded suffixes: " + numOfLoadedSuffixes);
NameMap.getInstance().putAll(suffixes);
}
private void loadPrefixes() {
Map<String, List<String>> prefixes = new HashMap<>();
File prefixFolder = new File(getDataFolder(), "/resources/prefixes/");
if (!prefixFolder.exists() && !prefixFolder.mkdirs()) {
return;
}
List<String> generalPrefixes = new ArrayList<>();
namesLoader.loadFile(generalPrefixes, "/resources/prefixes/general.txt");
prefixes.put(NameType.GENERAL_PREFIX.getFormat(), generalPrefixes);
int numOfLoadedPrefixes = generalPrefixes.size();
File tierPrefixFolder = new File(prefixFolder, "/tiers/");
if (tierPrefixFolder.exists() && tierPrefixFolder.isDirectory()) {
for (File f : tierPrefixFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> prefixList = new ArrayList<>();
namesLoader.loadFile(prefixList, "/resources/prefixes/tiers/" + f.getName());
prefixes.put(NameType.TIER_PREFIX.getFormat() + f.getName().replace(".txt", ""), prefixList);
numOfLoadedPrefixes += prefixList.size();
}
}
}
File materialPrefixFolder = new File(prefixFolder, "/materials/");
if (materialPrefixFolder.exists() && materialPrefixFolder.isDirectory()) {
for (File f : materialPrefixFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> prefixList = new ArrayList<>();
namesLoader.loadFile(prefixList, "/resources/prefixes/materials/" + f.getName());
prefixes.put(NameType.MATERIAL_PREFIX.getFormat() + f.getName().replace(".txt", ""), prefixList);
numOfLoadedPrefixes += prefixList.size();
}
}
}
File enchantmentPrefixFolder = new File(prefixFolder, "/enchantments/");
if (enchantmentPrefixFolder.exists() && enchantmentPrefixFolder.isDirectory()) {
for (File f : enchantmentPrefixFolder.listFiles()) {
if (f.getName().endsWith(".txt")) {
List<String> prefixList = new ArrayList<>();
namesLoader.loadFile(prefixList, "/resources/prefixes/enchantments/" + f.getName());
prefixes.put(NameType.ENCHANTMENT_PREFIX.getFormat() + f.getName().replace(".txt", ""), prefixList);
numOfLoadedPrefixes += prefixList.size();
}
}
}
debugPrinter.debug(Level.INFO, "Loaded prefixes: " + numOfLoadedPrefixes);
NameMap.getInstance().putAll(prefixes);
}
}
|
fixing a problem with loading Tiers
|
MythicDrops/src/main/java/net/nunnerycode/bukkit/mythicdrops/MythicDropsPlugin.java
|
fixing a problem with loading Tiers
|
|
Java
|
mit
|
57663dac8912a84c3580dead4d8506a5d7b5d6cc
| 0
|
Gilga/MedProg-Messenger-Tests
|
package de.sb.messenger.rest;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE;
import static javax.ws.rs.core.MediaType.TEXT_PLAIN;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.net.URI;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Response;
import org.junit.Before;
import org.junit.Test;
import de.sb.messenger.persistence.BaseEntity;
import de.sb.messenger.persistence.Document;
import de.sb.messenger.persistence.Message;
import de.sb.messenger.persistence.Person;
import de.sb.messenger.persistence.Person.Group;
public class MessageServiceTest extends ServiceTest {
@Test
public void testCriteriaQueries() {
WebTarget webTarget = newWebTarget("ines.bergmann@web.de", "ines");
Response res = webTarget.path("people/2").request().accept(APPLICATION_JSON).get();
Person returnedPerson = res.readEntity(Person.class);
BaseEntity baseEntity = new BaseEntity();
Message message = new Message(returnedPerson, baseEntity, "Hi there!");
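// PUT the message body as plain text, identifying the subject via the subjectReference query parameter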
Response response = webTarget.path("messages").queryParam("subjectReference", 2L).request()
.put(Entity.text(message.getBody()));
long idMsg = response.readEntity(Long.class);
assertEquals(200, response.getStatus());
assertNotEquals(0L, idMsg);
//getWasteBasket().add(idMsg);
}
@Test
public void testIdentityQueries() {
/*
* Test getMessage
*/
WebTarget webTarget = newWebTarget("ines.bergmann@web.de", "ines");
// the message created by the PUT above cannot be looked up here, so a message with id 11 was created in the DB for this test
Response response = webTarget.path("messages/11").request().accept(APPLICATION_JSON).get();
Message returnedMsg = response.readEntity(Message.class);
assertEquals(200, response.getStatus());
assertNotNull(returnedMsg);
assertEquals(APPLICATION_JSON_TYPE, response.getMediaType());
/*
* Test getAuthor
*/
response = webTarget.path("messages/11/author").request().accept(APPLICATION_JSON).get();
Person author = response.readEntity(Person.class);
assertTrue(response.getStatus() == 200);
assertNotNull(author);
assertEquals("Zeta", author.getName().getGiven());
assertEquals(APPLICATION_JSON_TYPE, response.getMediaType());
/*
* Test getSubject
*/
response = webTarget.path("messages/11/subject").request().accept(APPLICATION_JSON).get();
BaseEntity subject = response.readEntity(BaseEntity.class);
assertTrue(response.getStatus() == 200);
assertNotNull(subject);
assertEquals(APPLICATION_JSON_TYPE, response.getMediaType());
}
}
|
test/de/sb/messenger/rest/MessageServiceTest.java
|
package de.sb.messenger.rest;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON_TYPE;
import static javax.ws.rs.core.MediaType.TEXT_PLAIN;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.net.URI;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Response;
import org.junit.Before;
import org.junit.Test;
import de.sb.messenger.persistence.BaseEntity;
import de.sb.messenger.persistence.Document;
import de.sb.messenger.persistence.Message;
import de.sb.messenger.persistence.Person;
import de.sb.messenger.persistence.Person.Group;
public class MessageServiceTest extends ServiceTest {
Client client;
// static private final URI SERVICE_URI =
// URI.create("http://localhost:8001/e");
// String usernameAndPassword;
// String authorizationHeaderName;
// String authorizationHeaderValue;
WebTarget webTarget;
Person returnedPerson;
Message message;
long idMsg;
@Before
public void setupBefore() {
webTarget = newWebTarget("ines.bergmann@web.de", "ines");
}
@Test
public void testCriteriaQueries() {
Response res = webTarget.path("people/2").request().accept(APPLICATION_JSON).get();
returnedPerson = res.readEntity(Person.class);
BaseEntity baseEntity = new BaseEntity();
message = new Message(returnedPerson, baseEntity, "Hi there!");
Response response = webTarget.request().accept(TEXT_PLAIN).header("Authorization", "authorization")
.put(Entity.json(message));
//idMsg = response.readEntity(Long.class);
//idMsg = Long.parseLong(response.readEntity(String.class));
//response what type of a response has to be here
assertEquals(200, response.getStatus());
//System.out.println(response.getStatus());
//assertNotEquals(0, idMsg);
}
@Test
public void testIdentityQueries() {
/*
* Test getMessage
*/
Response response = webTarget.path("messages/11").request().accept(APPLICATION_JSON).get();
Message returnedMsg = response.readEntity(Message.class);
// TODO how to get the msg ID
assertNotNull(returnedMsg);
assertTrue(response.getStatus() == 200);
assertEquals(APPLICATION_JSON_TYPE, response.getMediaType());
/*
* Test getAuthor
*/
response = webTarget.path("messages/idMsg/author").request().accept(APPLICATION_JSON).get();
returnedMsg = response.readEntity(Message.class);
assertNotNull(returnedMsg);
assertEquals("Ines", returnedMsg.getAuthor().getName().getGiven());
assertTrue(response.getStatus() == 200);
assertEquals(APPLICATION_JSON_TYPE, response.getMediaType());
/*
* Test getSubject
*/
response = webTarget.path("messages/idMsg/subject").request().accept(APPLICATION_JSON).get();
returnedMsg = response.readEntity(Message.class);
assertNotNull(returnedMsg);
assertEquals(message.getSubject(), returnedMsg.getSubject());
assertTrue(response.getStatus() == 200);
assertEquals(APPLICATION_JSON_TYPE, response.getMediaType());
this.getWasteBasket().add(message.getIdentiy());
}
// links
// https://dennis-xlc.gitbooks.io/restful-java-with-jax-rs-2-0-2rd-edition/en/part1/chapter8/client_and_web_target.html
// authorization -
// http://www.developerscrappad.com/2364/java/java-ee/rest-jax-rs/how-to-perform-http-basic-access-authentication-with-jax-rs-rest-client/
}
|
fixed PUT test
Msg tests run green.
Removed global variables.
Had to create a msg with id 11 and author Zeta in the DB to be able to test.
|
test/de/sb/messenger/rest/MessageServiceTest.java
|
fixed PUT test
|
|
Java
|
mit
|
eab4504172ff43b0461a4f7c3709f9d7ce3c6955
| 0
|
oaplatform/oap,oaplatform/oap
|
/*
* The MIT License (MIT)
*
* Copyright (c) Open Application Platform Authors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package oap.security.acl;
import lombok.val;
import oap.storage.mongo.Migration;
import oap.storage.mongo.MongoClient;
import oap.testng.AbstractTest;
import oap.testng.Env;
import org.bson.types.ObjectId;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static com.mongodb.client.model.Filters.eq;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Created by igor.petrenko on 22.12.2017.
*/
public class AclRoleStorageTest extends AbstractTest {
private AclRoleStorage storage;
private MongoClient mongoClient;
@Override
@BeforeMethod
public void beforeMethod() {
val dbName = "db" + Env.teamcityBuildPrefix().replace( ".", "_" );
mongoClient = new MongoClient( Env.getEnvOrDefault( "MONGO_HOST", "localhost" ), 27017, dbName, Migration.NONE );
mongoClient.database.drop();
storage = new AclRoleStorage( mongoClient, "roles" );
}
@Test
public void testId() {
val role = storage.store( new AclRole( "role1", singletonList( "test.permission" ) ) );
storage.fsync();
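// the role id is now stored as a String, so it can be used directly in the _id query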
val role2 = storage.collection.find( eq( "_id", role.getId() ) ).first();
assertThat( role2.object ).isEqualTo( role );
}
@Override
@AfterMethod
public void afterMethod() {
mongoClient.database.drop();
storage.close();
mongoClient.close();
}
}
|
oap-security/oap-security-mongo/src/test/java/oap/security/acl/AclRoleStorageTest.java
|
/*
* The MIT License (MIT)
*
* Copyright (c) Open Application Platform Authors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package oap.security.acl;
import lombok.val;
import oap.storage.mongo.Migration;
import oap.storage.mongo.MongoClient;
import oap.testng.AbstractTest;
import oap.testng.Env;
import org.bson.types.ObjectId;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static com.mongodb.client.model.Filters.eq;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Created by igor.petrenko on 22.12.2017.
*/
public class AclRoleStorageTest extends AbstractTest {
private AclRoleStorage storage;
private MongoClient mongoClient;
@Override
@BeforeMethod
public void beforeMethod() {
val dbName = "db" + Env.teamcityBuildPrefix().replace( ".", "_" );
mongoClient = new MongoClient( Env.getEnvOrDefault( "MONGO_HOST", "localhost" ), 27017, dbName, Migration.NONE );
mongoClient.database.drop();
storage = new AclRoleStorage( mongoClient, "roles" );
}
@Test
public void testId() {
val role = storage.store( new AclRole( "role1", singletonList( "test.permission" ) ) );
storage.fsync();
val role2 = storage.collection.find( eq( "_id", new ObjectId( role.getId() ) ) ).first();
assertThat( role2.object ).isEqualTo( role );
}
@Override
@AfterMethod
public void afterMethod() {
mongoClient.database.drop();
storage.close();
mongoClient.close();
}
}
|
update: MongoStorage: ObjectId -> String
|
oap-security/oap-security-mongo/src/test/java/oap/security/acl/AclRoleStorageTest.java
|
update: MongoStorage: ObjectId -> String
|
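The subject above describes MongoStorage switching its document ids from BSON ObjectId to plain String. A minimal before/after sketch of the affected lookup in the test, taken from the diff (lombok val and the static eq import as in the class):
// before: the String id had to be wrapped in an ObjectId for the _id filter
val role2 = storage.collection.find( eq( "_id", new ObjectId( role.getId() ) ) ).first();
// after: documents are keyed by the String id directly
val role2 = storage.collection.find( eq( "_id", role.getId() ) ).first();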
|
Java
|
mit
|
7437d802df108634f09f696a980fb3731632ae23
| 0
|
romank0/dddsample-core,gacalves/dddsample-core,orende/dddsample-core,IzzyXie2010/dddsample-core,orende/dddsample-core,citerus/dddsample-core,IzzyXie2010/dddsample-core,stefan-ka/dddsample-core,stefan-ka/dddsample-core,loothingpogixxv/dddsample-core,romank0/dddsample-core,gacalves/dddsample-core,citerus/dddsample-core,loothingpogixxv/dddsample-core
|
package se.citerus.dddsample.domain.model.cargo;
import org.apache.commons.lang.Validate;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import se.citerus.dddsample.domain.model.Specification;
import se.citerus.dddsample.domain.model.ValueObject;
import se.citerus.dddsample.domain.model.location.Location;
import java.util.Date;
/**
* Route specification.
*
*/
public class RouteSpecification implements ValueObject<RouteSpecification>, Specification<Itinerary> {
private Location origin;
private Location destination;
private Date arrivalDeadline;
/**
* Factory for creating a route specification for a cargo, from cargo
* origin to cargo destination. Use for initial routing.
*
* @param cargo cargo
* @param arrivalDeadline arrival deadline
* @return A route specification for this cargo and arrival deadline
*/
public static RouteSpecification forCargo(Cargo cargo, Date arrivalDeadline) {
Validate.notNull(cargo);
Validate.notNull(arrivalDeadline);
return new RouteSpecification(cargo.origin(), cargo.destination(), arrivalDeadline);
}
/**
* Factory for creating a route specification from an explicit origin,
* for rerouting a cargo.
*
* @param origin explicit origin location to route from
* @param cargo cargo
* @param arrivalDeadline arrival deadline
* @return A route specification from the given origin to the cargo's destination, with the given arrival deadline
*/
public static RouteSpecification fromLocation(Location origin, Cargo cargo, Date arrivalDeadline) {
Validate.notNull(origin);
Validate.notNull(cargo);
Validate.notNull(arrivalDeadline);
return new RouteSpecification(origin, cargo.destination(), arrivalDeadline);
}
private RouteSpecification(Location origin, Location destination, Date arrivalDeadline) {
this.origin = origin;
this.destination = destination;
this.arrivalDeadline = arrivalDeadline;
}
public Location origin() {
return origin;
}
public Location destination() {
return destination;
}
public boolean isSatisfiedBy(Itinerary itinerary) {
// TODO implement
return true;
}
public boolean sameValueAs(RouteSpecification other) {
return other != null && new EqualsBuilder().
append(this.origin, other.origin).
append(this.destination, other.destination).
append(this.arrivalDeadline, other.arrivalDeadline).
isEquals();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RouteSpecification that = (RouteSpecification) o;
return sameValueAs(that);
}
@Override
public int hashCode() {
return new HashCodeBuilder().
append(this.origin).
append(this.destination).
append(this.arrivalDeadline).
toHashCode();
}
}
|
dddsample/src/main/java/se/citerus/dddsample/domain/model/cargo/RouteSpecification.java
|
package se.citerus.dddsample.domain.model.cargo;
import org.apache.commons.lang.Validate;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import se.citerus.dddsample.domain.model.Specification;
import se.citerus.dddsample.domain.model.ValueObject;
import se.citerus.dddsample.domain.model.location.Location;
import java.util.Date;
/**
* Route specification
*/
public class RouteSpecification implements ValueObject<RouteSpecification>, Specification<Itinerary> {
private Location origin;
private Location destination;
private Date arrivalDeadline;
/**
* Factory method.
*
* TODO just a suggestion - could be useful with RouteSpecification.withinOneWeek(cargo) etc
*
* @param cargo cargo
* @param arrivalDeadline arrival deadline
* @return A route specification for this cargo and arrival deadline
*/
public static RouteSpecification forCargo(Cargo cargo, Date arrivalDeadline) {
Validate.notNull(cargo);
Validate.notNull(arrivalDeadline);
return new RouteSpecification(cargo.origin(), cargo.destination(), arrivalDeadline);
}
private RouteSpecification(Location origin, Location destination, Date arrivalDeadline) {
this.origin = origin;
this.destination = destination;
this.arrivalDeadline = arrivalDeadline;
}
public Location origin() {
return origin;
}
public Location destination() {
return destination;
}
public boolean isSatisfiedBy(Itinerary itinerary) {
// TODO implement
return true;
}
public boolean sameValueAs(RouteSpecification other) {
return other != null && new EqualsBuilder().
append(this.origin, other.origin).
append(this.destination, other.destination).
append(this.arrivalDeadline, other.arrivalDeadline).
isEquals();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RouteSpecification that = (RouteSpecification) o;
return sameValueAs(that);
}
@Override
public int hashCode() {
return new HashCodeBuilder().
append(this.origin).
append(this.destination).
append(this.arrivalDeadline).
toHashCode();
}
}
|
Added one more factory method, for the rerouting case
|
dddsample/src/main/java/se/citerus/dddsample/domain/model/cargo/RouteSpecification.java
|
Added one more factory method, for the rerouting case
|
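A short usage sketch of the two factories in the new version above; the cargo, currentLocation and arrivalDeadline variables are hypothetical, while the method names and signatures come from the diff:
// initial routing: the specification runs from the cargo's own origin
RouteSpecification initial = RouteSpecification.forCargo(cargo, arrivalDeadline);
// rerouting: the specification starts from an explicitly supplied origin
RouteSpecification rerouted = RouteSpecification.fromLocation(currentLocation, cargo, arrivalDeadline);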
|
Java
|
mit
|
66a340105b9768aae28c38662cb39c7d3128b1fe
| 0
|
daniel-beck/jenkins,pantheon-systems/jenkins,dariver/jenkins,khmarbaise/jenkins,tastatur/jenkins,hplatou/jenkins,pjanouse/jenkins,vvv444/jenkins,paulmillar/jenkins,vvv444/jenkins,DoctorQ/jenkins,azweb76/jenkins,abayer/jenkins,evernat/jenkins,rsandell/jenkins,protazy/jenkins,kohsuke/hudson,6WIND/jenkins,keyurpatankar/hudson,my7seven/jenkins,liupugong/jenkins,paulwellnerbou/jenkins,thomassuckow/jenkins,singh88/jenkins,iterate/coding-dojo,aquarellian/jenkins,mcanthony/jenkins,ndeloof/jenkins,soenter/jenkins,stephenc/jenkins,huybrechts/hudson,aldaris/jenkins,lvotypko/jenkins,daspilker/jenkins,fbelzunc/jenkins,synopsys-arc-oss/jenkins,DoctorQ/jenkins,verbitan/jenkins,svanoort/jenkins,v1v/jenkins,vijayto/jenkins,christ66/jenkins,goldchang/jenkins,jtnord/jenkins,goldchang/jenkins,mrobinet/jenkins,scoheb/jenkins,aduprat/jenkins,keyurpatankar/hudson,Wilfred/jenkins,protazy/jenkins,elkingtonmcb/jenkins,paulwellnerbou/jenkins,abayer/jenkins,fbelzunc/jenkins,jglick/jenkins,noikiy/jenkins,lvotypko/jenkins,intelchen/jenkins,nandan4/Jenkins,viqueen/jenkins,seanlin816/jenkins,khmarbaise/jenkins,my7seven/jenkins,msrb/jenkins,scoheb/jenkins,jhoblitt/jenkins,samatdav/jenkins,nandan4/Jenkins,wuwen5/jenkins,oleg-nenashev/jenkins,jcarrothers-sap/jenkins,lvotypko/jenkins3,lvotypko/jenkins2,NehemiahMi/jenkins,iterate/coding-dojo,ErikVerheul/jenkins,vjuranek/jenkins,oleg-nenashev/jenkins,akshayabd/jenkins,aheritier/jenkins,h4ck3rm1k3/jenkins,tfennelly/jenkins,azweb76/jenkins,MadsNielsen/jtemp,fbelzunc/jenkins,vijayto/jenkins,arunsingh/jenkins,jtnord/jenkins,shahharsh/jenkins,escoem/jenkins,viqueen/jenkins,christ66/jenkins,patbos/jenkins,Ykus/jenkins,MarkEWaite/jenkins,amuniz/jenkins,paulwellnerbou/jenkins,wangyikai/jenkins,petermarcoen/jenkins,MadsNielsen/jtemp,ajshastri/jenkins,elkingtonmcb/jenkins,aduprat/jenkins,aheritier/jenkins,pselle/jenkins,wuwen5/jenkins,pjanouse/jenkins,pjanouse/jenkins,iterate/coding-dojo,jglick/jenkins,everyonce/jenkins,patbos/jenkins,CodeShane/jenkins,ydubreuil/jenkins,kzantow/jenkins,Vlatombe/jenkins,rlugojr/jenkins,protazy/jenkins,csimons/jenkins,csimons/jenkins,petermarcoen/jenkins,dbroady1/jenkins,lvotypko/jenkins3,daniel-beck/jenkins,daniel-beck/jenkins,olivergondza/jenkins,tfennelly/jenkins,scoheb/jenkins,synopsys-arc-oss/jenkins,292388900/jenkins,mrobinet/jenkins,MarkEWaite/jenkins,ajshastri/jenkins,synopsys-arc-oss/jenkins,morficus/jenkins,ndeloof/jenkins,CodeShane/jenkins,guoxu0514/jenkins,csimons/jenkins,fbelzunc/jenkins,jglick/jenkins,chbiel/jenkins,patbos/jenkins,Jochen-A-Fuerbacher/jenkins,olivergondza/jenkins,tangkun75/jenkins,mattclark/jenkins,samatdav/jenkins,jpederzolli/jenkins-1,ns163/jenkins,jcarrothers-sap/jenkins,kzantow/jenkins,v1v/jenkins,maikeffi/hudson,deadmoose/jenkins,mdonohue/jenkins,tastatur/jenkins,duzifang/my-jenkins,liorhson/jenkins,sathiya-mit/jenkins,ErikVerheul/jenkins,deadmoose/jenkins,hudson/hudson-2.x,recena/jenkins,khmarbaise/jenkins,jzjzjzj/jenkins,msrb/jenkins,paulwellnerbou/jenkins,dennisjlee/jenkins,Wilfred/jenkins,deadmoose/jenkins,varmenise/jenkins,abayer/jenkins,mattclark/jenkins,SebastienGllmt/jenkins,FTG-003/jenkins,daniel-beck/jenkins,ChrisA89/jenkins,recena/jenkins,dennisjlee/jenkins,arunsingh/jenkins,rashmikanta-1984/jenkins,patbos/jenkins,ErikVerheul/jenkins,rsandell/jenkins,synopsys-arc-oss/jenkins,jenkinsci/jenkins,jpbriend/jenkins,singh88/jenkins,daspilker/jenkins,ndeloof/jenkins,ns163/jenkins,dbroady1/jenkins,my7seven/jenkins,csimons/jenkins,Wilfred/jenkins,msrb/jenkins,KostyaSha/jenkins,jenkinsci/jenkins,MadsNielsen/jtemp,azweb76/
jenkins,arcivanov/jenkins,MadsNielsen/jtemp,jhoblitt/jenkins,Krasnyanskiy/jenkins,daniel-beck/jenkins,jzjzjzj/jenkins,mdonohue/jenkins,jenkinsci/jenkins,wangyikai/jenkins,mpeltonen/jenkins,kzantow/jenkins,abayer/jenkins,rashmikanta-1984/jenkins,mrooney/jenkins,intelchen/jenkins,ChrisA89/jenkins,csimons/jenkins,seanlin816/jenkins,brunocvcunha/jenkins,jpbriend/jenkins,godfath3r/jenkins,goldchang/jenkins,viqueen/jenkins,amuniz/jenkins,mdonohue/jenkins,jpbriend/jenkins,DoctorQ/jenkins,Jimilian/jenkins,aheritier/jenkins,kzantow/jenkins,lvotypko/jenkins3,gorcz/jenkins,soenter/jenkins,paulmillar/jenkins,Ykus/jenkins,h4ck3rm1k3/jenkins,albers/jenkins,DoctorQ/jenkins,mrooney/jenkins,intelchen/jenkins,azweb76/jenkins,1and1/jenkins,jcsirot/jenkins,stephenc/jenkins,ChrisA89/jenkins,chbiel/jenkins,evernat/jenkins,christ66/jenkins,jglick/jenkins,mrooney/jenkins,ydubreuil/jenkins,dbroady1/jenkins,arcivanov/jenkins,petermarcoen/jenkins,lindzh/jenkins,jpederzolli/jenkins-1,keyurpatankar/hudson,nandan4/Jenkins,daspilker/jenkins,wuwen5/jenkins,damianszczepanik/jenkins,khmarbaise/jenkins,huybrechts/hudson,lindzh/jenkins,dennisjlee/jenkins,damianszczepanik/jenkins,seanlin816/jenkins,Jochen-A-Fuerbacher/jenkins,aheritier/jenkins,SenolOzer/jenkins,svanoort/jenkins,gitaccountforprashant/gittest,SebastienGllmt/jenkins,my7seven/jenkins,deadmoose/jenkins,stefanbrausch/hudson-main,Jochen-A-Fuerbacher/jenkins,jpederzolli/jenkins-1,jcsirot/jenkins,varmenise/jenkins,jpbriend/jenkins,mrobinet/jenkins,DoctorQ/jenkins,keyurpatankar/hudson,recena/jenkins,bpzhang/jenkins,KostyaSha/jenkins,MichaelPranovich/jenkins_sc,mcanthony/jenkins,mattclark/jenkins,mrooney/jenkins,sathiya-mit/jenkins,vlajos/jenkins,v1v/jenkins,292388900/jenkins,jcsirot/jenkins,akshayabd/jenkins,iqstack/jenkins,elkingtonmcb/jenkins,patbos/jenkins,guoxu0514/jenkins,thomassuckow/jenkins,soenter/jenkins,evernat/jenkins,h4ck3rm1k3/jenkins,hashar/jenkins,1and1/jenkins,dariver/jenkins,viqueen/jenkins,vijayto/jenkins,bkmeneguello/jenkins,FarmGeek4Life/jenkins,guoxu0514/jenkins,amruthsoft9/Jenkis,FTG-003/jenkins,Wilfred/jenkins,MarkEWaite/jenkins,nandan4/Jenkins,huybrechts/hudson,hashar/jenkins,mrooney/jenkins,jk47/jenkins,KostyaSha/jenkins,protazy/jenkins,lilyJi/jenkins,christ66/jenkins,lvotypko/jenkins,my7seven/jenkins,hashar/jenkins,lordofthejars/jenkins,bkmeneguello/jenkins,rsandell/jenkins,abayer/jenkins,sathiya-mit/jenkins,CodeShane/jenkins,wangyikai/jenkins,lvotypko/jenkins,batmat/jenkins,ErikVerheul/jenkins,FTG-003/jenkins,stefanbrausch/hudson-main,KostyaSha/jenkins,liupugong/jenkins,pantheon-systems/jenkins,sathiya-mit/jenkins,lindzh/jenkins,AustinKwang/jenkins,AustinKwang/jenkins,kohsuke/hudson,amruthsoft9/Jenkis,batmat/jenkins,alvarolobato/jenkins,escoem/jenkins,bpzhang/jenkins,jtnord/jenkins,olivergondza/jenkins,jhoblitt/jenkins,svanoort/jenkins,hudson/hudson-2.x,pselle/jenkins,nandan4/Jenkins,maikeffi/hudson,liorhson/jenkins,nandan4/Jenkins,292388900/jenkins,bpzhang/jenkins,mrooney/jenkins,stephenc/jenkins,jzjzjzj/jenkins,akshayabd/jenkins,wangyikai/jenkins,SenolOzer/jenkins,MichaelPranovich/jenkins_sc,CodeShane/jenkins,github-api-test-org/jenkins,shahharsh/jenkins,protazy/jenkins,deadmoose/jenkins,wuwen5/jenkins,hplatou/jenkins,liupugong/jenkins,kzantow/jenkins,elkingtonmcb/jenkins,maikeffi/hudson,rashmikanta-1984/jenkins,aquarellian/jenkins,1and1/jenkins,h4ck3rm1k3/jenkins,albers/jenkins,lordofthejars/jenkins,FTG-003/jenkins,msrb/jenkins,jtnord/jenkins,lvotypko/jenkins2,noikiy/jenkins,vjuranek/jenkins,jcsirot/jenkins,pselle/jenkins,292388900/jenkins,
arunsingh/jenkins,MarkEWaite/jenkins,h4ck3rm1k3/jenkins,rlugojr/jenkins,damianszczepanik/jenkins,soenter/jenkins,Wilfred/jenkins,wuwen5/jenkins,v1v/jenkins,KostyaSha/jenkins,FTG-003/jenkins,brunocvcunha/jenkins,iqstack/jenkins,tfennelly/jenkins,aquarellian/jenkins,jcsirot/jenkins,verbitan/jenkins,varmenise/jenkins,shahharsh/jenkins,duzifang/my-jenkins,h4ck3rm1k3/jenkins,keyurpatankar/hudson,DanielWeber/jenkins,jk47/jenkins,NehemiahMi/jenkins,rlugojr/jenkins,v1v/jenkins,thomassuckow/jenkins,liorhson/jenkins,noikiy/jenkins,verbitan/jenkins,aldaris/jenkins,github-api-test-org/jenkins,gorcz/jenkins,bpzhang/jenkins,vivek/hudson,bkmeneguello/jenkins,svanoort/jenkins,ydubreuil/jenkins,ChrisA89/jenkins,scoheb/jenkins,lindzh/jenkins,godfath3r/jenkins,iqstack/jenkins,godfath3r/jenkins,FarmGeek4Life/jenkins,azweb76/jenkins,stefanbrausch/hudson-main,dennisjlee/jenkins,dbroady1/jenkins,pjanouse/jenkins,msrb/jenkins,akshayabd/jenkins,paulwellnerbou/jenkins,tastatur/jenkins,iterate/coding-dojo,goldchang/jenkins,albers/jenkins,soenter/jenkins,mattclark/jenkins,ikedam/jenkins,petermarcoen/jenkins,tfennelly/jenkins,Jochen-A-Fuerbacher/jenkins,keyurpatankar/hudson,6WIND/jenkins,recena/jenkins,tangkun75/jenkins,pantheon-systems/jenkins,hudson/hudson-2.x,gusreiber/jenkins,liupugong/jenkins,stefanbrausch/hudson-main,verbitan/jenkins,seanlin816/jenkins,maikeffi/hudson,jenkinsci/jenkins,NehemiahMi/jenkins,FTG-003/jenkins,stephenc/jenkins,lordofthejars/jenkins,oleg-nenashev/jenkins,verbitan/jenkins,ydubreuil/jenkins,samatdav/jenkins,guoxu0514/jenkins,fbelzunc/jenkins,aldaris/jenkins,SenolOzer/jenkins,daniel-beck/jenkins,jpbriend/jenkins,albers/jenkins,gusreiber/jenkins,ndeloof/jenkins,abayer/jenkins,aldaris/jenkins,intelchen/jenkins,FarmGeek4Life/jenkins,godfath3r/jenkins,Krasnyanskiy/jenkins,tfennelly/jenkins,petermarcoen/jenkins,andresrc/jenkins,mrobinet/jenkins,alvarolobato/jenkins,nandan4/Jenkins,ydubreuil/jenkins,stephenc/jenkins,singh88/jenkins,seanlin816/jenkins,singh88/jenkins,stephenc/jenkins,huybrechts/hudson,lvotypko/jenkins2,CodeShane/jenkins,godfath3r/jenkins,jenkinsci/jenkins,SebastienGllmt/jenkins,paulmillar/jenkins,pantheon-systems/jenkins,FarmGeek4Life/jenkins,evernat/jenkins,scoheb/jenkins,liorhson/jenkins,gusreiber/jenkins,varmenise/jenkins,albers/jenkins,MarkEWaite/jenkins,6WIND/jenkins,escoem/jenkins,hashar/jenkins,lilyJi/jenkins,lvotypko/jenkins,DanielWeber/jenkins,yonglehou/jenkins,MarkEWaite/jenkins,hplatou/jenkins,ikedam/jenkins,alvarolobato/jenkins,daspilker/jenkins,1and1/jenkins,mcanthony/jenkins,DoctorQ/jenkins,lilyJi/jenkins,mpeltonen/jenkins,goldchang/jenkins,Wilfred/jenkins,luoqii/jenkins,jpbriend/jenkins,wuwen5/jenkins,SebastienGllmt/jenkins,vjuranek/jenkins,ikedam/jenkins,1and1/jenkins,paulwellnerbou/jenkins,mattclark/jenkins,hplatou/jenkins,jtnord/jenkins,kohsuke/hudson,duzifang/my-jenkins,1and1/jenkins,Vlatombe/jenkins,seanlin816/jenkins,my7seven/jenkins,rsandell/jenkins,h4ck3rm1k3/jenkins,pantheon-systems/jenkins,chbiel/jenkins,pjanouse/jenkins,gorcz/jenkins,stefanbrausch/hudson-main,yonglehou/jenkins,arunsingh/jenkins,lvotypko/jenkins3,thomassuckow/jenkins,mattclark/jenkins,morficus/jenkins,andresrc/jenkins,dbroady1/jenkins,huybrechts/hudson,chbiel/jenkins,v1v/jenkins,alvarolobato/jenkins,andresrc/jenkins,github-api-test-org/jenkins,chbiel/jenkins,kzantow/jenkins,hemantojhaa/jenkins,Krasnyanskiy/jenkins,svanoort/jenkins,SenolOzer/jenkins,amruthsoft9/Jenkis,chbiel/jenkins,mcanthony/jenkins,hemantojhaa/jenkins,escoem/jenkins,ajshastri/jenkins,khmarbaise/jenkins,vijayto/jenkins,esc
oem/jenkins,292388900/jenkins,vvv444/jenkins,jtnord/jenkins,samatdav/jenkins,tangkun75/jenkins,arunsingh/jenkins,liorhson/jenkins,Vlatombe/jenkins,vvv444/jenkins,dennisjlee/jenkins,maikeffi/hudson,gitaccountforprashant/gittest,dennisjlee/jenkins,kohsuke/hudson,hashar/jenkins,bkmeneguello/jenkins,rsandell/jenkins,iqstack/jenkins,aldaris/jenkins,ChrisA89/jenkins,jpbriend/jenkins,SenolOzer/jenkins,vjuranek/jenkins,everyonce/jenkins,arunsingh/jenkins,mdonohue/jenkins,rashmikanta-1984/jenkins,KostyaSha/jenkins,rlugojr/jenkins,aldaris/jenkins,vivek/hudson,scoheb/jenkins,brunocvcunha/jenkins,hashar/jenkins,ns163/jenkins,alvarolobato/jenkins,MichaelPranovich/jenkins_sc,my7seven/jenkins,yonglehou/jenkins,petermarcoen/jenkins,soenter/jenkins,ikedam/jenkins,batmat/jenkins,everyonce/jenkins,scoheb/jenkins,iterate/coding-dojo,luoqii/jenkins,vjuranek/jenkins,kohsuke/hudson,deadmoose/jenkins,FTG-003/jenkins,jzjzjzj/jenkins,synopsys-arc-oss/jenkins,Ykus/jenkins,vijayto/jenkins,ChrisA89/jenkins,ErikVerheul/jenkins,MadsNielsen/jtemp,olivergondza/jenkins,ajshastri/jenkins,liupugong/jenkins,DoctorQ/jenkins,petermarcoen/jenkins,amruthsoft9/Jenkis,aheritier/jenkins,synopsys-arc-oss/jenkins,batmat/jenkins,tastatur/jenkins,rashmikanta-1984/jenkins,ndeloof/jenkins,liorhson/jenkins,iqstack/jenkins,rashmikanta-1984/jenkins,vjuranek/jenkins,amruthsoft9/Jenkis,liupugong/jenkins,ndeloof/jenkins,iqstack/jenkins,damianszczepanik/jenkins,gitaccountforprashant/gittest,hplatou/jenkins,MadsNielsen/jtemp,aldaris/jenkins,deadmoose/jenkins,daspilker/jenkins,jhoblitt/jenkins,dariver/jenkins,yonglehou/jenkins,aquarellian/jenkins,huybrechts/hudson,maikeffi/hudson,andresrc/jenkins,chbiel/jenkins,evernat/jenkins,6WIND/jenkins,paulmillar/jenkins,keyurpatankar/hudson,bpzhang/jenkins,github-api-test-org/jenkins,lordofthejars/jenkins,jglick/jenkins,lvotypko/jenkins2,vivek/hudson,wangyikai/jenkins,Ykus/jenkins,MichaelPranovich/jenkins_sc,lindzh/jenkins,vvv444/jenkins,luoqii/jenkins,bpzhang/jenkins,brunocvcunha/jenkins,aduprat/jenkins,elkingtonmcb/jenkins,aheritier/jenkins,goldchang/jenkins,lvotypko/jenkins2,6WIND/jenkins,jenkinsci/jenkins,AustinKwang/jenkins,1and1/jenkins,recena/jenkins,aduprat/jenkins,gorcz/jenkins,ajshastri/jenkins,soenter/jenkins,stefanbrausch/hudson-main,rlugojr/jenkins,jk47/jenkins,jzjzjzj/jenkins,aduprat/jenkins,jpederzolli/jenkins-1,batmat/jenkins,gusreiber/jenkins,tastatur/jenkins,olivergondza/jenkins,pselle/jenkins,liorhson/jenkins,mpeltonen/jenkins,lvotypko/jenkins3,wuwen5/jenkins,aquarellian/jenkins,amuniz/jenkins,kohsuke/hudson,recena/jenkins,jk47/jenkins,DanielWeber/jenkins,everyonce/jenkins,bkmeneguello/jenkins,MichaelPranovich/jenkins_sc,vijayto/jenkins,morficus/jenkins,arcivanov/jenkins,akshayabd/jenkins,Jimilian/jenkins,verbitan/jenkins,duzifang/my-jenkins,vlajos/jenkins,vvv444/jenkins,ikedam/jenkins,mdonohue/jenkins,AustinKwang/jenkins,arcivanov/jenkins,dennisjlee/jenkins,SenolOzer/jenkins,vvv444/jenkins,morficus/jenkins,andresrc/jenkins,MadsNielsen/jtemp,hudson/hudson-2.x,viqueen/jenkins,NehemiahMi/jenkins,christ66/jenkins,ns163/jenkins,mcanthony/jenkins,amuniz/jenkins,aquarellian/jenkins,gitaccountforprashant/gittest,iterate/coding-dojo,vijayto/jenkins,Jochen-A-Fuerbacher/jenkins,yonglehou/jenkins,thomassuckow/jenkins,jcarrothers-sap/jenkins,Jimilian/jenkins,paulwellnerbou/jenkins,godfath3r/jenkins,vjuranek/jenkins,AustinKwang/jenkins,morficus/jenkins,luoqii/jenkins,Vlatombe/jenkins,seanlin816/jenkins,luoqii/jenkins,MarkEWaite/jenkins,tangkun75/jenkins,msrb/jenkins,andresrc/jenkins,vlajos/jenkins,MarkE
Waite/jenkins,shahharsh/jenkins,CodeShane/jenkins,mpeltonen/jenkins,shahharsh/jenkins,ChrisA89/jenkins,daniel-beck/jenkins,everyonce/jenkins,arunsingh/jenkins,sathiya-mit/jenkins,samatdav/jenkins,brunocvcunha/jenkins,gitaccountforprashant/gittest,lordofthejars/jenkins,ns163/jenkins,Krasnyanskiy/jenkins,lilyJi/jenkins,mrobinet/jenkins,github-api-test-org/jenkins,bkmeneguello/jenkins,noikiy/jenkins,lvotypko/jenkins3,lilyJi/jenkins,liupugong/jenkins,patbos/jenkins,svanoort/jenkins,oleg-nenashev/jenkins,gitaccountforprashant/gittest,protazy/jenkins,amuniz/jenkins,SebastienGllmt/jenkins,tangkun75/jenkins,Jimilian/jenkins,hemantojhaa/jenkins,wangyikai/jenkins,tastatur/jenkins,dariver/jenkins,guoxu0514/jenkins,lvotypko/jenkins3,lordofthejars/jenkins,albers/jenkins,brunocvcunha/jenkins,batmat/jenkins,jcsirot/jenkins,shahharsh/jenkins,aduprat/jenkins,aquarellian/jenkins,noikiy/jenkins,lvotypko/jenkins,aduprat/jenkins,Ykus/jenkins,singh88/jenkins,luoqii/jenkins,arcivanov/jenkins,gusreiber/jenkins,samatdav/jenkins,morficus/jenkins,ajshastri/jenkins,hudson/hudson-2.x,tfennelly/jenkins,hudson/hudson-2.x,tangkun75/jenkins,damianszczepanik/jenkins,jcarrothers-sap/jenkins,hplatou/jenkins,kohsuke/hudson,dariver/jenkins,mpeltonen/jenkins,jk47/jenkins,singh88/jenkins,6WIND/jenkins,intelchen/jenkins,ns163/jenkins,jcarrothers-sap/jenkins,pjanouse/jenkins,FarmGeek4Life/jenkins,jcarrothers-sap/jenkins,daspilker/jenkins,intelchen/jenkins,paulmillar/jenkins,pantheon-systems/jenkins,Jimilian/jenkins,huybrechts/hudson,v1v/jenkins,recena/jenkins,thomassuckow/jenkins,github-api-test-org/jenkins,vlajos/jenkins,jzjzjzj/jenkins,pselle/jenkins,rlugojr/jenkins,oleg-nenashev/jenkins,CodeShane/jenkins,hemantojhaa/jenkins,iterate/coding-dojo,varmenise/jenkins,yonglehou/jenkins,FarmGeek4Life/jenkins,protazy/jenkins,vlajos/jenkins,dbroady1/jenkins,everyonce/jenkins,batmat/jenkins,goldchang/jenkins,ydubreuil/jenkins,KostyaSha/jenkins,akshayabd/jenkins,bpzhang/jenkins,Jochen-A-Fuerbacher/jenkins,shahharsh/jenkins,ydubreuil/jenkins,jk47/jenkins,lilyJi/jenkins,DanielWeber/jenkins,hplatou/jenkins,AustinKwang/jenkins,tastatur/jenkins,FarmGeek4Life/jenkins,olivergondza/jenkins,evernat/jenkins,jhoblitt/jenkins,synopsys-arc-oss/jenkins,ikedam/jenkins,khmarbaise/jenkins,pselle/jenkins,ErikVerheul/jenkins,shahharsh/jenkins,godfath3r/jenkins,Jimilian/jenkins,khmarbaise/jenkins,DoctorQ/jenkins,csimons/jenkins,elkingtonmcb/jenkins,jenkinsci/jenkins,alvarolobato/jenkins,vlajos/jenkins,mrooney/jenkins,evernat/jenkins,christ66/jenkins,SebastienGllmt/jenkins,rsandell/jenkins,yonglehou/jenkins,mattclark/jenkins,vivek/hudson,jzjzjzj/jenkins,jcarrothers-sap/jenkins,intelchen/jenkins,lvotypko/jenkins2,ikedam/jenkins,amuniz/jenkins,Jimilian/jenkins,viqueen/jenkins,SenolOzer/jenkins,rsandell/jenkins,daniel-beck/jenkins,DanielWeber/jenkins,NehemiahMi/jenkins,mcanthony/jenkins,hemantojhaa/jenkins,stefanbrausch/hudson-main,abayer/jenkins,ikedam/jenkins,dbroady1/jenkins,damianszczepanik/jenkins,jk47/jenkins,sathiya-mit/jenkins,rashmikanta-1984/jenkins,fbelzunc/jenkins,dariver/jenkins,maikeffi/hudson,gorcz/jenkins,lvotypko/jenkins,jtnord/jenkins,jpederzolli/jenkins-1,Vlatombe/jenkins,jpederzolli/jenkins-1,svanoort/jenkins,ErikVerheul/jenkins,MichaelPranovich/jenkins_sc,singh88/jenkins,292388900/jenkins,DanielWeber/jenkins,amruthsoft9/Jenkis,akshayabd/jenkins,dariver/jenkins,ajshastri/jenkins,NehemiahMi/jenkins,everyonce/jenkins,jpederzolli/jenkins-1,amuniz/jenkins,tfennelly/jenkins,MichaelPranovich/jenkins_sc,lvotypko/jenkins2,vivek/hudson,pselle/jenkins,li
lyJi/jenkins,ns163/jenkins,Krasnyanskiy/jenkins,Wilfred/jenkins,tangkun75/jenkins,paulmillar/jenkins,kohsuke/hudson,vivek/hudson,iqstack/jenkins,pantheon-systems/jenkins,lindzh/jenkins,brunocvcunha/jenkins,varmenise/jenkins,jcsirot/jenkins,Vlatombe/jenkins,christ66/jenkins,mpeltonen/jenkins,luoqii/jenkins,andresrc/jenkins,NehemiahMi/jenkins,hemantojhaa/jenkins,damianszczepanik/jenkins,varmenise/jenkins,arcivanov/jenkins,lindzh/jenkins,azweb76/jenkins,jhoblitt/jenkins,jhoblitt/jenkins,mrobinet/jenkins,amruthsoft9/Jenkis,gorcz/jenkins,wangyikai/jenkins,guoxu0514/jenkins,oleg-nenashev/jenkins,mcanthony/jenkins,AustinKwang/jenkins,Ykus/jenkins,jzjzjzj/jenkins,bkmeneguello/jenkins,DanielWeber/jenkins,mrobinet/jenkins,ndeloof/jenkins,escoem/jenkins,vlajos/jenkins,KostyaSha/jenkins,msrb/jenkins,thomassuckow/jenkins,lordofthejars/jenkins,hashar/jenkins,duzifang/my-jenkins,daspilker/jenkins,csimons/jenkins,github-api-test-org/jenkins,gusreiber/jenkins,escoem/jenkins,olivergondza/jenkins,6WIND/jenkins,vivek/hudson,jenkinsci/jenkins,github-api-test-org/jenkins,Ykus/jenkins,goldchang/jenkins,maikeffi/hudson,jcarrothers-sap/jenkins,vivek/hudson,duzifang/my-jenkins,albers/jenkins,damianszczepanik/jenkins,verbitan/jenkins,Vlatombe/jenkins,patbos/jenkins,noikiy/jenkins,gorcz/jenkins,samatdav/jenkins,aheritier/jenkins,arcivanov/jenkins,pjanouse/jenkins,hemantojhaa/jenkins,stephenc/jenkins,keyurpatankar/hudson,rlugojr/jenkins,alvarolobato/jenkins,kzantow/jenkins,duzifang/my-jenkins,jglick/jenkins,292388900/jenkins,azweb76/jenkins,Jochen-A-Fuerbacher/jenkins,mpeltonen/jenkins,elkingtonmcb/jenkins,guoxu0514/jenkins,Krasnyanskiy/jenkins,paulmillar/jenkins,oleg-nenashev/jenkins,fbelzunc/jenkins,rsandell/jenkins,Krasnyanskiy/jenkins,SebastienGllmt/jenkins,mdonohue/jenkins,noikiy/jenkins,mdonohue/jenkins,gorcz/jenkins,jglick/jenkins,viqueen/jenkins,sathiya-mit/jenkins,morficus/jenkins,gusreiber/jenkins,gitaccountforprashant/gittest
|
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.cli;
import hudson.remoting.Channel;
import hudson.remoting.RemoteInputStream;
import hudson.remoting.RemoteOutputStream;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
* CLI entry point to Hudson.
*
* @author Kohsuke Kawaguchi
*/
public class CLI {
public static void main(final String[] _args) throws Exception {
List<String> args = Arrays.asList(_args);
String url = System.getenv("HUDSON_URL");
while(!args.isEmpty()) {
String head = args.get(0);
if(head.equals("-s") && args.size()>=2) {
url = args.get(1);
args = args.subList(2,args.size());
continue;
}
break;
}
if(url==null)
printUsageAndExit(Messages.CLI_NoURL());
if(!url.endsWith("/")) url+='/';
url+="cli";
if(args.isEmpty())
args = Arrays.asList("help"); // default to help
FullDuplexHttpStream con = new FullDuplexHttpStream(new URL(url));
ExecutorService pool = Executors.newCachedThreadPool();
Channel channel = new Channel("Chunked connection to "+url,
pool,con.getInputStream(),con.getOutputStream());
// execute the command
int r=-1;
try {
CliEntryPoint cli = (CliEntryPoint)channel.getRemoteProperty(CliEntryPoint.class.getName());
if(cli.protocolVersion()!=CliEntryPoint.VERSION) {
System.err.println(Messages.CLI_VersionMismatch());
} else {
r = cli.main(args, Locale.getDefault(), new RemoteInputStream(System.in),
new RemoteOutputStream(System.out), new RemoteOutputStream(System.err));
}
} finally {
channel.close();
pool.shutdown();
}
System.exit(r);
}
private static void printUsageAndExit(String msg) {
if(msg!=null) System.out.println(msg);
System.err.println(Messages.CLI_Usage());
System.exit(-1);
}
}
|
cli/src/main/java/hudson/cli/CLI.java
|
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.cli;
import hudson.remoting.Channel;
import hudson.remoting.RemoteInputStream;
import hudson.remoting.RemoteOutputStream;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
* CLI entry point to Hudson.
*
* @author Kohsuke Kawaguchi
*/
public class CLI {
public static void main(final String[] _args) throws Exception {
List<String> args = Arrays.asList(_args);
String url = System.getenv("HUDSON_URL");
while(!args.isEmpty()) {
String head = args.get(0);
if(head.equals("-s") && args.size()>=2) {
url = args.get(1);
args = args.subList(2,args.size());
continue;
}
break;
}
if(url==null)
printUsageAndExit(Messages.CLI_NoURL());
if(args.isEmpty())
args = Arrays.asList("help"); // default to help
FullDuplexHttpStream con = new FullDuplexHttpStream(new URL(url));
ExecutorService pool = Executors.newCachedThreadPool();
Channel channel = new Channel("Chunked connection to "+url,
pool,con.getInputStream(),con.getOutputStream());
// execute the command
int r=-1;
try {
CliEntryPoint cli = (CliEntryPoint)channel.getRemoteProperty(CliEntryPoint.class.getName());
if(cli.protocolVersion()!=CliEntryPoint.VERSION) {
System.err.println(Messages.CLI_VersionMismatch());
} else {
r = cli.main(args, Locale.getDefault(), new RemoteInputStream(System.in),
new RemoteOutputStream(System.out), new RemoteOutputStream(System.err));
}
} finally {
channel.close();
pool.shutdown();
}
System.exit(r);
}
private static void printUsageAndExit(String msg) {
if(msg!=null) System.out.println(msg);
System.err.println(Messages.CLI_Usage());
System.exit(-1);
}
}
|
needs to call the CLI URL
git-svn-id: 28f34f9aa52bc55a5ddd5be9e183c5cccadc6ee4@17604 71c3de6d-444a-0410-be80-ed276b4c234a
|
cli/src/main/java/hudson/cli/CLI.java
|
needs to call the CLI URL
|
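The new version above normalizes the server URL before opening the duplex stream, so the client talks to the /cli entry point instead of the root URL. A minimal sketch of that logic with a hypothetical server address:
String url = "http://localhost:8080";      // from -s or the HUDSON_URL environment variable
if (!url.endsWith("/")) url += '/';
url += "cli";                              // -> http://localhost:8080/cli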
|
Java
|
mit
|
3c20010602b1903ee451f59a15e69cf700de3a6f
| 0
|
eparlato/point-of-sale-exercise
|
package pointofsale;
import java.util.HashMap;
public class MyShop {
Store store;
Display display;
PointOfSale pos;
public static void main(String[] args) {
MyShop myShop = new MyShop();
myShop.setup();
myShop.scanAFewBarcodes();
}
private void scanAFewBarcodes() {
pos.onBarcode("qwerty1");
pos.onBarcode("zxcvbn3");
pos.onBarcode("");
pos.onBarcode(null);
pos.onBarcode("lkjhgf");
}
private void setup() {
HashMap<String, Item> items = new HashMap<String, Item>();
items.put("qwerty1", new Item("qwerty1", "$10.0"));
items.put("asdfgh2", new Item("asdfgh2", "$20.0"));
items.put("zxcvbn3", new Item("zxcvbn3", "$30.0"));
store = new InMemoryStore(items);
display = new ConsoleDisplay();
pos = new PointOfSale(display, store);
}
}
|
src/main/java/pointofsale/MyShop.java
|
package pointofsale;
import java.util.HashMap;
public class MyShop {
static Store store;
static Display display;
static PointOfSale pos;
public static void main(String[] args) {
setup();
pos.onBarcode("qwerty1");
pos.onBarcode("zxcvbn3");
pos.onBarcode("");
pos.onBarcode(null);
pos.onBarcode("lkjhgf");
}
private static void setup() {
HashMap<String, Item> items = new HashMap<String, Item>();
items.put("qwerty1", new Item("qwerty1", "$10.0"));
items.put("asdfgh2", new Item("asdfgh2", "$20.0"));
items.put("zxcvbn3", new Item("zxcvbn3", "$30.0"));
store = new InMemoryStore(items);
display = new ConsoleDisplay();
pos = new PointOfSale(display, store);
}
}
|
Removed unnecessary static objects and methods from MyShop
|
src/main/java/pointofsale/MyShop.java
|
Removed unnecessary static objects and methods from MyShop
|
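A condensed view of the refactoring described above: the entry point now builds an instance instead of relying on static state (method bodies as in the new version of the file):
public static void main(String[] args) {
    MyShop myShop = new MyShop();   // instance replaces the former static fields
    myShop.setup();
    myShop.scanAFewBarcodes();      // the example scans that previously sat directly in main
}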
|
Java
|
mit
|
48744f65c21bb7247c62f585e2964761ca4ca13b
| 0
|
EivindEE/SemFM,EivindEE/SemFM
|
package edu.uib.info310.search;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import edu.uib.info310.model.Artist;
import edu.uib.info310.model.Event;
import edu.uib.info310.model.Record;
import edu.uib.info310.model.Track;
import edu.uib.info310.model.imp.ArtistImpl;
import edu.uib.info310.model.imp.EventImpl;
import edu.uib.info310.model.imp.RecordImp;
import edu.uib.info310.search.builder.OntologyBuilder;
@Component
public class SearcherImpl implements Searcher {
private static final Logger LOGGER = LoggerFactory.getLogger(SearcherImpl.class);
private OntologyBuilder builder = new OntologyBuilder();
private Model model;
private ArtistImpl artist;
public Artist searchArtist(String search_string) throws ArtistNotFoundException {
this.artist = new ArtistImpl();
this.model = builder.createArtistOntology(search_string);
LOGGER.debug("Size of infered model: " + model.size());
setArtistIdAndName();
setSimilarArtist();
setArtistEvents();
setArtistDiscography();
setArtistInfo();
return this.artist;
}
private void setArtistIdAndName() {
String getIdStr = "PREFIX rdf:<http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX mo:<http://purl.org/ontology/mo/> PREFIX foaf:<http://xmlns.com/foaf/0.1/> SELECT ?id ?name WHERE {?id foaf:name ?name; mo:similar-to ?something.}";
QueryExecution execution = QueryExecutionFactory.create(getIdStr, model);
ResultSet similarResults = execution.execSelect();
if(similarResults.hasNext()){
QuerySolution solution = similarResults.next();
this.artist.setId(solution.get("id").toString());
this.artist.setName(solution.get("name").toString());
}
LOGGER.debug("Artist id set to " + this.artist.getId());
}
private void setArtistDiscography() {
List<Record> discog = new LinkedList<Record>();
Map<String,Record> uniqueRecord = new HashMap<String, Record>();
String getDiscographyStr = "PREFIX foaf: <http://xmlns.com/foaf/0.1/> " +
"PREFIX mo: <http://purl.org/ontology/mo/> " +
"PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> " +
"PREFIX dc: <http://purl.org/dc/terms/> " +
"PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> " +
"SELECT DISTINCT " +
" ?artistId ?albumId ?release ?title ?image ?year ?labelId ?labelName ?track ?artist "+
" WHERE { " +
// "?artistId foaf:name \"" + artist.getName() + "\". "+
//"<" + this.artist.getId() + "> foaf:made ?albumId."+
"?artistId foaf:made ?albumId. " +
"?albumId dc:title ?title." +
"OPTIONAL {?albumId mo:publisher ?labelId. } "+
"OPTIONAL {?albumId dc:issued ?year. }" +
"OPTIONAL {?albumId foaf:depiction ?image. }" +
"}";
LOGGER.debug("Search for albums for artist with name: " + this.artist.getName() + ", with query:" + getDiscographyStr);
QueryExecution execution = QueryExecutionFactory.create(getDiscographyStr, model);
ResultSet albums = execution.execSelect();
LOGGER.debug("Found records? " + albums.hasNext());
while(albums.hasNext()){
RecordImp recordResult = new RecordImp();
QuerySolution queryAlbum = albums.next();
recordResult.setId(queryAlbum.get("albumId").toString());
recordResult.setName(queryAlbum.get("title").toString());
if(queryAlbum.get("image") != null) {
recordResult.setImage(queryAlbum.get("image").toString());
}
if(queryAlbum.get("year") != null) {
recordResult.setYear(queryAlbum.get("year").toString());
}
if(recordResult.getImage() != null){
uniqueRecord.put(recordResult.getName(), recordResult);
}
}
for(Record record : uniqueRecord.values()){
discog.add(record);
}
this.artist.setDiscography(discog);
LOGGER.debug("Found "+ artist.getDiscography().size() +" artist records");
}
private void setSimilarArtist() {
List<Artist> similar = new LinkedList<Artist>();
String similarStr = "PREFIX rdf:<http://www.w3.org/1999/02/22-rdf-syntax-ns#> " +
"PREFIX mo:<http://purl.org/ontology/mo/> " +
"PREFIX foaf:<http://xmlns.com/foaf/0.1/> " +
"SELECT ?name ?id ?image " +
" WHERE { <" + this.artist.getId() + "> mo:similar-to ?id . " +
"?id foaf:name ?name; " +
" mo:image ?image } ";
QueryExecution execution = QueryExecutionFactory.create(similarStr, model);
ResultSet similarResults = execution.execSelect();
while(similarResults.hasNext()){
ArtistImpl similarArtist = new ArtistImpl();
QuerySolution queryArtist = similarResults.next();
similarArtist.setName(queryArtist.get("name").toString());
similarArtist.setId(queryArtist.get("id").toString());
similarArtist.setImage(queryArtist.get("image").toString());
similar.add(similarArtist);
}
artist.setSimilar(similar);
LOGGER.debug("Found " + this.artist.getSimilar().size() +" similar artists");
}
private void setArtistEvents(){
List<Event> events = new LinkedList<Event>();
String getArtistEventsStr = " PREFIX foaf:<http://xmlns.com/foaf/0.1/> PREFIX event: <http://purl.org/NET/c4dm/event.owl#> PREFIX v: <http://www.w3.org/2006/vcard/ns#> PREFIX geo: <http://www.w3.org/2003/01/geo/wgs84_pos#>" +
"SELECT ?venueId ?venueName ?date ?lng ?lat ?location " +
" WHERE {?preformance foaf:hasAgent <" + this.artist.getId() + ">; event:place ?venueId; event:time ?date. ?venueId v:organisation-name ?venueName; geo:lat ?lat; geo:long ?lng; v:locality ?location}";
QueryExecution execution = QueryExecutionFactory.create(getArtistEventsStr, model);
ResultSet eventResults = execution.execSelect();
while(eventResults.hasNext()){
EventImpl event = new EventImpl();
QuerySolution queryEvent = eventResults.next();
event.setId(queryEvent.get("venueId").toString());
event.setVenue(queryEvent.get("venueName").toString());
event.setLat(queryEvent.get("lat").toString());
event.setLng(queryEvent.get("lng").toString());
event.setDate(queryEvent.get("date").toString());
event.setLocation(queryEvent.get("location").toString());
events.add(event);
}
this.artist.setEvents(events);
LOGGER.debug("Found "+ artist.getEvents().size() +" artist events");
}
private void setArtistInfo() {
String id = " <" + artist.getId() + "> ";
String getArtistInfoStr = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> " +
"PREFIX foaf: <http://xmlns.com/foaf/0.1/> " +
"PREFIX mo: <http://purl.org/ontology/mo/> " +
"PREFIX dbpedia: <http://dbpedia.org/property/> " +
"PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> " +
"PREFIX owl: <http://www.w3.org/2002/07/owl#> " +
"PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> " +
"PREFIX dbont: <http://dbpedia.org/ontology/> " +
"SELECT DISTINCT * WHERE {" +
"OPTIONAL { ?artist mo:fanpage ?fanpage.} " +
"OPTIONAL { ?artist mo:imdb ?imdb. } " +
"OPTIONAL { ?artist mo:myspace ?myspace. } " +
"OPTIONAL { ?artist mo:homepage ?homepage. } " +
"OPTIONAL { ?artist rdfs:comment ?shortDesc.} " +
"OPTIONAL { ?artist mo:image ?image}" +
"OPTIONAL { ?artist mo:biography ?bio. } " +
"OPTIONAL { ?artist dbont:birthname ?birthname} " +
"OPTIONAL { ?artist dbont:hometown ?hometown. } " +
"OPTIONAL { ?artist mo:origin ?origin. } " +
"OPTIONAL { ?artist mo:activity_start ?start. } " +
"OPTIONAL { ?artist mo:activity_end ?end. } " +
"OPTIONAL { ?artist dbont:birthDate ?birthdate. } " +
"OPTIONAL { ?artist dbont:deathDate ?deathdate. } " +
"OPTIONAL { ?artist mo:wikipedia ?wikipedia. } " +
"OPTIONAL { ?artist foaf:page ?bbcpage. }}";
QueryExecution ex = QueryExecutionFactory.create(getArtistInfoStr, model);
ResultSet results = ex.execSelect();
HashMap<String,String> metaMap = new HashMap<String,String>();
List<String> fanpages = new LinkedList<String>();
while(results.hasNext()) {
// TODO: optimize (e.g. storing in variables instead of performing query.get several times?)
QuerySolution query = results.next();
if(query.get("image") != null){
artist.setImage(query.get("image").toString());
}
if(query.get("fanpage") != null){
String fanpage = "<a href=\"" + query.get("fanpage").toString() + "\">" + query.get("fanpage").toString() + "</a>";
if(!fanpages.contains(fanpage)) {
fanpages.add(fanpage);
}
}
if(query.get("bio") != null) {
artist.setBio(query.get("bio").toString());
}
if(query.get("wikipedia") != null) {
metaMap.put("Wikipedia", ("<a href=\"" + query.get("wikipedia").toString() + "\">" + query.get("wikipedia").toString() + "</a>"));
}
if(query.get("bbcpage") != null) {
metaMap.put("BBC Music", ("<a href=\"" + query.get("bbcpage").toString() + "\">" + query.get("bbcpage").toString() + "</a>"));
}
if(query.get("birthdate") != null) {
metaMap.put("Born", (query.get("birthdate").toString()));
}
if(query.get("homepage") != null) {
metaMap.put("Homepage", ("<a href=\"" + query.get("homepage").toString() + "\">" + query.get("homepage").toString() + "</a>"));
}
if(query.get("imdb") != null) {
metaMap.put("IMDB", ("<a href=\"" + query.get("imdb").toString() + "\">" + query.get("imdb").toString() + "</a>"));
}
if(query.get("myspace") != null) {
metaMap.put("MySpace", ("<a href=\"" + query.get("myspace").toString() + "\">" + query.get("myspace").toString() + "</a>"));
}
if(query.get("shortDesc") != null) {
artist.setShortDescription(query.get("shortDesc").toString());
}
if(query.get("birthname") != null) {
metaMap.put("Name", (query.get("birthname").toString()));
}
if(query.get("birthdate") != null) {
metaMap.put("Born", (query.get("birthdate").toString()));
}
if(query.get("deathdate") != null) {
metaMap.put("Died", (query.get("deathdate").toString()));
}
if(query.get("origin") != null) {
metaMap.put("From", (query.get("origin").toString()));
}
if(query.get("hometown") != null) {
metaMap.put("Living", (query.get("hometown").toString()));
}
if(query.get("start") != null) {
String activityStart = query.get("start").toString();
if(query.get("end") != null) {
activityStart += "-" + query.get("end").toString();
}
metaMap.put("Active",activityStart);
}
}
if(!fanpages.isEmpty()) {
metaMap.put("Fanpages", fanpages.toString());
}
artist.setMeta(metaMap);
LOGGER.debug("Found " + artist.getMeta().size() + " fun facts.");
}
public Event searchEvent(String search_string) {
// TODO Auto-generated method stub
return null;
}
public Record searchRecord(String search_string) {
// TODO Auto-generated method stub
return null;
}
public Track searchTrack(String search_string) {
// TODO Auto-generated method stub
return null;
}
public static void main(String[] args) throws ArtistNotFoundException {
Searcher searcher = new SearcherImpl();
searcher.searchArtist("Guns N Roses");
}
}
|
src/main/java/edu/uib/info310/search/SearcherImpl.java
|
package edu.uib.info310.search;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import edu.uib.info310.model.Artist;
import edu.uib.info310.model.Event;
import edu.uib.info310.model.Record;
import edu.uib.info310.model.Track;
import edu.uib.info310.model.imp.ArtistImpl;
import edu.uib.info310.model.imp.EventImpl;
import edu.uib.info310.model.imp.RecordImp;
import edu.uib.info310.search.builder.OntologyBuilder;
@Component
public class SearcherImpl implements Searcher {
private static final Logger LOGGER = LoggerFactory.getLogger(SearcherImpl.class);
private OntologyBuilder builder = new OntologyBuilder();
private Model model;
private ArtistImpl artist;
public Artist searchArtist(String search_string) throws ArtistNotFoundException {
this.artist = new ArtistImpl();
this.model = builder.createArtistOntology(search_string);
LOGGER.debug("Size of infered model: " + model.size());
setArtistIdAndName();
setSimilarArtist();
setArtistEvents();
setArtistDiscography();
setArtistInfo();
return this.artist;
}
private void setArtistIdAndName() {
String getIdStr = "PREFIX rdf:<http://www.w3.org/1999/02/22-rdf-syntax-ns#> PREFIX mo:<http://purl.org/ontology/mo/> PREFIX foaf:<http://xmlns.com/foaf/0.1/> SELECT ?id ?name WHERE {?id foaf:name ?name; mo:similar-to ?something.}";
QueryExecution execution = QueryExecutionFactory.create(getIdStr, model);
ResultSet similarResults = execution.execSelect();
if(similarResults.hasNext()){
QuerySolution solution = similarResults.next();
this.artist.setId(solution.get("id").toString());
this.artist.setName(solution.get("name").toString());
}
LOGGER.debug("Artist id set to " + this.artist.getId());
}
private void setArtistDiscography() {
List<Record> discog = new LinkedList<Record>();
Map<String,Record> uniqueRecord = new HashMap<String, Record>();
String getDiscographyStr = "PREFIX foaf: <http://xmlns.com/foaf/0.1/> " +
"PREFIX mo: <http://purl.org/ontology/mo/> " +
"PREFIX xsd: <http://www.w3.org/2001/XMLSchema#> " +
"PREFIX dc: <http://purl.org/dc/terms/> " +
"PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> " +
"SELECT DISTINCT " +
" ?artistId ?albumId ?release ?title ?image ?year ?labelId ?labelName ?track ?artist "+
" WHERE { " +
// "?artistId foaf:name \"" + artist.getName() + "\". "+
//"<" + this.artist.getId() + "> foaf:made ?albumId."+
"?artistId foaf:made ?albumId. " +
"?albumId dc:title ?title." +
"OPTIONAL {?albumId mo:publisher ?labelId. } "+
"OPTIONAL {?albumId dc:issued ?year. }" +
"OPTIONAL {?albumId foaf:depiction ?image. }" +
"}";
LOGGER.debug("Search for albums for artist with name: " + this.artist.getName() + ", with query:" + getDiscographyStr);
QueryExecution execution = QueryExecutionFactory.create(getDiscographyStr, model);
ResultSet albums = execution.execSelect();
LOGGER.debug("Found records? " + albums.hasNext());
while(albums.hasNext()){
RecordImp recordResult = new RecordImp();
QuerySolution queryAlbum = albums.next();
recordResult.setId(queryAlbum.get("albumId").toString());
recordResult.setName(queryAlbum.get("title").toString());
if(queryAlbum.get("image") != null) {
recordResult.setImage(queryAlbum.get("image").toString());
}
if(queryAlbum.get("year") != null) {
recordResult.setYear(queryAlbum.get("year").toString());
}
if(recordResult.getImage() != null){
uniqueRecord.put(recordResult.getName(), recordResult);
}
}
for(Record record : uniqueRecord.values()){
discog.add(record);
}
this.artist.setDiscography(discog);
LOGGER.debug("Found "+ artist.getDiscography().size() +" artist records");
}
private void setSimilarArtist() {
List<Artist> similar = new LinkedList<Artist>();
String similarStr = "PREFIX rdf:<http://www.w3.org/1999/02/22-rdf-syntax-ns#> " +
"PREFIX mo:<http://purl.org/ontology/mo/> " +
"PREFIX foaf:<http://xmlns.com/foaf/0.1/> " +
"SELECT ?name ?id ?image " +
" WHERE { <" + this.artist.getId() + "> mo:similar-to ?id . " +
"?id foaf:name ?name; " +
" mo:image ?image } ";
QueryExecution execution = QueryExecutionFactory.create(similarStr, model);
ResultSet similarResults = execution.execSelect();
while(similarResults.hasNext()){
ArtistImpl similarArtist = new ArtistImpl();
QuerySolution queryArtist = similarResults.next();
similarArtist.setName(queryArtist.get("name").toString());
similarArtist.setId(queryArtist.get("id").toString());
similarArtist.setImage(queryArtist.get("image").toString());
similar.add(similarArtist);
}
artist.setSimilar(similar);
LOGGER.debug("Found " + this.artist.getSimilar().size() +" similar artists");
}
private void setArtistEvents(){
List<Event> events = new LinkedList<Event>();
String getArtistEventsStr = " PREFIX foaf:<http://xmlns.com/foaf/0.1/> PREFIX event: <http://purl.org/NET/c4dm/event.owl#> PREFIX v: <http://www.w3.org/2006/vcard/ns#> PREFIX geo: <http://www.w3.org/2003/01/geo/wgs84_pos#>" +
"SELECT ?venueId ?venueName ?date ?lng ?lat ?location " +
" WHERE {?preformance foaf:hasAgent <" + this.artist.getId() + ">; event:place ?venueId; event:time ?date. ?venueId v:organisation-name ?venueName; geo:lat ?lat; geo:long ?lng; v:locality ?location}";
QueryExecution execution = QueryExecutionFactory.create(getArtistEventsStr, model);
ResultSet eventResults = execution.execSelect();
while(eventResults.hasNext()){
EventImpl event = new EventImpl();
QuerySolution queryEvent = eventResults.next();
event.setId(queryEvent.get("venueId").toString());
event.setVenue(queryEvent.get("venueName").toString());
event.setLat(queryEvent.get("lat").toString());
event.setLng(queryEvent.get("lng").toString());
event.setDate(queryEvent.get("date").toString());
event.setLocation(queryEvent.get("location").toString());
events.add(event);
}
this.artist.setEvents(events);
LOGGER.debug("Found "+ artist.getEvents().size() +" artist events");
}
private void setArtistInfo() {
String id = " <" + artist.getId() + "> ";
String getArtistInfoStr = "PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> " +
"PREFIX foaf: <http://xmlns.com/foaf/0.1/> " +
"PREFIX mo: <http://purl.org/ontology/mo/> " +
"PREFIX dbpedia: <http://dbpedia.org/property/> " +
"PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> " +
"PREFIX owl: <http://www.w3.org/2002/07/owl#> " +
"PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#> " +
"PREFIX dbont: <http://dbpedia.org/ontology/> " +
"SELECT DISTINCT * WHERE {" +
"OPTIONAL { ?bbcartist mo:fanpage ?fanpage.} " +
"OPTIONAL { ?bbcartist mo:imdb ?imdb. } " +
"OPTIONAL { ?bbcartist mo:myspace ?myspace. } " +
"OPTIONAL { ?bbcartist foaf:homepage ?homepage. } " +
"OPTIONAL { ?bbcartist rdfs:comment ?shortDesc. Filter (lang(?shortDesc) = '').} " +
"OPTIONAL { ?image foaf:name ?name; mo:image ?image}" +
"OPTIONAL { ?dbartist dbpedia:shortDescription ?shortDescEn .} " +
"OPTIONAL { ?dbartist dbpedia:abstract ?bio. Filter (lang(?bio) = 'en').} " +
"OPTIONAL { ?dbartist dbont:abstract ?bio. Filter (lang(?bio) = 'en').} " +
"OPTIONAL { ?dbartist dbont:birthname ?birthname} " +
"OPTIONAL { ?dbartist dbont:hometown ?origin. } " +
"OPTIONAL { ?dbartist dbpedia:origin ?origin. } " +
"OPTIONAL { ?dbartist dbpedia:yearsActive ?yearsactive. } " +
"OPTIONAL { ?dbartist dbpedia:dateOfBirth ?birthdate. } " +
"OPTIONAL { ?dbartist foaf:name ?name; foaf:page ?wikipedia. } " +
"OPTIONAL { ?bbcartist foaf:name ?name; foaf:page ?bbcpage. }}";
QueryExecution ex = QueryExecutionFactory.create(getArtistInfoStr, model);
ResultSet results = ex.execSelect();
HashMap<String,String> metaMap = new HashMap<String,String>();
List<String> fanpages = new LinkedList<String>();
while(results.hasNext()) {
// TODO: optimize (e.g. storing in variables instead of performing query.get several times?)
QuerySolution query = results.next();
if(query.get("image") != null){
artist.setImage(query.get("image").toString());
}
if(query.get("fanpage") != null){
String fanpage = "<a href=\"" + query.get("fanpage").toString() + "\">" + query.get("fanpage").toString() + "</a>";
if(!fanpages.contains(fanpage)) {
fanpages.add(fanpage);
}
}
if(query.get("bio") != null) {
artist.setBio(query.get("bio").toString());
}
if(query.get("wikipedia") != null) {
metaMap.put("Wikipedia", ("<a href=\"" + query.get("wikipedia").toString() + "\">" + query.get("wikipedia").toString() + "</a>"));
}
if(query.get("bbcpage") != null) {
metaMap.put("BBC Music", ("<a href=\"" + query.get("bbcpage").toString() + "\">" + query.get("bbcpage").toString() + "</a>"));
}
if(query.get("birthdate") != null) {
metaMap.put("Born", (query.get("birthdate").toString()));
}
if(query.get("homepage") != null) {
metaMap.put("Homepage", ("<a href=\"" + query.get("homepage").toString() + "\">" + query.get("homepage").toString() + "</a>"));
}
if(query.get("imdb") != null) {
metaMap.put("IMDB", ("<a href=\"" + query.get("imdb").toString() + "\">" + query.get("imdb").toString() + "</a>"));
}
if(query.get("myspace") != null) {
metaMap.put("MySpace", ("<a href=\"" + query.get("myspace").toString() + "\">" + query.get("myspace").toString() + "</a>"));
}
if(query.get("shortDesc") != null) {
artist.setShortDescription(query.get("shortDesc").toString());
}else{
if(query.get("shortDescEn") != null){
artist.setShortDescription(query.get("shortDescEn").toString());
}
}
if(query.get("birthname") != null) {
metaMap.put("Name", (query.get("birthname").toString()));
}
if(query.get("origin") != null) {
metaMap.put("From", (query.get("origin").toString()));
}
if(query.get("yearsactive") != null) {
metaMap.put("Active", (query.get("yearsactive").toString()));
}
}
if(!fanpages.isEmpty()) {
metaMap.put("Fanpages", fanpages.toString());
}
artist.setMeta(metaMap);
LOGGER.debug("Found " + artist.getMeta().size() + " fun facts.");
}
public Event searchEvent(String search_string) {
// TODO Auto-generated method stub
return null;
}
public Record searchRecord(String search_string) {
// TODO Auto-generated method stub
return null;
}
public Track searchTrack(String search_string) {
// TODO Auto-generated method stub
return null;
}
public static void main(String[] args) throws ArtistNotFoundException {
Searcher searcher = new SearcherImpl();
searcher.searchArtist("Guns N Roses");
}
}
|
Rewrote the Metadata query to fit our new model
|
src/main/java/edu/uib/info310/search/SearcherImpl.java
|
Rewrote the Metadata query to fit our new model
|
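The rewritten query in the new version binds all metadata to a single ?artist resource of the merged model instead of the separate ?bbcartist / ?dbartist variables used before. A minimal sketch of running such a query with the Jena calls already used in the class (query abbreviated to one OPTIONAL pattern; prefixes as declared in setArtistInfo):
String metaQuery = "PREFIX mo: <http://purl.org/ontology/mo/> " +
    "SELECT DISTINCT * WHERE { OPTIONAL { ?artist mo:fanpage ?fanpage. } }";
QueryExecution ex = QueryExecutionFactory.create(metaQuery, model);
ResultSet results = ex.execSelect();
while (results.hasNext()) {
    QuerySolution row = results.next();
    if (row.get("fanpage") != null) {
        // collect the fan page link, as setArtistInfo() does for each optional binding
    }
}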
|
Java
|
epl-1.0
|
3a8b599df01785db47af61c291dbb162e95d4c6a
| 0
|
rohitdubey12/kura,ymai/kura,nicolatimeus/kura,markoer/kura,ctron/kura,markoer/kura,darionct/kura,rohitdubey12/kura,ctron/kura,cdealti/kura,darionct/kura,ymai/kura,cdealti/kura,gavinying/kura,nicolatimeus/kura,markoer/kura,MMaiero/kura,cdealti/kura,nicolatimeus/kura,ymai/kura,ctron/kura,rohitdubey12/kura,unverbraucht/kura,gavinying/kura,ctron/kura,rohitdubey12/kura,amitjoy/kura,MMaiero/kura,markoer/kura,darionct/kura,rohitdubey12/kura,nicolatimeus/kura,MMaiero/kura,cdealti/kura,amitjoy/kura,gavinying/kura,amitjoy/kura,nicolatimeus/kura,markoer/kura,MMaiero/kura,unverbraucht/kura,MMaiero/kura,unverbraucht/kura,unverbraucht/kura,ctron/kura,gavinying/kura,darionct/kura,amitjoy/kura,darionct/kura,ymai/kura,amitjoy/kura,ymai/kura,unverbraucht/kura,MMaiero/kura,darionct/kura,cdealti/kura,ctron/kura,ymai/kura,gavinying/kura,markoer/kura,gavinying/kura,nicolatimeus/kura,cdealti/kura,amitjoy/kura
|
/*******************************************************************************
* Copyright (c) 2011, 2016 Eurotech and/or its affiliates
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Eurotech
*******************************************************************************/
package org.eclipse.kura.web.client.ui.Network;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.eclipse.kura.web.client.messages.Messages;
import org.eclipse.kura.web.client.messages.ValidationMessages;
import org.eclipse.kura.web.client.ui.EntryClassUi;
import org.eclipse.kura.web.client.util.FailureHandler;
import org.eclipse.kura.web.client.util.MessageUtils;
import org.eclipse.kura.web.client.util.TextFieldValidator.FieldType;
import org.eclipse.kura.web.shared.model.GwtNetIfConfigMode;
import org.eclipse.kura.web.shared.model.GwtNetIfStatus;
import org.eclipse.kura.web.shared.model.GwtNetIfType;
import org.eclipse.kura.web.shared.model.GwtNetInterfaceConfig;
import org.eclipse.kura.web.shared.model.GwtSession;
import org.eclipse.kura.web.shared.model.GwtXSRFToken;
import org.eclipse.kura.web.shared.service.GwtNetworkService;
import org.eclipse.kura.web.shared.service.GwtNetworkServiceAsync;
import org.eclipse.kura.web.shared.service.GwtSecurityTokenService;
import org.eclipse.kura.web.shared.service.GwtSecurityTokenServiceAsync;
import org.gwtbootstrap3.client.ui.Alert;
import org.gwtbootstrap3.client.ui.Button;
import org.gwtbootstrap3.client.ui.Form;
import org.gwtbootstrap3.client.ui.FormControlStatic;
import org.gwtbootstrap3.client.ui.FormGroup;
import org.gwtbootstrap3.client.ui.FormLabel;
import org.gwtbootstrap3.client.ui.HelpBlock;
import org.gwtbootstrap3.client.ui.ListBox;
import org.gwtbootstrap3.client.ui.Modal;
import org.gwtbootstrap3.client.ui.PanelBody;
import org.gwtbootstrap3.client.ui.PanelHeader;
import org.gwtbootstrap3.client.ui.TextBox;
import org.gwtbootstrap3.client.ui.constants.ValidationState;
import org.gwtbootstrap3.client.ui.html.Span;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.BlurEvent;
import com.google.gwt.event.dom.client.BlurHandler;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.MouseOutEvent;
import com.google.gwt.event.dom.client.MouseOutHandler;
import com.google.gwt.event.dom.client.MouseOverEvent;
import com.google.gwt.event.dom.client.MouseOverHandler;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.Widget;
public class TabTcpIpUi extends Composite implements Tab {
private static final String IPV4_MODE_MANUAL = GwtNetIfConfigMode.netIPv4ConfigModeManual.name();
private static final String IPV4_MODE_DHCP = GwtNetIfConfigMode.netIPv4ConfigModeDHCP.name();
private static final String IPV4_MODE_DHCP_MESSAGE = MessageUtils.get(IPV4_MODE_DHCP);
private static final String IPV4_STATUS_WAN = GwtNetIfStatus.netIPv4StatusEnabledWAN.name();
private static final String IPV4_STATUS_WAN_MESSAGE = MessageUtils.get(IPV4_STATUS_WAN);
private static final String IPV4_STATUS_LAN = GwtNetIfStatus.netIPv4StatusEnabledLAN.name();
private static final String IPV4_STATUS_LAN_MESSAGE = MessageUtils.get(IPV4_STATUS_LAN);
private static final String IPV4_STATUS_DISABLED = GwtNetIfStatus.netIPv4StatusDisabled.name();
private static final String IPV4_STATUS_DISABLED_MESSAGE = MessageUtils.get(IPV4_STATUS_DISABLED);
private static TabTcpIpUiUiBinder uiBinder = GWT.create(TabTcpIpUiUiBinder.class);
private static final Logger logger = Logger.getLogger(TabTcpIpUi.class.getSimpleName());
private static final Messages MSGS = GWT.create(Messages.class);
private static final ValidationMessages VMSGS = GWT.create(ValidationMessages.class);
private final GwtSecurityTokenServiceAsync gwtXSRFService = GWT.create(GwtSecurityTokenService.class);
private final GwtNetworkServiceAsync gwtNetworkService = GWT.create(GwtNetworkService.class);
interface TabTcpIpUiUiBinder extends UiBinder<Widget, TabTcpIpUi> {
}
GwtSession session;
boolean m_dirty;
GwtNetInterfaceConfig selectedNetIfConfig;
NetworkTabsUi tabs;
@UiField
FormGroup groupIp, groupSubnet, groupGateway, groupDns;
@UiField
FormLabel labelStatus, labelConfigure, labelIp, labelSubnet, labelGateway,
labelDns, labelSearch;
@UiField
HelpBlock helpIp, helpSubnet, helpGateway, helpDns;
@UiField
TextBox ip, subnet, gateway, dns, search;
@UiField
ListBox status, configure;
@UiField
Button renew;
@UiField
PanelHeader helpTitle;
@UiField
PanelBody helpText;
@UiField
Form form;
@UiField
FormControlStatic dnsRead;
@UiField
Modal wanModal;
@UiField
Alert multipleWanWarn;
public TabTcpIpUi(GwtSession currentSession, NetworkTabsUi netTabs) {
initWidget(uiBinder.createAndBindUi(this));
session = currentSession;
tabs = netTabs;
helpTitle.setText("Help Text");
initForm();
dnsRead.setVisible(false);
initModal();
}
@Override
public void setDirty(boolean flag) {
m_dirty = flag;
}
@Override
public boolean isDirty() {
return m_dirty;
}
@Override
public void setNetInterface(GwtNetInterfaceConfig config) {
setDirty(true);
if ( config != null &&
config.getSubnetMask() != null &&
config.getSubnetMask().equals("255.255.255.255")) {
config.setSubnetMask("");
}
selectedNetIfConfig = config;
logger.fine(selectedNetIfConfig.getName());
logger.fine(selectedNetIfConfig.getConfigMode());
logger.fine(selectedNetIfConfig.getIpAddress());
// Remove LAN option for modems
if (selectedNetIfConfig != null && selectedNetIfConfig.getHwTypeEnum() == GwtNetIfType.MODEM) {
if (status != null) {
for (int i = 0; i < status.getItemCount(); i++) {
if (status.getItemText(i).equals(IPV4_STATUS_LAN_MESSAGE)) {
status.removeItem(i);
}
}
}
} else {
if (status != null) {
status.clear();
status.addItem(MessageUtils.get("netIPv4StatusDisabled"));
status.addItem(MessageUtils.get("netIPv4StatusEnabledLAN"));
status.addItem(MessageUtils.get("netIPv4StatusEnabledWAN"));
}
}
}
public void getUpdatedNetInterface(GwtNetInterfaceConfig updatedNetIf) {
if (form != null) {
if ( status.getSelectedItemText().equals(MessageUtils.get("netIPv4StatusDisabled"))) {
updatedNetIf.setStatus(IPV4_STATUS_DISABLED);
} else if (status.getSelectedItemText().equals(MessageUtils.get("netIPv4StatusEnabledLAN"))) {
updatedNetIf.setStatus(IPV4_STATUS_LAN);
} else {
updatedNetIf.setStatus(IPV4_STATUS_WAN);
}
if (IPV4_MODE_DHCP_MESSAGE.equals(configure.getSelectedItemText())) {
updatedNetIf.setConfigMode(IPV4_MODE_DHCP);
} else {
updatedNetIf.setConfigMode(IPV4_MODE_MANUAL);
}
if (ip.getValue() != null) {
updatedNetIf.setIpAddress(ip.getValue());
} else {
updatedNetIf.setIpAddress("");
}
if (subnet.getValue() != null) {
updatedNetIf.setSubnetMask(subnet.getValue());
} else {
updatedNetIf.setSubnetMask("");
}
if (gateway.getValue() != null) {
updatedNetIf.setGateway(gateway.getValue());
} else {
updatedNetIf.setGateway("");
}
if (dns.getValue() != null) {
updatedNetIf.setDnsServers(dns.getValue());
} else {
updatedNetIf.setDnsServers("");
}
if (search.getValue() != null) {
updatedNetIf.setSearchDomains(search.getValue());
} else {
updatedNetIf.setSearchDomains("");
}
}
}
public boolean isValid() {
boolean flag = true;
// check and make sure if 'Enabled for WAN' then either DHCP is selected
// or STATIC and a gateway is set
if ( !IPV4_STATUS_DISABLED_MESSAGE.equals(status.getSelectedValue()) &&
configure.getSelectedItemText().equalsIgnoreCase(VMSGS.netIPv4ConfigModeManual()) ) {
if ( (gateway.getValue() == null || gateway.getValue().trim().equals("")) &&
IPV4_STATUS_WAN_MESSAGE.equals(status.getSelectedValue()) ) {
groupGateway.setValidationState(ValidationState.ERROR);
helpGateway.setText(MSGS.netIPv4InvalidAddress());
flag = false;
}
if (ip.getValue() == null || ip.getValue().trim().equals("")) {
groupIp.setValidationState(ValidationState.ERROR);
helpIp.setText(MSGS.netIPv4InvalidAddress());
}
}
if ( groupIp.getValidationState().equals(ValidationState.ERROR) ||
groupSubnet.getValidationState().equals(ValidationState.ERROR) ||
groupGateway.getValidationState().equals(ValidationState.ERROR) ||
groupDns.getValidationState().equals(ValidationState.ERROR) ) {
flag = false;
}
return flag;
}
public boolean isLanEnabled() {
if (status == null) {
return false;
}
return IPV4_STATUS_LAN_MESSAGE.equals(status.getSelectedValue());
}
public boolean isWanEnabled() {
if (status == null) {
return false;
}
return IPV4_STATUS_WAN_MESSAGE.equals(status.getSelectedValue());
}
public String getStatus() {
return status.getSelectedValue();
}
public boolean isDhcp() {
if (configure == null) {
logger.log(Level.FINER, "TcpIpConfigTab.isDhcp() - m_configureCombo is null");
return true;
}
return (IPV4_MODE_DHCP_MESSAGE.equals(configure.getSelectedValue()));
}
@Override
public void refresh() {
if (isDirty()) {
setDirty(false);
if (selectedNetIfConfig == null) {
reset();
} else {
update();
}
resetValidations();
}
}
// ---------------Private Methods------------
private void initForm() {
// Labels
labelStatus.setText(MSGS.netIPv4Status());
labelConfigure.setText(MSGS.netIPv4Configure());
labelIp.setText(MSGS.netIPv4Address());
labelSubnet.setText(MSGS.netIPv4SubnetMask());
labelGateway.setText(MSGS.netIPv4Gateway());
labelDns.setText(MSGS.netIPv4DNSServers());
labelSearch.setText(MSGS.netIPv4SearchDomains());
for (GwtNetIfConfigMode mode : GwtNetIfConfigMode.values()) {
configure.addItem(MessageUtils.get(mode.name()));
}
// Populate status list
if (selectedNetIfConfig != null && selectedNetIfConfig.getHwTypeEnum() == GwtNetIfType.MODEM) {
if (status != null) {
status.clear();
status.addItem(MessageUtils.get("netIPv4StatusDisabled"));
status.addItem(MessageUtils.get("netIPv4StatusEnabledWAN"));
}
} else {
if (status != null) {
status.clear();
status.addItem(MessageUtils.get("netIPv4StatusDisabled"));
status.addItem(MessageUtils.get("netIPv4StatusEnabledLAN"));
status.addItem(MessageUtils.get("netIPv4StatusEnabledWAN"));
}
}
// SetTooltips
// Status
status.addMouseOverHandler(new MouseOverHandler() {
@Override
public void onMouseOver(MouseOverEvent event) {
if (status.isEnabled()) {
helpText.clear();
helpText.add(new Span(MSGS.netIPv4ModemToolTipStatus()));
}
}
});
status.addMouseOutHandler(new MouseOutHandler() {
@Override
public void onMouseOut(MouseOutEvent event) {
resetHelp();
}
});
status.addChangeHandler(new ChangeHandler() {
@Override
public void onChange(ChangeEvent event) {
setDirty(true);
tabs.adjustInterfaceTabs();
//TODO: to disable if disabled selected
// if (VMSGS.netIPv4StatusDisabled().equals(status.getSelectedValue())) {
// // Using DHCP selected
// configure.setEnabled(false);
// ip.setEnabled(false);
// subnet.setEnabled(false);
// gateway.setEnabled(false);
// renew.setEnabled(false);
// dnsRead.setVisible(false);
// dns.setVisible(false);
//
// } else {
refreshForm();
// }
// Check for other WAN interfaces if current interface is
// changed to WAN
if (isWanEnabled()) {
EntryClassUi.showWaitModal();
gwtNetworkService.findNetInterfaceConfigurations(new AsyncCallback<ArrayList<GwtNetInterfaceConfig>>() {
public void onFailure(Throwable caught) {
EntryClassUi.hideWaitModal();
FailureHandler.handle(caught);
}
public void onSuccess(ArrayList<GwtNetInterfaceConfig> result) {
EntryClassUi.hideWaitModal();
for (GwtNetInterfaceConfig config : result) {
if (config.getStatusEnum().equals(GwtNetIfStatus.netIPv4StatusEnabledWAN) && !config.getName().equals(selectedNetIfConfig.getName())) {
logger.log(Level.SEVERE, "Error: Status Invalid");
wanModal.show();
break;
}
}
}
});
}
}
});
// Configure
configure.addMouseOverHandler(new MouseOverHandler() {
@Override
public void onMouseOver(MouseOverEvent event) {
if (configure.isEnabled()) {
helpText.clear();
helpText.add(new Span(MSGS.netIPv4ToolTipConfigure()));
}
}
});
configure.addMouseOutHandler(new MouseOutHandler() {
@Override
public void onMouseOut(MouseOutEvent event) {
resetHelp();
}
});
configure.addChangeHandler(new ChangeHandler() {
@Override
public void onChange(ChangeEvent event) {
setDirty(true);
tabs.adjustInterfaceTabs();
refreshForm();
resetValidations();
}
});
// Initial view of configure
if (configure.getSelectedItemText().equalsIgnoreCase(VMSGS.netIPv4ConfigModeDHCP())) {
// Using DHCP selected
ip.setEnabled(false);
subnet.setEnabled(false);
gateway.setEnabled(false);
renew.setEnabled(true);
} else if (configure.getSelectedItemText().equalsIgnoreCase(VMSGS.netIPv4ConfigModeManual())) {
// Manually selected
ip.setEnabled(true);
subnet.setEnabled(true);
gateway.setEnabled(true);
renew.setEnabled(false);
}
// IP Address
ip.addMouseOverHandler(new MouseOverHandler() {
@Override
public void onMouseOver(MouseOverEvent event) {
if (ip.isEnabled()) {
helpText.clear();
helpText.add(new Span(MSGS.netIPv4ToolTipAddress()));
}
}
});
ip.addMouseOutHandler(new MouseOutHandler() {
@Override
public void onMouseOut(MouseOutEvent event) {
resetHelp();
}
});
ip.addBlurHandler(new BlurHandler() {
@Override
public void onBlur(BlurEvent event) {
setDirty(true);
if (!ip.getText().trim().matches(FieldType.IPv4_ADDRESS.getRegex())
|| !(ip.getText().trim().length() > 0)) {
groupIp.setValidationState(ValidationState.ERROR);
helpIp.setText(MSGS.netIPv4InvalidAddress());
} else {
groupIp.setValidationState(ValidationState.NONE);
helpIp.setText("");
}
}
});
// Subnet Mask
subnet.addMouseOverHandler(new MouseOverHandler() {
@Override
public void onMouseOver(MouseOverEvent event) {
if (subnet.isEnabled()) {
helpText.clear();
helpText.add(new Span(MSGS.netIPv4ToolTipSubnetMask()));
}
}
});
subnet.addMouseOutHandler(new MouseOutHandler() {
@Override
public void onMouseOut(MouseOutEvent event) {
resetHelp();
}
});
subnet.addChangeHandler(new ChangeHandler() {
@Override
public void onChange(ChangeEvent event) {
setDirty(true);
if (!subnet.getText().trim().matches(FieldType.IPv4_ADDRESS.getRegex()) &&
subnet.getText().trim().length() > 0) {
groupSubnet.setValidationState(ValidationState.ERROR);
helpSubnet.setText(MSGS.netIPv4InvalidAddress());
} else {
groupSubnet.setValidationState(ValidationState.NONE);
helpSubnet.setText("");
}
}
});
// Gateway
gateway.addMouseOverHandler(new MouseOverHandler() {
@Override
public void onMouseOver(MouseOverEvent event) {
if (gateway.isEnabled()) {
helpText.clear();
helpText.add(new Span(MSGS.netIPv4ToolTipGateway()));
}
}
});
gateway.addMouseOutHandler(new MouseOutHandler() {
@Override
public void onMouseOut(MouseOutEvent event) {
resetHelp();
}
});
gateway.addChangeHandler(new ChangeHandler() {
@Override
public void onChange(ChangeEvent event) {
setDirty(true);
if (!gateway.getText().trim().matches(FieldType.IPv4_ADDRESS.getRegex()) &&
gateway.getText().trim().length() > 0) {
groupGateway.setValidationState(ValidationState.ERROR);
helpGateway.setText(MSGS.netIPv4InvalidAddress());
} else {
groupGateway.setValidationState(ValidationState.NONE);
helpGateway.setText("");
}
}
});
// DNS Servers
dns.addMouseOverHandler(new MouseOverHandler() {
@Override
public void onMouseOver(MouseOverEvent event) {
if (dns.isEnabled()) {
helpText.clear();
helpText.add(new Span(MSGS.netIPv4ToolTipDns()));
}
}
});
dns.addMouseOutHandler(new MouseOutHandler() {
@Override
public void onMouseOut(MouseOutEvent event) {
resetHelp();
}
});
dns.addChangeHandler(new ChangeHandler() {
@Override
public void onChange(ChangeEvent event) {
setDirty(true);
if (!dns.getText().trim().matches(FieldType.IPv4_ADDRESS.getRegex()) &&
dns.getText().trim().length() > 0) {
groupDns.setValidationState(ValidationState.ERROR);
helpDns.setText(MSGS.netIPv4InvalidAddress());
} else {
groupDns.setValidationState(ValidationState.NONE);
helpDns.setText("");
}
}
});
// Renew DHCP Lease
renew.setText(MSGS.netIPv4RenewDHCPLease());
renew.addClickHandler(new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
EntryClassUi.showWaitModal();
gwtXSRFService.generateSecurityToken(new AsyncCallback<GwtXSRFToken> () {
@Override
public void onFailure(Throwable ex) {
EntryClassUi.hideWaitModal();
FailureHandler.handle(ex);
}
@Override
public void onSuccess(GwtXSRFToken token) {
gwtNetworkService.renewDhcpLease(token, selectedNetIfConfig.getName(), new AsyncCallback<Void>() {
@Override
public void onFailure(Throwable ex) {
EntryClassUi.hideWaitModal();
FailureHandler.handle(ex);
}
@Override
public void onSuccess(Void result) {
refresh();
EntryClassUi.hideWaitModal();
}
});
}
});
}
});
renew.addMouseOverHandler(new MouseOverHandler() {
@Override
public void onMouseOver(MouseOverEvent event) {
if (renew.isEnabled()) {
helpText.clear();
helpText.add(new Span(MSGS.netIPv4ToolTipRenew()));
}
}
});
renew.addMouseOutHandler(new MouseOutHandler() {
@Override
public void onMouseOut(MouseOutEvent event) {
resetHelp();
}
});
}
private void resetHelp() {
helpText.clear();
helpText.add(new Span("Mouse over enabled items on the left to see help text."));
}
private void update() {
if (selectedNetIfConfig != null) {
// Status
for (int i = 0; i < status.getItemCount(); i++) {
if (status.getItemText(i).equals(MessageUtils.get(selectedNetIfConfig.getStatus()))) {
status.setSelectedIndex(i);
break;
}
}
// Configure
for (int i = 0; i < configure.getItemCount(); i++) {
if (configure.getValue(i).equals(MessageUtils.get(selectedNetIfConfig.getConfigMode()))) {
configure.setSelectedIndex(i);
break;
}
}
tabs.adjustInterfaceTabs();
ip.setText(selectedNetIfConfig.getIpAddress());
subnet.setText(selectedNetIfConfig.getSubnetMask());
gateway.setText(selectedNetIfConfig.getGateway());
if (selectedNetIfConfig.getReadOnlyDnsServers() != null) {
dnsRead.setText(selectedNetIfConfig.getReadOnlyDnsServers());
dnsRead.setVisible(true);// ???
} else {
dnsRead.setText("");
dnsRead.setVisible(false);
}
if (selectedNetIfConfig.getDnsServers() != null) {
dns.setValue(selectedNetIfConfig.getDnsServers());
dns.setVisible(true);
} else {
dns.setVisible(false);
}
if (selectedNetIfConfig.getSearchDomains() != null) {
search.setText(selectedNetIfConfig.getSearchDomains());
} else {
search.setText("");
}
refreshForm();
}
}
private void refreshForm() {
if (selectedNetIfConfig != null && selectedNetIfConfig.getHwTypeEnum() == GwtNetIfType.MODEM) {
status.setEnabled(true);
configure.setEnabled(false);
ip.setEnabled(false);
subnet.setEnabled(false);
gateway.setEnabled(false);
dns.setEnabled(true);
search.setEnabled(false);
configure.setSelectedIndex(configure.getItemText(0).equals(IPV4_MODE_DHCP_MESSAGE) ? 0 : 1);
} else {
if (VMSGS.netIPv4StatusDisabled().equals(status.getSelectedValue())) {
String configureVal= configure.getItemText(0);
configure.setSelectedIndex(configureVal.equals(IPV4_MODE_DHCP_MESSAGE) ? 0 : 1);
ip.setText("");
configure.setEnabled(false);
ip.setEnabled(false);
subnet.setEnabled(false);
gateway.setEnabled(false);
dns.setEnabled(false);
search.setEnabled(false);
subnet.setText("");
gateway.setText("");
dns.setText("");
search.setText("");
} else {
configure.setEnabled(true);
String configureValue = configure.getSelectedValue();
if (configureValue.equals(IPV4_MODE_DHCP_MESSAGE)) {
ip.setEnabled(false);
subnet.setEnabled(false);
gateway.setEnabled(false);
renew.setEnabled(true);
} else {
ip.setEnabled(true);
subnet.setEnabled(true);
gateway.setEnabled(true);
if (status.getSelectedValue().equals(IPV4_STATUS_WAN_MESSAGE)) {
// enable gateway field
gateway.setEnabled(true);
} else {
gateway.setText("");
gateway.setEnabled(false);
}
renew.setEnabled(false);
}
dns.setEnabled(true);
search.setEnabled(true);
}
}
// Show read-only dns field when DHCP is selected and there are no
// custom DNS entries
String configureValue = configure.getSelectedItemText();
if ( configureValue.equals(IPV4_MODE_DHCP_MESSAGE) &&
(dns.getValue() == null || dns.getValue().isEmpty()) ) {
dnsRead.setVisible(true);
} else {
dnsRead.setVisible(false);
}
}
private void reset() {
status.setSelectedIndex(0);
configure.setSelectedIndex(0);
ip.setText("");
subnet.setText("");
gateway.setText("");
dns.setText("");
search.setText("");
update();
}
private void resetValidations() {
groupIp.setValidationState(ValidationState.NONE);
helpIp.setText("");
groupSubnet.setValidationState(ValidationState.NONE);
helpSubnet.setText("");
groupGateway.setValidationState(ValidationState.NONE);
helpGateway.setText("");
groupDns.setValidationState(ValidationState.NONE);
helpDns.setText("");
}
private void initModal() {
wanModal.setTitle("Warning!");
}
}
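/*
 * A minimal illustrative sketch (not part of the Kura source above): the change in
 * isValid() now marks the gateway field as invalid and shows a help message when a
 * statically configured WAN interface has no gateway. The GWT-free rule below mirrors
 * that check; the class, method, and message strings are assumptions used only for
 * this example (the real code uses MSGS.netIPv4InvalidAddress()).
 */
class GatewayRuleSketch {

    /** Returns a help message when a gateway is required but missing, or null when valid. */
    static String validateGateway(String status, String configureMode, String gateway) {
        boolean wan = "netIPv4StatusEnabledWAN".equals(status);
        boolean manual = "netIPv4ConfigModeManual".equals(configureMode);
        if (wan && manual && (gateway == null || gateway.trim().isEmpty())) {
            return "Invalid IPv4 address";
        }
        return null;
    }

    public static void main(String[] args) {
        // WAN + manual + empty gateway -> error message, as in the updated isValid()
        System.out.println(validateGateway("netIPv4StatusEnabledWAN", "netIPv4ConfigModeManual", ""));
        // LAN interfaces do not require a gateway -> null
        System.out.println(validateGateway("netIPv4StatusEnabledLAN", "netIPv4ConfigModeManual", ""));
    }
}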
|
kura/org.eclipse.kura.web2/src/main/java/org/eclipse/kura/web/client/ui/Network/TabTcpIpUi.java
|
/*******************************************************************************
* Copyright (c) 2011, 2016 Eurotech and/or its affiliates
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Eurotech
*******************************************************************************/
package org.eclipse.kura.web.client.ui.Network;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.eclipse.kura.web.client.messages.Messages;
import org.eclipse.kura.web.client.messages.ValidationMessages;
import org.eclipse.kura.web.client.ui.EntryClassUi;
import org.eclipse.kura.web.client.util.FailureHandler;
import org.eclipse.kura.web.client.util.MessageUtils;
import org.eclipse.kura.web.client.util.TextFieldValidator.FieldType;
import org.eclipse.kura.web.shared.model.GwtNetIfConfigMode;
import org.eclipse.kura.web.shared.model.GwtNetIfStatus;
import org.eclipse.kura.web.shared.model.GwtNetIfType;
import org.eclipse.kura.web.shared.model.GwtNetInterfaceConfig;
import org.eclipse.kura.web.shared.model.GwtSession;
import org.eclipse.kura.web.shared.model.GwtXSRFToken;
import org.eclipse.kura.web.shared.service.GwtNetworkService;
import org.eclipse.kura.web.shared.service.GwtNetworkServiceAsync;
import org.eclipse.kura.web.shared.service.GwtSecurityTokenService;
import org.eclipse.kura.web.shared.service.GwtSecurityTokenServiceAsync;
import org.gwtbootstrap3.client.ui.Alert;
import org.gwtbootstrap3.client.ui.Button;
import org.gwtbootstrap3.client.ui.Form;
import org.gwtbootstrap3.client.ui.FormControlStatic;
import org.gwtbootstrap3.client.ui.FormGroup;
import org.gwtbootstrap3.client.ui.FormLabel;
import org.gwtbootstrap3.client.ui.HelpBlock;
import org.gwtbootstrap3.client.ui.ListBox;
import org.gwtbootstrap3.client.ui.Modal;
import org.gwtbootstrap3.client.ui.PanelBody;
import org.gwtbootstrap3.client.ui.PanelHeader;
import org.gwtbootstrap3.client.ui.TextBox;
import org.gwtbootstrap3.client.ui.constants.ValidationState;
import org.gwtbootstrap3.client.ui.html.Span;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.BlurEvent;
import com.google.gwt.event.dom.client.BlurHandler;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.MouseOutEvent;
import com.google.gwt.event.dom.client.MouseOutHandler;
import com.google.gwt.event.dom.client.MouseOverEvent;
import com.google.gwt.event.dom.client.MouseOverHandler;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.Widget;
public class TabTcpIpUi extends Composite implements Tab {
private static final String IPV4_MODE_MANUAL = GwtNetIfConfigMode.netIPv4ConfigModeManual.name();
private static final String IPV4_MODE_DHCP = GwtNetIfConfigMode.netIPv4ConfigModeDHCP.name();
private static final String IPV4_MODE_DHCP_MESSAGE = MessageUtils.get(IPV4_MODE_DHCP);
private static final String IPV4_STATUS_WAN = GwtNetIfStatus.netIPv4StatusEnabledWAN.name();
private static final String IPV4_STATUS_WAN_MESSAGE = MessageUtils.get(IPV4_STATUS_WAN);
private static final String IPV4_STATUS_LAN = GwtNetIfStatus.netIPv4StatusEnabledLAN.name();
private static final String IPV4_STATUS_LAN_MESSAGE = MessageUtils.get(IPV4_STATUS_LAN);
private static final String IPV4_STATUS_DISABLED = GwtNetIfStatus.netIPv4StatusDisabled.name();
private static final String IPV4_STATUS_DISABLED_MESSAGE = MessageUtils.get(IPV4_STATUS_DISABLED);
private static TabTcpIpUiUiBinder uiBinder = GWT.create(TabTcpIpUiUiBinder.class);
private static final Logger logger = Logger.getLogger(TabTcpIpUi.class.getSimpleName());
private static final Messages MSGS = GWT.create(Messages.class);
private static final ValidationMessages VMSGS = GWT.create(ValidationMessages.class);
private final GwtSecurityTokenServiceAsync gwtXSRFService = GWT.create(GwtSecurityTokenService.class);
private final GwtNetworkServiceAsync gwtNetworkService = GWT.create(GwtNetworkService.class);
interface TabTcpIpUiUiBinder extends UiBinder<Widget, TabTcpIpUi> {
}
GwtSession session;
boolean m_dirty;
GwtNetInterfaceConfig selectedNetIfConfig;
NetworkTabsUi tabs;
@UiField
FormGroup groupIp, groupSubnet, groupGateway, groupDns;
@UiField
FormLabel labelStatus, labelConfigure, labelIp, labelSubnet, labelGateway,
labelDns, labelSearch;
@UiField
HelpBlock helpIp, helpSubnet, helpGateway, helpDns;
@UiField
TextBox ip, subnet, gateway, dns, search;
@UiField
ListBox status, configure;
@UiField
Button renew;
@UiField
PanelHeader helpTitle;
@UiField
PanelBody helpText;
@UiField
Form form;
@UiField
FormControlStatic dnsRead;
@UiField
Modal wanModal;
@UiField
Alert multipleWanWarn;
public TabTcpIpUi(GwtSession currentSession, NetworkTabsUi netTabs) {
initWidget(uiBinder.createAndBindUi(this));
session = currentSession;
tabs = netTabs;
helpTitle.setText("Help Text");
initForm();
dnsRead.setVisible(false);
initModal();
}
@Override
public void setDirty(boolean flag) {
m_dirty = flag;
}
@Override
public boolean isDirty() {
return m_dirty;
}
@Override
public void setNetInterface(GwtNetInterfaceConfig config) {
setDirty(true);
if ( config != null &&
config.getSubnetMask() != null &&
config.getSubnetMask().equals("255.255.255.255")) {
config.setSubnetMask("");
}
selectedNetIfConfig = config;
logger.fine(selectedNetIfConfig.getName());
logger.fine(selectedNetIfConfig.getConfigMode());
logger.fine(selectedNetIfConfig.getIpAddress());
// Remove LAN option for modems
if (selectedNetIfConfig != null && selectedNetIfConfig.getHwTypeEnum() == GwtNetIfType.MODEM) {
if (status != null) {
for (int i = 0; i < status.getItemCount(); i++) {
if (status.getItemText(i).equals(IPV4_STATUS_LAN_MESSAGE)) {
status.removeItem(i);
}
}
}
} else {
if (status != null) {
status.clear();
status.addItem(MessageUtils.get("netIPv4StatusDisabled"));
status.addItem(MessageUtils.get("netIPv4StatusEnabledLAN"));
status.addItem(MessageUtils.get("netIPv4StatusEnabledWAN"));
}
}
}
public void getUpdatedNetInterface(GwtNetInterfaceConfig updatedNetIf) {
if (form != null) {
if ( status.getSelectedItemText().equals(MessageUtils.get("netIPv4StatusDisabled"))) {
updatedNetIf.setStatus(IPV4_STATUS_DISABLED);
} else if (status.getSelectedItemText().equals(MessageUtils.get("netIPv4StatusEnabledLAN"))) {
updatedNetIf.setStatus(IPV4_STATUS_LAN);
} else {
updatedNetIf.setStatus(IPV4_STATUS_WAN);
}
if (IPV4_MODE_DHCP_MESSAGE.equals(configure.getSelectedItemText())) {
updatedNetIf.setConfigMode(IPV4_MODE_DHCP);
} else {
updatedNetIf.setConfigMode(IPV4_MODE_MANUAL);
}
if (ip.getValue() != null) {
updatedNetIf.setIpAddress(ip.getValue());
} else {
updatedNetIf.setIpAddress("");
}
if (subnet.getValue() != null) {
updatedNetIf.setSubnetMask(subnet.getValue());
} else {
updatedNetIf.setSubnetMask("");
}
if (gateway.getValue() != null) {
updatedNetIf.setGateway(gateway.getValue());
} else {
updatedNetIf.setGateway("");
}
if (dns.getValue() != null) {
updatedNetIf.setDnsServers(dns.getValue());
} else {
updatedNetIf.setDnsServers("");
}
if (search.getValue() != null) {
updatedNetIf.setSearchDomains(search.getValue());
} else {
updatedNetIf.setSearchDomains("");
}
}
}
public boolean isValid() {
boolean flag = true;
// check and make sure if 'Enabled for WAN' then either DHCP is selected
// or STATIC and a gateway is set
if ( !IPV4_STATUS_DISABLED_MESSAGE.equals(status.getSelectedValue()) &&
configure.getSelectedItemText().equalsIgnoreCase(VMSGS.netIPv4ConfigModeManual()) ) {
if ( (gateway.getValue() == null || gateway.getValue().trim().equals("")) &&
IPV4_STATUS_WAN_MESSAGE.equals(status.getSelectedValue()) ) {
flag = false;
}
if (ip.getValue() == null || ip.getValue().trim().equals("")) {
groupIp.setValidationState(ValidationState.ERROR);
helpIp.setText(MSGS.netIPv4InvalidAddress());
}
}
if ( groupIp.getValidationState().equals(ValidationState.ERROR) ||
groupSubnet.getValidationState().equals(ValidationState.ERROR) ||
groupGateway.getValidationState().equals(ValidationState.ERROR) ||
groupDns.getValidationState().equals(ValidationState.ERROR) ) {
flag = false;
}
return flag;
}
public boolean isLanEnabled() {
if (status == null) {
return false;
}
return IPV4_STATUS_LAN_MESSAGE.equals(status.getSelectedValue());
}
public boolean isWanEnabled() {
if (status == null) {
return false;
}
return IPV4_STATUS_WAN_MESSAGE.equals(status.getSelectedValue());
}
public String getStatus() {
return status.getSelectedValue();
}
public boolean isDhcp() {
if (configure == null) {
logger.log(Level.FINER, "TcpIpConfigTab.isDhcp() - m_configureCombo is null");
return true;
}
return (IPV4_MODE_DHCP_MESSAGE.equals(configure.getSelectedValue()));
}
@Override
public void refresh() {
if (isDirty()) {
setDirty(false);
if (selectedNetIfConfig == null) {
reset();
} else {
update();
}
resetValidations();
}
}
// ---------------Private Methods------------
private void initForm() {
// Labels
labelStatus.setText(MSGS.netIPv4Status());
labelConfigure.setText(MSGS.netIPv4Configure());
labelIp.setText(MSGS.netIPv4Address());
labelSubnet.setText(MSGS.netIPv4SubnetMask());
labelGateway.setText(MSGS.netIPv4Gateway());
labelDns.setText(MSGS.netIPv4DNSServers());
labelSearch.setText(MSGS.netIPv4SearchDomains());
for (GwtNetIfConfigMode mode : GwtNetIfConfigMode.values()) {
configure.addItem(MessageUtils.get(mode.name()));
}
// Populate status list
if (selectedNetIfConfig != null && selectedNetIfConfig.getHwTypeEnum() == GwtNetIfType.MODEM) {
if (status != null) {
status.clear();
status.addItem(MessageUtils.get("netIPv4StatusDisabled"));
status.addItem(MessageUtils.get("netIPv4StatusEnabledWAN"));
}
} else {
if (status != null) {
status.clear();
status.addItem(MessageUtils.get("netIPv4StatusDisabled"));
status.addItem(MessageUtils.get("netIPv4StatusEnabledLAN"));
status.addItem(MessageUtils.get("netIPv4StatusEnabledWAN"));
}
}
// SetTooltips
// Status
status.addMouseOverHandler(new MouseOverHandler() {
@Override
public void onMouseOver(MouseOverEvent event) {
if (status.isEnabled()) {
helpText.clear();
helpText.add(new Span(MSGS.netIPv4ModemToolTipStatus()));
}
}
});
status.addMouseOutHandler(new MouseOutHandler() {
@Override
public void onMouseOut(MouseOutEvent event) {
resetHelp();
}
});
status.addChangeHandler(new ChangeHandler() {
@Override
public void onChange(ChangeEvent event) {
setDirty(true);
tabs.adjustInterfaceTabs();
//TODO: to disable if disabled selected
// if (VMSGS.netIPv4StatusDisabled().equals(status.getSelectedValue())) {
// // Using DHCP selected
// configure.setEnabled(false);
// ip.setEnabled(false);
// subnet.setEnabled(false);
// gateway.setEnabled(false);
// renew.setEnabled(false);
// dnsRead.setVisible(false);
// dns.setVisible(false);
//
// } else {
refreshForm();
// }
// Check for other WAN interfaces if current interface is
// changed to WAN
if (isWanEnabled()) {
EntryClassUi.showWaitModal();
gwtNetworkService.findNetInterfaceConfigurations(new AsyncCallback<ArrayList<GwtNetInterfaceConfig>>() {
public void onFailure(Throwable caught) {
EntryClassUi.hideWaitModal();
FailureHandler.handle(caught);
}
public void onSuccess(ArrayList<GwtNetInterfaceConfig> result) {
EntryClassUi.hideWaitModal();
for (GwtNetInterfaceConfig config : result) {
if (config.getStatusEnum().equals(GwtNetIfStatus.netIPv4StatusEnabledWAN) && !config.getName().equals(selectedNetIfConfig.getName())) {
logger.log(Level.SEVERE, "Error: Status Invalid");
wanModal.show();
break;
}
}
}
});
}
}
});
// Configure
configure.addMouseOverHandler(new MouseOverHandler() {
@Override
public void onMouseOver(MouseOverEvent event) {
if (configure.isEnabled()) {
helpText.clear();
helpText.add(new Span(MSGS.netIPv4ToolTipConfigure()));
}
}
});
configure.addMouseOutHandler(new MouseOutHandler() {
@Override
public void onMouseOut(MouseOutEvent event) {
resetHelp();
}
});
configure.addChangeHandler(new ChangeHandler() {
@Override
public void onChange(ChangeEvent event) {
setDirty(true);
tabs.adjustInterfaceTabs();
refreshForm();
resetValidations();
}
});
// Initial view of configure
if (configure.getSelectedItemText().equalsIgnoreCase(VMSGS.netIPv4ConfigModeDHCP())) {
// Using DHCP selected
ip.setEnabled(false);
subnet.setEnabled(false);
gateway.setEnabled(false);
renew.setEnabled(true);
} else if (configure.getSelectedItemText().equalsIgnoreCase(VMSGS.netIPv4ConfigModeManual())) {
// Manually selected
ip.setEnabled(true);
subnet.setEnabled(true);
gateway.setEnabled(true);
renew.setEnabled(false);
}
// IP Address
ip.addMouseOverHandler(new MouseOverHandler() {
@Override
public void onMouseOver(MouseOverEvent event) {
if (ip.isEnabled()) {
helpText.clear();
helpText.add(new Span(MSGS.netIPv4ToolTipAddress()));
}
}
});
ip.addMouseOutHandler(new MouseOutHandler() {
@Override
public void onMouseOut(MouseOutEvent event) {
resetHelp();
}
});
ip.addBlurHandler(new BlurHandler() {
@Override
public void onBlur(BlurEvent event) {
setDirty(true);
if (!ip.getText().trim().matches(FieldType.IPv4_ADDRESS.getRegex())
|| !(ip.getText().trim().length() > 0)) {
groupIp.setValidationState(ValidationState.ERROR);
helpIp.setText(MSGS.netIPv4InvalidAddress());
} else {
groupIp.setValidationState(ValidationState.NONE);
helpIp.setText("");
}
}
});
// Subnet Mask
subnet.addMouseOverHandler(new MouseOverHandler() {
@Override
public void onMouseOver(MouseOverEvent event) {
if (subnet.isEnabled()) {
helpText.clear();
helpText.add(new Span(MSGS.netIPv4ToolTipSubnetMask()));
}
}
});
subnet.addMouseOutHandler(new MouseOutHandler() {
@Override
public void onMouseOut(MouseOutEvent event) {
resetHelp();
}
});
subnet.addChangeHandler(new ChangeHandler() {
@Override
public void onChange(ChangeEvent event) {
setDirty(true);
if (!subnet.getText().trim().matches(FieldType.IPv4_ADDRESS.getRegex()) &&
subnet.getText().trim().length() > 0) {
groupSubnet.setValidationState(ValidationState.ERROR);
helpSubnet.setText(MSGS.netIPv4InvalidAddress());
} else {
groupSubnet.setValidationState(ValidationState.NONE);
helpSubnet.setText("");
}
}
});
// Gateway
gateway.addMouseOverHandler(new MouseOverHandler() {
@Override
public void onMouseOver(MouseOverEvent event) {
if (gateway.isEnabled()) {
helpText.clear();
helpText.add(new Span(MSGS.netIPv4ToolTipGateway()));
}
}
});
gateway.addMouseOutHandler(new MouseOutHandler() {
@Override
public void onMouseOut(MouseOutEvent event) {
resetHelp();
}
});
gateway.addChangeHandler(new ChangeHandler() {
@Override
public void onChange(ChangeEvent event) {
setDirty(true);
if (!gateway.getText().trim().matches(FieldType.IPv4_ADDRESS.getRegex()) &&
gateway.getText().trim().length() > 0) {
groupGateway.setValidationState(ValidationState.ERROR);
helpGateway.setText(MSGS.netIPv4InvalidAddress());
} else {
groupGateway.setValidationState(ValidationState.NONE);
helpGateway.setText("");
}
}
});
// DNS Servers
dns.addMouseOverHandler(new MouseOverHandler() {
@Override
public void onMouseOver(MouseOverEvent event) {
if (dns.isEnabled()) {
helpText.clear();
helpText.add(new Span(MSGS.netIPv4ToolTipDns()));
}
}
});
dns.addMouseOutHandler(new MouseOutHandler() {
@Override
public void onMouseOut(MouseOutEvent event) {
resetHelp();
}
});
dns.addChangeHandler(new ChangeHandler() {
@Override
public void onChange(ChangeEvent event) {
setDirty(true);
if (!dns.getText().trim().matches(FieldType.IPv4_ADDRESS.getRegex()) &&
dns.getText().trim().length() > 0) {
groupDns.setValidationState(ValidationState.ERROR);
helpDns.setText(MSGS.netIPv4InvalidAddress());
} else {
groupDns.setValidationState(ValidationState.NONE);
helpDns.setText("");
}
}
});
// Renew DHCP Lease
renew.setText(MSGS.netIPv4RenewDHCPLease());
renew.addClickHandler(new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
EntryClassUi.showWaitModal();
gwtXSRFService.generateSecurityToken(new AsyncCallback<GwtXSRFToken> () {
@Override
public void onFailure(Throwable ex) {
EntryClassUi.hideWaitModal();
FailureHandler.handle(ex);
}
@Override
public void onSuccess(GwtXSRFToken token) {
gwtNetworkService.renewDhcpLease(token, selectedNetIfConfig.getName(), new AsyncCallback<Void>() {
@Override
public void onFailure(Throwable ex) {
EntryClassUi.hideWaitModal();
FailureHandler.handle(ex);
}
@Override
public void onSuccess(Void result) {
refresh();
EntryClassUi.hideWaitModal();
}
});
}
});
}
});
renew.addMouseOverHandler(new MouseOverHandler() {
@Override
public void onMouseOver(MouseOverEvent event) {
if (renew.isEnabled()) {
helpText.clear();
helpText.add(new Span(MSGS.netIPv4ToolTipRenew()));
}
}
});
renew.addMouseOutHandler(new MouseOutHandler() {
@Override
public void onMouseOut(MouseOutEvent event) {
resetHelp();
}
});
}
private void resetHelp() {
helpText.clear();
helpText.add(new Span("Mouse over enabled items on the left to see help text."));
}
private void update() {
if (selectedNetIfConfig != null) {
// Status
for (int i = 0; i < status.getItemCount(); i++) {
if (status.getItemText(i).equals(MessageUtils.get(selectedNetIfConfig.getStatus()))) {
status.setSelectedIndex(i);
break;
}
}
// Configure
for (int i = 0; i < configure.getItemCount(); i++) {
if (configure.getValue(i).equals(MessageUtils.get(selectedNetIfConfig.getConfigMode()))) {
configure.setSelectedIndex(i);
break;
}
}
tabs.adjustInterfaceTabs();
ip.setText(selectedNetIfConfig.getIpAddress());
subnet.setText(selectedNetIfConfig.getSubnetMask());
gateway.setText(selectedNetIfConfig.getGateway());
if (selectedNetIfConfig.getReadOnlyDnsServers() != null) {
dnsRead.setText(selectedNetIfConfig.getReadOnlyDnsServers());
dnsRead.setVisible(true);// ???
} else {
dnsRead.setText("");
dnsRead.setVisible(false);
}
if (selectedNetIfConfig.getDnsServers() != null) {
dns.setValue(selectedNetIfConfig.getDnsServers());
dns.setVisible(true);
} else {
dns.setVisible(false);
}
if (selectedNetIfConfig.getSearchDomains() != null) {
search.setText(selectedNetIfConfig.getSearchDomains());
} else {
search.setText("");
}
refreshForm();
}
}
private void refreshForm() {
if (selectedNetIfConfig != null && selectedNetIfConfig.getHwTypeEnum() == GwtNetIfType.MODEM) {
status.setEnabled(true);
configure.setEnabled(false);
ip.setEnabled(false);
subnet.setEnabled(false);
gateway.setEnabled(false);
dns.setEnabled(true);
search.setEnabled(false);
configure.setSelectedIndex(configure.getItemText(0).equals(IPV4_MODE_DHCP_MESSAGE) ? 0 : 1);
} else {
if (VMSGS.netIPv4StatusDisabled().equals(status.getSelectedValue())) {
String configureVal= configure.getItemText(0);
configure.setSelectedIndex(configureVal.equals(IPV4_MODE_DHCP_MESSAGE) ? 0 : 1);
ip.setText("");
configure.setEnabled(false);
ip.setEnabled(false);
subnet.setEnabled(false);
gateway.setEnabled(false);
dns.setEnabled(false);
search.setEnabled(false);
subnet.setText("");
gateway.setText("");
dns.setText("");
search.setText("");
} else {
configure.setEnabled(true);
String configureValue = configure.getSelectedValue();
if (configureValue.equals(IPV4_MODE_DHCP_MESSAGE)) {
ip.setEnabled(false);
subnet.setEnabled(false);
gateway.setEnabled(false);
renew.setEnabled(true);
} else {
ip.setEnabled(true);
subnet.setEnabled(true);
gateway.setEnabled(true);
if (status.getSelectedValue().equals(IPV4_STATUS_WAN_MESSAGE)) {
// enable gateway field
gateway.setEnabled(true);
} else {
gateway.setText("");
gateway.setEnabled(false);
}
renew.setEnabled(false);
}
dns.setEnabled(true);
search.setEnabled(true);
}
}
// Show read-only dns field when DHCP is selected and there are no
// custom DNS entries
String configureValue = configure.getSelectedItemText();
if ( configureValue.equals(IPV4_MODE_DHCP_MESSAGE) &&
(dns.getValue() == null || dns.getValue().isEmpty()) ) {
dnsRead.setVisible(true);
} else {
dnsRead.setVisible(false);
}
}
private void reset() {
status.setSelectedIndex(0);
configure.setSelectedIndex(0);
ip.setText("");
subnet.setText("");
gateway.setText("");
dns.setText("");
search.setText("");
update();
}
private void resetValidations() {
groupIp.setValidationState(ValidationState.NONE);
helpIp.setText("");
groupSubnet.setValidationState(ValidationState.NONE);
helpSubnet.setText("");
groupGateway.setValidationState(ValidationState.NONE);
helpGateway.setText("");
groupDns.setValidationState(ValidationState.NONE);
helpDns.setText("");
}
private void initModal() {
wanModal.setTitle("Warning!");
}
}
|
Added help message for gateway text field.
Signed-off-by: MMaiero <2b73509565f5b07fd88cb0c94e2832558b2a0b6e@eurotech.com>
|
kura/org.eclipse.kura.web2/src/main/java/org/eclipse/kura/web/client/ui/Network/TabTcpIpUi.java
|
Added help message for gateway text field.
|
|
Java
|
mpl-2.0
|
88fa083b0507c9c0f0a020fef8a48d5e6a59f807
| 0
|
PIH/openmrs-module-emrmonitor,rubailly/openmrs-module-emrmonitor,PIH/openmrs-module-emrmonitor,rubailly/openmrs-module-emrmonitor,PIH/openmrs-module-emrmonitor,rubailly/openmrs-module-emrmonitor,PIH/openmrs-module-emrmonitor,rubailly/openmrs-module-emrmonitor
|
/**
* The contents of this file are subject to the OpenMRS Public License
* Version 1.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://license.openmrs.org
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* Copyright (C) OpenMRS, LLC. All Rights Reserved.
*/
package org.openmrs.module.emrmonitor.api.db.hibernate;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.Criteria;
import org.hibernate.HibernateException;
import org.hibernate.SQLQuery;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.criterion.Restrictions;
import org.openmrs.api.context.Context;
import org.openmrs.module.emrmonitor.EmrMonitorReport;
import org.openmrs.module.emrmonitor.EmrMonitorServer;
import org.openmrs.module.emrmonitor.EmrMonitorServerType;
import org.openmrs.module.emrmonitor.api.db.EmrMonitorDAO;
import java.beans.Expression;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* It is a default implementation of {@link EmrMonitorDAO}.
*/
public class HibernateEmrMonitorDAO implements EmrMonitorDAO {
protected final Log log = LogFactory.getLog(this.getClass());
private SessionFactory sessionFactory;
/**
* @param sessionFactory the sessionFactory to set
*/
public void setSessionFactory(SessionFactory sessionFactory) {
this.sessionFactory = sessionFactory;
}
/**
* @return the sessionFactory
*/
public SessionFactory getSessionFactory() {
return sessionFactory;
}
@Override
public List<EmrMonitorServer> getAllEmrMonitorServers() {
Criteria criteria = sessionFactory.getCurrentSession().createCriteria(EmrMonitorServer.class);
criteria.add(Restrictions.eq("voided", false));
List<EmrMonitorServer> list = null;
try {
list = (List<EmrMonitorServer>) criteria.list();
} catch (Exception e) {
log.error("Failed to retrieve emr monitor servers", e);
}
return list;
}
@Override
public EmrMonitorServer getEmrMonitorServerByUuid(String serverUuid) {
Criteria criteria = sessionFactory.getCurrentSession().createCriteria(EmrMonitorServer.class);
criteria.add(Restrictions.eq("uuid", serverUuid));
try {
List<EmrMonitorServer> list = (List<EmrMonitorServer>) criteria.list();
if (list != null && list.size() > 0 ) {
return (EmrMonitorServer) list.get(0);
}
} catch (Exception e) {
log.error("Failed to retrieve emr monitor server record", e);
}
return null;
}
@Override
public List<EmrMonitorServer> getEmrMonitorServerByType(EmrMonitorServerType serverType) {
Criteria criteria = sessionFactory.getCurrentSession().createCriteria(EmrMonitorServer.class);
criteria.add(Restrictions.eq("voided", false));
criteria.add(Restrictions.eq("serverType", serverType));
try {
List<EmrMonitorServer> list = (List<EmrMonitorServer>) criteria.list();
if (list != null && list.size() > 0 ) {
return list;
}
} catch (Exception e) {
log.error("Failed to retrieve emr monitor servers", e);
}
return null;
}
@Override
public EmrMonitorServer saveEmrMonitorServer(EmrMonitorServer server) {
try{
sessionFactory.getCurrentSession().saveOrUpdate(server);
} catch (Exception e) {
log.error("Error saving EmrMonitor Server", e);
}
return server;
}
@Override
public void deleteEmrMonitorServer(EmrMonitorServer server) {
sessionFactory.getCurrentSession().delete(server);
}
@Override
public EmrMonitorReport saveEmrMonitorReport(EmrMonitorReport report) {
try{
sessionFactory.getCurrentSession().saveOrUpdate(report);
} catch (Exception e) {
log.error("Error saving EmrMonitorReport", e);
}
return report;
}
@Override
public List<EmrMonitorReport> getEmrMonitorReportByServerAndStatus(EmrMonitorServer server, EmrMonitorReport.SubmissionStatus status) {
Criteria criteria = sessionFactory.getCurrentSession().createCriteria(EmrMonitorReport.class);
criteria.add(Restrictions.eq("emrMonitorServer", server));
criteria.add(Restrictions.eq("status", status));
try {
List<EmrMonitorReport> list = (List<EmrMonitorReport>)criteria.list();
if (list != null && list.size() > 0) {
return list;
}
} catch (Exception e) {
log.error("failed to retrieve a list of reports", e);
}
return null;
}
@Override
public Map<String, String> getOpenmrsData() {
Map openmrsData =new HashMap<String, Integer>();
Session session=sessionFactory.getCurrentSession();
String sql="SELECT patient_id FROM orders where voided=0";
SQLQuery query=session.createSQLQuery(sql);
int numOrders=query.list().size();
openmrsData.put("orders", ""+numOrders);
String sql2="select patient_id from patient where voided=0";
SQLQuery query2=session.createSQLQuery(sql2);
int numPatients=query2.list().size();
openmrsData.put("patients", ""+numPatients);
String sql3="select patient_id from encounter where voided=0";
SQLQuery query3=session.createSQLQuery(sql3);
int numEncounters=query3.list().size();
openmrsData.put("encounters", ""+numEncounters);
String sql4="select person_id from obs where voided=0";
SQLQuery query4=session.createSQLQuery(sql4);
int numObs=query4.list().size();
openmrsData.put("observations", ""+numObs);
String sql5="select record_id from sync_record where state!='COMMITTED' and state!='NOT_SUPPOSED_TO_SYNC' and uuid=original_uuid";
SQLQuery query5=session.createSQLQuery(sql5);
int numPendingRecords=query5.list().size();
openmrsData.put("pendingRecords", ""+numPendingRecords);
String sql6="SELECT VERSION()";
SQLQuery query6=session.createSQLQuery(sql6);
String mysqlVersion=query6.list().get(0).toString();
openmrsData.put("mysqlVersion", ""+mysqlVersion);
String sql7="select record_id from sync_record where state in ('FAILED','FAILED_AND_STOPPED') and uuid=original_uuid";
SQLQuery query7=session.createSQLQuery(sql7);
int numFailedRecords=query7.list().size();
if(numFailedRecords>0)
openmrsData.put("failedRecord", "YES");
else
openmrsData.put("failedRecord", "NO");
String sql8="select contained_classes from sync_record where state='FAILED' and uuid=original_uuid";
SQLQuery query8=session.createSQLQuery(sql8);
String objectFailedFull="";
if(query8.list().size()!=0){
objectFailedFull=query8.list().get(0).toString();
}
openmrsData.put("failedObject", objectFailedFull);
String sql9="select contained_classes from sync_record where state='REJECTED' and uuid=original_uuid";
SQLQuery query9=session.createSQLQuery(sql9);
int rejectedObject=query9.list().size();
openmrsData.put("rejectedObject", ""+rejectedObject);
return openmrsData;
}
}
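/*
 * A minimal illustrative sketch (not part of the module source above): in
 * getOpenmrsData(), the failed-records query only reads its first row when the
 * result list is non-empty, which is the guard that avoids the
 * IndexOutOfBoundsException referenced in the accompanying commit message. The
 * same pattern reduced to plain Java; the class and method names are illustrative
 * assumptions.
 */
import java.util.Collections;
import java.util.List;

class FirstRowGuardSketch {

    /** Returns the first element's string form, or a fallback when the list is empty. */
    static String firstOrDefault(List<?> rows, String fallback) {
        return rows.isEmpty() ? fallback : rows.get(0).toString();
    }

    public static void main(String[] args) {
        System.out.println(firstOrDefault(Collections.emptyList(), ""));                      // "" instead of an exception
        System.out.println(firstOrDefault(Collections.singletonList("org.openmrs.Obs"), "")); // org.openmrs.Obs
    }
}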
|
api/src/main/java/org/openmrs/module/emrmonitor/api/db/hibernate/HibernateEmrMonitorDAO.java
|
/**
* The contents of this file are subject to the OpenMRS Public License
* Version 1.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://license.openmrs.org
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* Copyright (C) OpenMRS, LLC. All Rights Reserved.
*/
package org.openmrs.module.emrmonitor.api.db.hibernate;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.Criteria;
import org.hibernate.HibernateException;
import org.hibernate.SQLQuery;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.criterion.Restrictions;
import org.openmrs.api.context.Context;
import org.openmrs.module.emrmonitor.EmrMonitorReport;
import org.openmrs.module.emrmonitor.EmrMonitorServer;
import org.openmrs.module.emrmonitor.EmrMonitorServerType;
import org.openmrs.module.emrmonitor.api.db.EmrMonitorDAO;
import java.beans.Expression;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* It is a default implementation of {@link EmrMonitorDAO}.
*/
public class HibernateEmrMonitorDAO implements EmrMonitorDAO {
protected final Log log = LogFactory.getLog(this.getClass());
private SessionFactory sessionFactory;
/**
* @param sessionFactory the sessionFactory to set
*/
public void setSessionFactory(SessionFactory sessionFactory) {
this.sessionFactory = sessionFactory;
}
/**
* @return the sessionFactory
*/
public SessionFactory getSessionFactory() {
return sessionFactory;
}
@Override
public List<EmrMonitorServer> getAllEmrMonitorServers() {
Criteria criteria = sessionFactory.getCurrentSession().createCriteria(EmrMonitorServer.class);
criteria.add(Restrictions.eq("voided", false));
List<EmrMonitorServer> list = null;
try {
list = (List<EmrMonitorServer>) criteria.list();
} catch (Exception e) {
log.error("Failed to retrieve emr monitor servers", e);
}
return list;
}
@Override
public EmrMonitorServer getEmrMonitorServerByUuid(String serverUuid) {
Criteria criteria = sessionFactory.getCurrentSession().createCriteria(EmrMonitorServer.class);
criteria.add(Restrictions.eq("uuid", serverUuid));
try {
List<EmrMonitorServer> list = (List<EmrMonitorServer>) criteria.list();
if (list != null && list.size() > 0 ) {
return (EmrMonitorServer) list.get(0);
}
} catch (Exception e) {
log.error("Failed to retrieve emr monitor server record", e);
}
return null;
}
@Override
public List<EmrMonitorServer> getEmrMonitorServerByType(EmrMonitorServerType serverType) {
Criteria criteria = sessionFactory.getCurrentSession().createCriteria(EmrMonitorServer.class);
criteria.add(Restrictions.eq("voided", false));
criteria.add(Restrictions.eq("serverType", serverType));
try {
List<EmrMonitorServer> list = (List<EmrMonitorServer>) criteria.list();
if (list != null && list.size() > 0 ) {
return list;
}
} catch (Exception e) {
log.error("Failed to retrieve emr monitor servers", e);
}
return null;
}
@Override
public EmrMonitorServer saveEmrMonitorServer(EmrMonitorServer server) {
try{
sessionFactory.getCurrentSession().saveOrUpdate(server);
} catch (Exception e) {
log.error("Error saving EmrMonitor Server", e);
}
return server;
}
@Override
public void deleteEmrMonitorServer(EmrMonitorServer server) {
sessionFactory.getCurrentSession().delete(server);
}
@Override
public EmrMonitorReport saveEmrMonitorReport(EmrMonitorReport report) {
try{
sessionFactory.getCurrentSession().saveOrUpdate(report);
} catch (Exception e) {
log.error("Error saving EmrMonitorReport", e);
}
return report;
}
@Override
public List<EmrMonitorReport> getEmrMonitorReportByServerAndStatus(EmrMonitorServer server, EmrMonitorReport.SubmissionStatus status) {
Criteria criteria = sessionFactory.getCurrentSession().createCriteria(EmrMonitorReport.class);
criteria.add(Restrictions.eq("emrMonitorServer", server));
criteria.add(Restrictions.eq("status", status));
try {
List<EmrMonitorReport> list = (List<EmrMonitorReport>)criteria.list();
if (list != null && list.size() > 0) {
return list;
}
} catch (Exception e) {
log.error("failed to retrieve a list of reports", e);
}
return null;
}
@Override
public Map<String, String> getOpenmrsData() {
Map openmrsData =new HashMap<String, Integer>();
Session session=sessionFactory.getCurrentSession();
String sql="SELECT patient_id FROM orders where voided=0";
SQLQuery query=session.createSQLQuery(sql);
int numOrders=query.list().size();
openmrsData.put("orders", ""+numOrders);
String sql2="select patient_id from patient where voided=0";
SQLQuery query2=session.createSQLQuery(sql2);
int numPatients=query2.list().size();
openmrsData.put("patients", ""+numPatients);
String sql3="select patient_id from encounter where voided=0";
SQLQuery query3=session.createSQLQuery(sql3);
int numEncounters=query3.list().size();
openmrsData.put("encounters", ""+numEncounters);
String sql4="select person_id from obs where voided=0";
SQLQuery query4=session.createSQLQuery(sql4);
int numObs=query4.list().size();
openmrsData.put("observations", ""+numObs);
String sql5="select record_id from sync_record where state!='COMMITTED' and state!='NOT_SUPPOSED_TO_SYNC' and uuid=original_uuid";
SQLQuery query5=session.createSQLQuery(sql5);
int numPendingRecords=query5.list().size();
openmrsData.put("pendingRecords", ""+numPendingRecords);
String sql6="SELECT VERSION()";
SQLQuery query6=session.createSQLQuery(sql6);
String mysqlVersion=query6.list().get(0).toString();
openmrsData.put("mysqlVersion", ""+mysqlVersion);
String sql7="select record_id from sync_record where state in ('FAILED','FAILED_AND_STOPPED') and uuid=original_uuid";
SQLQuery query7=session.createSQLQuery(sql7);
int numFailedRecords=query7.list().size();
if(numFailedRecords>0)
openmrsData.put("failedRecord", "YES");
else
openmrsData.put("failedRecord", "NO");
/*String sql8="select contained_classes from sync_record where state='FAILED' and uuid=original_uuid";
SQLQuery query8=session.createSQLQuery(sql8);
String objectFailedFull=query8.list().get(0).toString().split(",")[0];
String objectFailed=objectFailedFull.split(".")[(objectFailedFull.split(".").length)-1];
openmrsData.put("failedObject", objectFailed);
*/
String sql9="select contained_classes from sync_record where state='REJECTED' and uuid=original_uuid";
SQLQuery query9=session.createSQLQuery(sql9);
int rejectedObject=query9.list().size();
openmrsData.put("rejectedObject", ""+rejectedObject);
return openmrsData;
}
}
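/*
 * A short note on the commented-out block above (not part of the module source):
 * it calls objectFailedFull.split("."). String.split() treats its argument as a
 * regular expression, so "." matches every character, the resulting array is empty,
 * and indexing it with [length - 1] throws an ArrayIndexOutOfBoundsException (a
 * subtype of the IndexOutOfBoundsException named in the commit message). Escaping
 * the dot avoids this; a minimal illustration follows, with an assumed sample value.
 */
class SplitDotSketch {
    public static void main(String[] args) {
        String qualified = "org.openmrs.Obs";
        System.out.println(qualified.split(".").length); // 0 -> [length - 1] would fail
        String[] parts = qualified.split("\\.");         // split on a literal '.'
        System.out.println(parts[parts.length - 1]);     // Obs
    }
}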
|
Fixing ticket RWA-407: failure to retrieve additional OpenMRS metrics.
HibernateEmrMonitorDAO.getOpenmrsData() throws
java.lang.IndexOutOfBoundsException.
|
api/src/main/java/org/openmrs/module/emrmonitor/api/db/hibernate/HibernateEmrMonitorDAO.java
|
Fixing ticket RWA-407: failure to retrieve additional OpenMRS metrics
|
|
Java
|
agpl-3.0
|
b46dca692d103012cb562305fced786f2d3598de
| 0
|
deepstupid/sphinx5
|
/*
* Copyright 1999-2002 Carnegie Mellon University.
* Portions Copyright 2002 Sun Microsystems, Inc.
* Portions Copyright 2002 Mitsubishi Electronic Research Laboratories.
* All Rights Reserved. Use is subject to license terms.
*
* See the file "license.terms" for information on usage and
* redistribution of this file, and for a DISCLAIMER OF ALL
* WARRANTIES.
*
*/
package edu.cmu.sphinx.jsapi;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import javax.speech.EngineException;
import javax.speech.recognition.GrammarException;
import javax.speech.recognition.GrammarSyntaxDetail;
import javax.speech.recognition.Recognizer;
import javax.speech.recognition.Rule;
import javax.speech.recognition.RuleAlternatives;
import javax.speech.recognition.RuleCount;
import javax.speech.recognition.RuleGrammar;
import javax.speech.recognition.RuleName;
import javax.speech.recognition.RuleParse;
import javax.speech.recognition.RuleSequence;
import javax.speech.recognition.RuleTag;
import javax.speech.recognition.RuleToken;
import com.sun.speech.engine.recognition.BaseRecognizer;
import edu.cmu.sphinx.linguist.language.grammar.Grammar;
import edu.cmu.sphinx.linguist.language.grammar.GrammarNode;
import edu.cmu.sphinx.util.LogMath;
import edu.cmu.sphinx.util.props.PropertyException;
import edu.cmu.sphinx.util.props.PropertySheet;
import edu.cmu.sphinx.util.props.PropertyType;
import edu.cmu.sphinx.util.props.Registry;
/**
* Defines a BNF-style grammar based on JSGF grammar rules in a file.
*
* <p>
* The Java Speech Grammar Format (JSGF) is a BNF-style, platform-independent,
* and vendor-independent textual representation of grammars for use in speech
* recognition. It is used by the <a
* href="http://java.sun.com/products/java-media/speech/">Java Speech API
* (JSAPI) </a>.
*
* <p>
* Here we only intend to give a couple of examples of grammars written in
* JSGF, so that you can quickly learn to write your own grammars. For more
* examples and a complete specification of JSGF, go to
* <p><a
* href="http://java.sun.com/products/java-media/speech/forDevelopers/JSGF/">http://java.sun.com/products/java-media/speech/forDevelopers/JSGF/
* </a>.
*
* <p>
* <b>Example 1: "Hello World" in JSGF </b>
*
* <p>
 * The example below shows a JSGF grammar that generates the sentence
 * "Hello World":
*
* <p>
* <table width="100%" cellpadding="10">
* <tr>
* <td bgcolor="#DDDDDD">
*
* <pre>
* #JSGF V1.0
*
* public <helloWorld> = Hello World;
* </pre>
*
* </td>
* </tr>
* </table>
*
 * <i>Figure 1: Hello grammar that generates the sentence "Hello World". </i>
*
* <p>
* The above grammar is saved in a file called "hello.gram". It defines a
* public grammar rule called "helloWorld". In order for this grammar rule to
 * be publicly accessible, we must declare it "public". Non-public grammar
* rules are not visible outside of the grammar file.
*
* <p>
 * The location of the grammar file(s) is defined by the
 * {@link #PROP_BASE_GRAMMAR_URL baseGrammarURL} property. Since all JSGF
 * grammar files end with ".gram", all such files at the given URL are
 * automatically searched for the grammar. The name of the grammar to search for is
* specified by {@link #PROP_GRAMMAR_NAME grammarName}. In this example, the
* grammar name is "helloWorld".
*
* <p>
* <b>Example 2: Command Grammar in JSGF </b>
*
* <p>
 * This example shows a grammar that generates basic control commands like
* "move a menu thanks please", "close file", "oh mighty computer please kindly
* delete menu thanks". It is the same as one of the command & control examples
* in the <a
* href="http://java.sun.com/products/java-media/speech/forDevelopers/JSGF/">JSGF
* specification </a>. It is considerably more complex than the previous
* example. It defines the public grammar called "basicCmd".
*
* <p>
* <table width="100%" cellpadding="10">
* <tr>
* <td bgcolor="#DDDDDD">
*
* <pre>
* #JSGF V1.0
*
* public <basicCmd> = <startPolite> <command> <endPolite>;
*
* <command> = <action> <object>;
* <action> = /10/ open |/2/ close |/1/ delete |/1/ move;
* <object> = [the | a] (window | file | menu);
*
* <startPolite> = (please | kindly | could you | oh mighty computer) *;
* <endPolite> = [ please | thanks | thank you ];
* </pre>
*
* </td>
* </tr>
* </table>
*
* <i>Figure 2: Command grammar that generates simple control commands. </i>
*
* <p>
 * The features of JSGF that are shown in this example include:
* <ul>
* <li>using other grammar rules within a grammar rule.
* <li>the OR "|" operator.
* <li>the grouping "(...)" operator.
* <li>the optional grouping "[...]" operator.
* <li>the zero-or-many "*" (called Kleene star) operator.
* <li>a probability (e.g., "open" is more likely than the others).
* </ul>
*
* <p>
* <h3>From JSGF to Grammar Graph</h3>
*
* After the JSGF grammar is read in, it is converted to a graph of words
 * representing the grammar. Let's call this the grammar graph. It is from this
* grammar graph that the eventual search structure used for speech recognition
* is built. Below, we show the grammar graphs created from the above JSGF
 * grammars. The nodes <code>"<sil>"</code> mean "silence".
*
* <p>
* <img src="doc-files/helloWorld.jpg"> <br>
* <i>Figure 3: Grammar graph created from the Hello World grammar. </i>
*
* <p>
* <img src="doc-files/commandGrammar.jpg"> <br>
* <i>Figure 4: Grammar graph created from the Command grammar. </i>
*
* <p>
* <h3>Implementation Notes</h3>
* <ol>
* <li>This implementation does not support right-hand recursion.
* <li>All probabilities are maintained in LogMath log base.
* </ol>
*/
public class JSGFGrammar extends Grammar {
/**
* Sphinx property that defines the location of the JSGF grammar file.
*/
public final static String PROP_BASE_GRAMMAR_URL = "baseGrammarURL";
/**
* Default value for the location of the JSGF grammar file.
*/
public final static String PROP_BASE_GRAMMAR_URL_DEFAULT = "file:./";
/**
* Sphinx property that defines the location of the JSGF grammar file.
*/
public final static String PROP_GRAMMAR_NAME = "grammarName";
/**
* Default value for PROP_GRAMMAR_NAME
*/
public final static String PROP_GRAMMAR_NAME_DEFAULT = "default.gram";
/**
* Sphinx property that defines the logMath component.
*/
public final static String PROP_LOG_MATH = "logMath";
// ---------------------
// Configurable data
// ---------------------
private RuleGrammar ruleGrammar;
private int identity;
private Map ruleNameStack = new HashMap();
private Recognizer recognizer;
private String urlString;
private String grammarName;
private URL baseURL = null;
private LogMath logMath;
/*
* (non-Javadoc)
*
* @see edu.cmu.sphinx.util.props.Configurable#register(java.lang.String,
* edu.cmu.sphinx.util.props.Registry)
*/
public void register(String name, Registry registry)
throws PropertyException {
super.register(name, registry);
registry.register(PROP_BASE_GRAMMAR_URL, PropertyType.STRING);
registry.register(PROP_GRAMMAR_NAME, PropertyType.STRING);
registry.register(PROP_LOG_MATH, PropertyType.COMPONENT);
}
/*
* (non-Javadoc)
*
* @see edu.cmu.sphinx.util.props.Configurable#newProperties(edu.cmu.sphinx.util.props.PropertySheet)
*/
public void newProperties(PropertySheet ps) throws PropertyException {
super.newProperties(ps);
urlString = ps.getString(PROP_BASE_GRAMMAR_URL,
PROP_BASE_GRAMMAR_URL_DEFAULT);
grammarName = ps
.getString(PROP_GRAMMAR_NAME, PROP_GRAMMAR_NAME_DEFAULT);
logMath = (LogMath) ps.getComponent(PROP_LOG_MATH, LogMath.class);
}
/**
* Returns the RuleGrammar of this JSGFGrammar.
*
* @return the RuleGrammar
*/
public RuleGrammar getRuleGrammar() {
return ruleGrammar;
}
/**
* Sets the URL context of the JSGF grammars.
*
 * @param url the URL context of the grammars
*/
public void setBaseURL(URL url) {
baseURL = url;
}
/**
* Creates the grammar.
*
* @return the initial node of the Grammar
*/
protected GrammarNode createGrammar() throws IOException {
identity = 0;
recognizer = new BaseRecognizer();
try {
if (baseURL == null) {
baseURL = new URL(urlString);
}
recognizer.allocate();
ruleGrammar = recognizer.loadJSGF(baseURL, grammarName);
recognizer.commitChanges();
ruleGrammar.setEnabled(true);
GrammarNode firstNode = createGrammarNode(identity++, "<sil>");
GrammarNode finalNode = createGrammarNode(identity++, "<sil>");
finalNode.setFinalNode(true);
// go through each rule and create a network of GrammarNodes
// for each of them
String[] ruleNames = ruleGrammar.listRuleNames();
for (int i = 0; i < ruleNames.length; i++) {
String ruleName = ruleNames[i];
if (ruleGrammar.isRulePublic(ruleName)) {
debugPrintln("New Rule: " + ruleName);
Rule rule = ruleGrammar.getRule(ruleName);
GrammarGraph graph = parseRule(rule);
firstNode.add(graph.getStartNode(), 0.0f);
graph.getEndNode().add(finalNode, 0.0f);
}
}
return firstNode;
} catch (EngineException ee) {
// ee.printStackTrace();
throw new IOException(ee.toString());
} catch (GrammarException ge) {
// ge.printStackTrace();
dumpGrammarException(ge);
throw new IOException("GrammarException: " + ge);
} catch (MalformedURLException mue) {
throw new IOException("bad base grammar url " + urlString + " "
+ mue);
}
}
/**
* Parses the given Rule into a network of GrammarNodes.
*
* @param rule
* the Rule to parse
*
* @return a grammar graph
*/
private GrammarGraph parseRule(Rule rule) throws GrammarException {
GrammarGraph result;
if (rule != null) {
debugPrintln("parseRule: " + rule.toString());
}
if (rule instanceof RuleAlternatives) {
result = parseRuleAlternatives((RuleAlternatives) rule);
} else if (rule instanceof RuleCount) {
result = parseRuleCount((RuleCount) rule);
} else if (rule instanceof RuleName) {
result = parseRuleName((RuleName) rule);
} else if (rule instanceof RuleSequence) {
result = parseRuleSequence((RuleSequence) rule);
} else if (rule instanceof RuleTag) {
result = parseRuleTag((RuleTag) rule);
} else if (rule instanceof RuleToken) {
result = parseRuleToken((RuleToken) rule);
} else if (rule instanceof RuleParse) {
throw new IllegalArgumentException(
"Unsupported Rule type: RuleParse: " + rule.toString());
} else {
throw new IllegalArgumentException("Unsupported Rule type: "
+ rule.toString());
}
return result;
}
/**
* Parses the given RuleName into a network of GrammarNodes.
*
* @param initialRuleName
* the RuleName rule to parse
*
* @return a grammar graph
*/
private GrammarGraph parseRuleName(RuleName initialRuleName)
throws GrammarException {
debugPrintln("parseRuleName: " + initialRuleName.toString());
GrammarGraph result = (GrammarGraph) ruleNameStack.get(initialRuleName
.getRuleName());
if (result != null) { // it's a recursive call
return result;
} else {
result = new GrammarGraph();
ruleNameStack.put(initialRuleName.getRuleName(), result);
}
RuleName ruleName = ruleGrammar.resolve(initialRuleName);
if (ruleName == RuleName.NULL) {
result.getStartNode().add(result.getEndNode(), 0.0f);
} else if (ruleName == RuleName.VOID) {
// no connection for void
} else {
if (ruleName == null) {
throw new GrammarException("Can't resolve " + initialRuleName
+ " g " + initialRuleName.getFullGrammarName());
}
RuleGrammar rg = recognizer.getRuleGrammar(ruleName
.getFullGrammarName());
if (rg == null) {
throw new GrammarException("Can't resolve grammar name "
+ ruleName.getFullGrammarName());
}
Rule rule = rg.getRule(ruleName.getSimpleRuleName());
if (rule == null) {
throw new GrammarException("Can't resolve rule: "
+ ruleName.getRuleName());
}
GrammarGraph ruleResult = parseRule(rule);
result.getStartNode().add(ruleResult.getStartNode(), 0.0f);
ruleResult.getEndNode().add(result.getEndNode(), 0.0f);
ruleNameStack.remove(ruleName.getRuleName());
}
return result;
}
/**
* Parses the given RuleCount into a network of GrammarNodes.
*
* @param ruleCount
* the RuleCount object to parse
*
* @return a grammar graph
*/
private GrammarGraph parseRuleCount(RuleCount ruleCount)
throws GrammarException {
debugPrintln("parseRuleCount: " + ruleCount);
GrammarGraph result = new GrammarGraph();
int count = ruleCount.getCount();
GrammarGraph newNodes = parseRule(ruleCount.getRule());
result.getStartNode().add(newNodes.getStartNode(), 0.0f);
newNodes.getEndNode().add(result.getEndNode(), 0.0f);
// if this is optional, add a bypass arc
if (count == RuleCount.ZERO_OR_MORE || count == RuleCount.OPTIONAL) {
result.getStartNode().add(result.getEndNode(), 0.0f);
}
// if this can possibly occur more than once, add a loopback
if (count == RuleCount.ONCE_OR_MORE || count == RuleCount.ZERO_OR_MORE) {
newNodes.getEndNode().add(newNodes.getStartNode(), 0.0f);
}
return result;
}
/**
* Parses the given RuleAlternatives into a network of GrammarNodes.
*
* @param ruleAlternatives
* the RuleAlternatives to parse
*
* @return a grammar graph
*/
private GrammarGraph parseRuleAlternatives(RuleAlternatives ruleAlternatives)
throws GrammarException {
debugPrintln("parseRuleAlternatives: " + ruleAlternatives.toString());
GrammarGraph result = new GrammarGraph();
Rule[] rules = ruleAlternatives.getRules();
float[] weights = ruleAlternatives.getWeights();
normalizeWeights(weights);
// expand each alternative, and connect them in parallel
for (int i = 0; i < rules.length; i++) {
Rule rule = rules[i];
float weight = 0.0f;
if (weights != null) {
weight = weights[i];
}
debugPrintln("Alternative: " + rule.toString());
GrammarGraph newNodes = parseRule(rule);
result.getStartNode().add(newNodes.getStartNode(), weight);
newNodes.getEndNode().add(result.getEndNode(), 0.0f);
}
return result;
}
/**
* Normalize the weights. The weights should always be zero or greater. We
* need to convert the weights to a log probability.
*
* @param weights
* the weights to normalize
*/
private void normalizeWeights(float[] weights) {
if (weights != null) {
double sum = 0.0;
for (int i = 0; i < weights.length; i++) {
if (weights[i] < 0) {
throw new IllegalArgumentException("negative weight");
}
sum += weights[i];
}
for (int i = 0; i < weights.length; i++) {
if (sum == 0.0f) {
weights[i] = LogMath.getLogZero();
} else {
weights[i] = logMath.linearToLog(weights[i] / sum);
}
}
}
}
/**
* Parses the given RuleSequence into a network of GrammarNodes.
*
* @param ruleSequence
* the RuleSequence to parse
*
* @return the first and last GrammarNodes of the network
*/
private GrammarGraph parseRuleSequence(RuleSequence ruleSequence)
throws GrammarException {
GrammarNode startNode = null;
GrammarNode endNode = null;
debugPrintln("parseRuleSequence: " + ruleSequence);
Rule[] rules = ruleSequence.getRules();
GrammarNode lastGrammarNode = null;
// expand and connect each rule in the sequence serially
for (int i = 0; i < rules.length; i++) {
Rule rule = rules[i];
GrammarGraph newNodes = parseRule(rule);
// first node
if (i == 0) {
startNode = newNodes.getStartNode();
}
// last node
if (i == (rules.length - 1)) {
endNode = newNodes.getEndNode();
}
if (i > 0) {
lastGrammarNode.add(newNodes.getStartNode(), 0.0f);
}
lastGrammarNode = newNodes.getEndNode();
}
return new GrammarGraph(startNode, endNode);
}
/**
 * Parses the given RuleTag into a network of GrammarNodes.
*
* @param ruleTag
* the RuleTag to parse
*
* @return the first and last GrammarNodes of the network
*/
private GrammarGraph parseRuleTag(RuleTag ruleTag) throws GrammarException {
debugPrintln("parseRuleTag: " + ruleTag);
Rule rule = ruleTag.getRule();
return parseRule(rule);
}
/**
* Creates a GrammarNode with the word in the given RuleToken.
*
* @param ruleToken
* the RuleToken that contains the word
*
* @return a GrammarNode with the word in the given RuleToken
*/
private GrammarGraph parseRuleToken(RuleToken ruleToken) {
debugPrintln("parseRuleToken: " + ruleToken.toString());
GrammarNode node = createGrammarNode(identity++, ruleToken.getText());
return new GrammarGraph(node, node);
}
/**
* Dumps out a grammar exception
*
* @param ge
* the grammar exception
*
*/
private void dumpGrammarException(GrammarException ge) {
System.out.println("Grammar exception " + ge);
GrammarSyntaxDetail[] gsd = ge.getDetails();
if (gsd != null) {
for (int i = 0; i < gsd.length; i++) {
System.out.println("Grammar Name: " + gsd[i].grammarName);
System.out.println("Grammar Loc : " + gsd[i].grammarLocation);
System.out.println("Import Name : " + gsd[i].importName);
System.out.println("Line number : " + gsd[i].lineNumber);
System.out.println("char number : " + gsd[i].charNumber);
System.out.println("Rule name : " + gsd[i].ruleName);
System.out.println("Message : " + gsd[i].message);
}
}
}
/**
* Debugging println
*
* @param message
* the message to optionally print
*/
private void debugPrintln(String message) {
if (false) {
System.out.println(message);
}
}
/**
* Dumps interesting things about this grammar
*/
private void dumpGrammar() {
System.out.println("Imported rules { ");
RuleName[] imports = ruleGrammar.listImports();
for (int i = 0; i < imports.length; i++) {
System.out
.println(" Import " + i + " " + imports[i].getRuleName());
}
System.out.println("}");
System.out.println("Rulenames { ");
String[] names = ruleGrammar.listRuleNames();
for (int i = 0; i < names.length; i++) {
System.out.println(" Name " + i + " " + names[i]);
}
System.out.println("}");
}
/**
* Represents a graph of grammar nodes. A grammar graph has a single
* starting node and a single ending node
*/
class GrammarGraph {
private GrammarNode startNode;
private GrammarNode endNode;
/**
* Creates a grammar graph with the given nodes
*
* @param startNode
 * the starting node of the graph
* @param endNode
* the ending node of the graph
*/
GrammarGraph(GrammarNode startNode, GrammarNode endNode) {
this.startNode = startNode;
this.endNode = endNode;
}
/**
* Creates a graph with non-word nodes for the start and ending nodes
*/
GrammarGraph() {
startNode = createGrammarNode(identity++, false);
endNode = createGrammarNode(identity++, false);
}
/**
* Gets the starting node
*
* @return the starting node for the graph
*/
GrammarNode getStartNode() {
return startNode;
}
/**
* Gets the ending node
*
* @return the ending node for the graph
*/
GrammarNode getEndNode() {
return endNode;
}
}
}
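A self-contained illustration (not part of the Sphinx sources) of what normalizeWeights() above does with the /10/, /2/, /1/, /1/ branch weights from the command grammar example: each weight is divided by the sum and converted to the log domain. This demo prints natural logarithms, whereas the real code converts through the configured LogMath log base.
public class WeightNormalizationDemo {
    public static void main(String[] args) {
        float[] weights = {10f, 2f, 1f, 1f};    // JSGF branch weights, e.g. /10/ open | /2/ close | ...
        double sum = 0.0;
        for (float w : weights) {
            sum += w;
        }
        for (float w : weights) {
            double p = w / sum;                 // linear probability of taking this alternative
            System.out.printf("weight %.0f -> p = %.3f, ln(p) = %.3f%n", w, p, Math.log(p));
        }
    }
}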
|
edu/cmu/sphinx/jsapi/JSGFGrammar.java
|
/*
* Copyright 1999-2002 Carnegie Mellon University.
* Portions Copyright 2002 Sun Microsystems, Inc.
* Portions Copyright 2002 Mitsubishi Electronic Research Laboratories.
* All Rights Reserved. Use is subject to license terms.
*
* See the file "license.terms" for information on usage and
* redistribution of this file, and for a DISCLAIMER OF ALL
* WARRANTIES.
*
*/
package edu.cmu.sphinx.jsapi;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
import javax.speech.EngineException;
import javax.speech.recognition.GrammarException;
import javax.speech.recognition.GrammarSyntaxDetail;
import javax.speech.recognition.Recognizer;
import javax.speech.recognition.Rule;
import javax.speech.recognition.RuleAlternatives;
import javax.speech.recognition.RuleCount;
import javax.speech.recognition.RuleGrammar;
import javax.speech.recognition.RuleName;
import javax.speech.recognition.RuleParse;
import javax.speech.recognition.RuleSequence;
import javax.speech.recognition.RuleTag;
import javax.speech.recognition.RuleToken;
import com.sun.speech.engine.recognition.BaseRecognizer;
import edu.cmu.sphinx.linguist.language.grammar.Grammar;
import edu.cmu.sphinx.linguist.language.grammar.GrammarNode;
import edu.cmu.sphinx.util.LogMath;
import edu.cmu.sphinx.util.props.PropertyException;
import edu.cmu.sphinx.util.props.PropertySheet;
import edu.cmu.sphinx.util.props.PropertyType;
import edu.cmu.sphinx.util.props.Registry;
/**
* Defines a BNF-style grammar based on JSGF grammar rules in a file.
*
* <p>
* The Java Speech Grammar Format (JSGF) is a BNF-style, platform-independent,
* and vendor-independent textual representation of grammars for use in speech
* recognition. It is used by the <a
* href="http://java.sun.com/products/java-media/speech/">Java Speech API
* (JSAPI) </a>.
*
* <p>
* Here we only intend to give a couple of examples of grammars written in
* JSGF, so that you can quickly learn to write your own grammars. For more
* examples and a complete specification of JSGF, go to
* <p><a
* href="http://java.sun.com/products/java-media/speech/forDevelopers/JSGF/">http://java.sun.com/products/java-media/speech/forDevelopers/JSGF/
* </a>.
*
* <p>
* <b>Example 1: "Hello World" in JSGF </b>
*
* <p>
 * The example below shows a JSGF grammar that generates the sentence
 * "Hello World":
*
* <p>
* <table width="100%" cellpadding="10">
* <tr>
* <td bgcolor="#DDDDDD">
*
* <pre>
* #JSGF V1.0
*
* public <helloWorld> = Hello World;
* </pre>
*
* </td>
* </tr>
* </table>
*
 * <i>Figure 1: Hello grammar that generates the sentence "Hello World". </i>
*
* <p>
* The above grammar is saved in a file called "hello.gram". It defines a
* public grammar rule called "helloWorld". In order for this grammar rule to
 * be publicly accessible, we must declare it "public". Non-public grammar
* rules are not visible outside of the grammar file.
*
* <p>
 * The location of the grammar file(s) is defined by the
 * {@link #PROP_BASE_GRAMMAR_URL baseGrammarURL} property. Since all JSGF
 * grammar files end with ".gram", all such files at the given URL are
 * automatically searched for the grammar. The name of the grammar to search for is
* specified by {@link #PROP_GRAMMAR_NAME grammarName}. In this example, the
* grammar name is "helloWorld".
*
* <p>
* <b>Example 2: Command Grammar in JSGF </b>
*
* <p>
 * This example shows a grammar that generates basic control commands like
* "move a menu thanks please", "close file", "oh mighty computer please kindly
* delete menu thanks". It is the same as one of the command & control examples
* in the <a
* href="http://java.sun.com/products/java-media/speech/forDevelopers/JSGF/">JSGF
* specification </a>. It is considerably more complex than the previous
* example. It defines the public grammar called "basicCmd".
*
* <p>
* <table width="100%" cellpadding="10">
* <tr>
* <td bgcolor="#DDDDDD">
*
* <pre>
* #JSGF V1.0
*
* public <basicCmd> = <startPolite> <command> <endPolite>;
*
* <command> = <action> <object>;
* <action> = /10/ open |/2/ close |/1/ delete |/1/ move;
* <object> = [the | a] (window | file | menu);
*
* <startPolite> = (please | kindly | could you | oh mighty computer) *;
* <endPolite> = [ please | thanks | thank you ];
* </pre>
*
* </td>
* </tr>
* </table>
*
* <i>Figure 2: Command grammar that generates simple control commands. </i>
*
* <p>
 * The features of JSGF that are shown in this example include:
* <ul>
* <li>using other grammar rules within a grammar rule.
* <li>the OR "|" operator.
* <li>the grouping "(...)" operator.
* <li>the optional grouping "[...]" operator.
* <li>the zero-or-many "*" (called Kleene star) operator.
* <li>a probability (e.g., "open" is more likely than the others).
* </ul>
*
* <p>
* <h3>From JSGF to Grammar Graph</h3>
*
* After the JSGF grammar is read in, it is converted to a graph of words
 * representing the grammar. Let's call this the grammar graph. It is from this
* grammar graph that the eventual search structure used for speech recognition
* is built. Below, we show the grammar graphs created from the above JSGF
 * grammars. The nodes <code>"<sil>"</code> mean "silence".
*
* <p>
* <img src="doc-files/helloWorld.jpg"> <br>
* <i>Figure 3: Grammar graph created from the Hello World grammar. </i>
*
* <p>
* <img src="doc-files/commandGrammar.jpg"> <br>
* <i>Figure 4: Grammar graph created from the Command grammar. </i>
*
* <p>
* <h3>Implementation Notes</h3>
* <ol>
* <li>This implementation does not support right-hand recursion.
* <li>All probabilities are maintained in LogMath log base.
* </ol>
*/
public class JSGFGrammar extends Grammar {
/**
* Sphinx property that defines the location of the JSGF grammar file.
*/
public final static String PROP_BASE_GRAMMAR_URL = "baseGrammarURL";
/**
* Default value for the location of the JSGF grammar file.
*/
public final static String PROP_BASE_GRAMMAR_URL_DEFAULT = "file:./";
/**
* Sphinx property that defines the location of the JSGF grammar file.
*/
public final static String PROP_GRAMMAR_NAME = "grammarName";
/**
* Default value for PROP_GRAMMAR_NAME
*/
public final static String PROP_GRAMMAR_NAME_DEFAULT = "default.gram";
/**
* Sphinx property that defines the logMath component.
*/
public final static String PROP_LOG_MATH = "logMath";
// ---------------------
// Configurable data
// ---------------------
private RuleGrammar ruleGrammar;
private int identity;
private Map ruleNameStack = new HashMap();
private Recognizer recognizer;
private String urlString;
private String grammarName;
private URL baseURL = null;
private LogMath logMath;
/*
* (non-Javadoc)
*
* @see edu.cmu.sphinx.util.props.Configurable#register(java.lang.String,
* edu.cmu.sphinx.util.props.Registry)
*/
public void register(String name, Registry registry)
throws PropertyException {
super.register(name, registry);
registry.register(PROP_BASE_GRAMMAR_URL, PropertyType.STRING);
registry.register(PROP_GRAMMAR_NAME, PropertyType.STRING);
registry.register(PROP_LOG_MATH, PropertyType.COMPONENT);
}
/*
* (non-Javadoc)
*
* @see edu.cmu.sphinx.util.props.Configurable#newProperties(edu.cmu.sphinx.util.props.PropertySheet)
*/
public void newProperties(PropertySheet ps) throws PropertyException {
super.newProperties(ps);
urlString = ps.getString(PROP_BASE_GRAMMAR_URL,
PROP_BASE_GRAMMAR_URL_DEFAULT);
grammarName = ps
.getString(PROP_GRAMMAR_NAME, PROP_GRAMMAR_NAME_DEFAULT);
logMath = (LogMath) ps.getComponent(PROP_LOG_MATH, LogMath.class);
}
/**
* Returns the RuleGrammar of this JSGFGrammar.
*
* @return the RuleGrammar
*/
public RuleGrammar getRuleGrammar() {
return ruleGrammar;
}
/**
* Creates the grammar.
*
* @return the initial node of the Grammar
*/
protected GrammarNode createGrammar() throws IOException {
identity = 0;
recognizer = new BaseRecognizer();
try {
baseURL = new URL(urlString);
recognizer.allocate();
ruleGrammar = recognizer.loadJSGF(baseURL, grammarName);
recognizer.commitChanges();
ruleGrammar.setEnabled(true);
GrammarNode firstNode = createGrammarNode(identity++, "<sil>");
GrammarNode finalNode = createGrammarNode(identity++, "<sil>");
finalNode.setFinalNode(true);
// go through each rule and create a network of GrammarNodes
// for each of them
String[] ruleNames = ruleGrammar.listRuleNames();
for (int i = 0; i < ruleNames.length; i++) {
String ruleName = ruleNames[i];
if (ruleGrammar.isRulePublic(ruleName)) {
debugPrintln("New Rule: " + ruleName);
Rule rule = ruleGrammar.getRule(ruleName);
GrammarGraph graph = parseRule(rule);
firstNode.add(graph.getStartNode(), 0.0f);
graph.getEndNode().add(finalNode, 0.0f);
}
}
return firstNode;
} catch (EngineException ee) {
// ee.printStackTrace();
throw new IOException(ee.toString());
} catch (GrammarException ge) {
// ge.printStackTrace();
dumpGrammarException(ge);
throw new IOException("GrammarException: " + ge);
} catch (MalformedURLException mue) {
throw new IOException("bad base grammar url " + urlString + " "
+ mue);
}
}
/**
* Parses the given Rule into a network of GrammarNodes.
*
* @param rule
* the Rule to parse
*
* @return a grammar graph
*/
private GrammarGraph parseRule(Rule rule) throws GrammarException {
GrammarGraph result;
if (rule != null) {
debugPrintln("parseRule: " + rule.toString());
}
if (rule instanceof RuleAlternatives) {
result = parseRuleAlternatives((RuleAlternatives) rule);
} else if (rule instanceof RuleCount) {
result = parseRuleCount((RuleCount) rule);
} else if (rule instanceof RuleName) {
result = parseRuleName((RuleName) rule);
} else if (rule instanceof RuleSequence) {
result = parseRuleSequence((RuleSequence) rule);
} else if (rule instanceof RuleTag) {
result = parseRuleTag((RuleTag) rule);
} else if (rule instanceof RuleToken) {
result = parseRuleToken((RuleToken) rule);
} else if (rule instanceof RuleParse) {
throw new IllegalArgumentException(
"Unsupported Rule type: RuleParse: " + rule.toString());
} else {
throw new IllegalArgumentException("Unsupported Rule type: "
+ rule.toString());
}
return result;
}
/**
* Parses the given RuleName into a network of GrammarNodes.
*
* @param initialRuleName
* the RuleName rule to parse
*
* @return a grammar graph
*/
private GrammarGraph parseRuleName(RuleName initialRuleName)
throws GrammarException {
debugPrintln("parseRuleName: " + initialRuleName.toString());
GrammarGraph result = (GrammarGraph) ruleNameStack.get(initialRuleName
.getRuleName());
if (result != null) { // it's a recursive call
return result;
} else {
result = new GrammarGraph();
ruleNameStack.put(initialRuleName.getRuleName(), result);
}
RuleName ruleName = ruleGrammar.resolve(initialRuleName);
if (ruleName == RuleName.NULL) {
result.getStartNode().add(result.getEndNode(), 0.0f);
} else if (ruleName == RuleName.VOID) {
// no connection for void
} else {
if (ruleName == null) {
throw new GrammarException("Can't resolve " + initialRuleName
+ " g " + initialRuleName.getFullGrammarName());
}
RuleGrammar rg = recognizer.getRuleGrammar(ruleName
.getFullGrammarName());
if (rg == null) {
throw new GrammarException("Can't resolve grammar name "
+ ruleName.getFullGrammarName());
}
Rule rule = rg.getRule(ruleName.getSimpleRuleName());
if (rule == null) {
throw new GrammarException("Can't resolve rule: "
+ ruleName.getRuleName());
}
GrammarGraph ruleResult = parseRule(rule);
result.getStartNode().add(ruleResult.getStartNode(), 0.0f);
ruleResult.getEndNode().add(result.getEndNode(), 0.0f);
ruleNameStack.remove(ruleName.getRuleName());
}
return result;
}
/**
* Parses the given RuleCount into a network of GrammarNodes.
*
* @param ruleCount
* the RuleCount object to parse
*
* @return a grammar graph
*/
private GrammarGraph parseRuleCount(RuleCount ruleCount)
throws GrammarException {
debugPrintln("parseRuleCount: " + ruleCount);
GrammarGraph result = new GrammarGraph();
int count = ruleCount.getCount();
GrammarGraph newNodes = parseRule(ruleCount.getRule());
result.getStartNode().add(newNodes.getStartNode(), 0.0f);
newNodes.getEndNode().add(result.getEndNode(), 0.0f);
// if this is optional, add a bypass arc
if (count == RuleCount.ZERO_OR_MORE || count == RuleCount.OPTIONAL) {
result.getStartNode().add(result.getEndNode(), 0.0f);
}
// if this can possibly occur more than once, add a loopback
if (count == RuleCount.ONCE_OR_MORE || count == RuleCount.ZERO_OR_MORE) {
newNodes.getEndNode().add(newNodes.getStartNode(), 0.0f);
}
return result;
}
/**
* Parses the given RuleAlternatives into a network of GrammarNodes.
*
* @param ruleAlternatives
* the RuleAlternatives to parse
*
* @return a grammar graph
*/
private GrammarGraph parseRuleAlternatives(RuleAlternatives ruleAlternatives)
throws GrammarException {
debugPrintln("parseRuleAlternatives: " + ruleAlternatives.toString());
GrammarGraph result = new GrammarGraph();
Rule[] rules = ruleAlternatives.getRules();
float[] weights = ruleAlternatives.getWeights();
normalizeWeights(weights);
// expand each alternative, and connect them in parallel
for (int i = 0; i < rules.length; i++) {
Rule rule = rules[i];
float weight = 0.0f;
if (weights != null) {
weight = weights[i];
}
debugPrintln("Alternative: " + rule.toString());
GrammarGraph newNodes = parseRule(rule);
result.getStartNode().add(newNodes.getStartNode(), weight);
newNodes.getEndNode().add(result.getEndNode(), 0.0f);
}
return result;
}
/**
* Normalize the weights. The weights should always be zero or greater. We
* need to convert the weights to a log probability.
*
* @param weights
* the weights to normalize
*/
private void normalizeWeights(float[] weights) {
if (weights != null) {
double sum = 0.0;
for (int i = 0; i < weights.length; i++) {
if (weights[i] < 0) {
throw new IllegalArgumentException("negative weight");
}
sum += weights[i];
}
for (int i = 0; i < weights.length; i++) {
if (sum == 0.0f) {
weights[i] = LogMath.getLogZero();
} else {
weights[i] = logMath.linearToLog(weights[i] / sum);
}
}
}
}
/**
* Parses the given RuleSequence into a network of GrammarNodes.
*
* @param ruleSequence
* the RuleSequence to parse
*
* @return the first and last GrammarNodes of the network
*/
private GrammarGraph parseRuleSequence(RuleSequence ruleSequence)
throws GrammarException {
GrammarNode startNode = null;
GrammarNode endNode = null;
debugPrintln("parseRuleSequence: " + ruleSequence);
Rule[] rules = ruleSequence.getRules();
GrammarNode lastGrammarNode = null;
// expand and connect each rule in the sequence serially
for (int i = 0; i < rules.length; i++) {
Rule rule = rules[i];
GrammarGraph newNodes = parseRule(rule);
// first node
if (i == 0) {
startNode = newNodes.getStartNode();
}
// last node
if (i == (rules.length - 1)) {
endNode = newNodes.getEndNode();
}
if (i > 0) {
lastGrammarNode.add(newNodes.getStartNode(), 0.0f);
}
lastGrammarNode = newNodes.getEndNode();
}
return new GrammarGraph(startNode, endNode);
}
/**
 * Parses the given RuleTag into a network of GrammarNodes.
*
* @param ruleTag
* the RuleTag to parse
*
* @return the first and last GrammarNodes of the network
*/
private GrammarGraph parseRuleTag(RuleTag ruleTag) throws GrammarException {
debugPrintln("parseRuleTag: " + ruleTag);
Rule rule = ruleTag.getRule();
return parseRule(rule);
}
/**
* Creates a GrammarNode with the word in the given RuleToken.
*
* @param ruleToken
* the RuleToken that contains the word
*
* @return a GrammarNode with the word in the given RuleToken
*/
private GrammarGraph parseRuleToken(RuleToken ruleToken) {
debugPrintln("parseRuleToken: " + ruleToken.toString());
GrammarNode node = createGrammarNode(identity++, ruleToken.getText());
return new GrammarGraph(node, node);
}
/**
* Dumps out a grammar exception
*
* @param ge
* the grammar exception
*
*/
private void dumpGrammarException(GrammarException ge) {
System.out.println("Grammar exception " + ge);
GrammarSyntaxDetail[] gsd = ge.getDetails();
if (gsd != null) {
for (int i = 0; i < gsd.length; i++) {
System.out.println("Grammar Name: " + gsd[i].grammarName);
System.out.println("Grammar Loc : " + gsd[i].grammarLocation);
System.out.println("Import Name : " + gsd[i].importName);
System.out.println("Line number : " + gsd[i].lineNumber);
System.out.println("char number : " + gsd[i].charNumber);
System.out.println("Rule name : " + gsd[i].ruleName);
System.out.println("Message : " + gsd[i].message);
}
}
}
/**
* Debugging println
*
* @param message
* the message to optionally print
*/
private void debugPrintln(String message) {
if (false) {
System.out.println(message);
}
}
/**
* Dumps interesting things about this grammar
*/
private void dumpGrammar() {
System.out.println("Imported rules { ");
RuleName[] imports = ruleGrammar.listImports();
for (int i = 0; i < imports.length; i++) {
System.out
.println(" Import " + i + " " + imports[i].getRuleName());
}
System.out.println("}");
System.out.println("Rulenames { ");
String[] names = ruleGrammar.listRuleNames();
for (int i = 0; i < names.length; i++) {
System.out.println(" Name " + i + " " + names[i]);
}
System.out.println("}");
}
/**
* Represents a graph of grammar nodes. A grammar graph has a single
* starting node and a single ending node
*/
class GrammarGraph {
private GrammarNode startNode;
private GrammarNode endNode;
/**
* Creates a grammar graph with the given nodes
*
* @param startNode
 * the starting node of the graph
* @param endNode
* the ending node of the graph
*/
GrammarGraph(GrammarNode startNode, GrammarNode endNode) {
this.startNode = startNode;
this.endNode = endNode;
}
/**
* Creates a graph with non-word nodes for the start and ending nodes
*/
GrammarGraph() {
startNode = createGrammarNode(identity++, false);
endNode = createGrammarNode(identity++, false);
}
/**
* Gets the starting node
*
* @return the starting node for the graph
*/
GrammarNode getStartNode() {
return startNode;
}
/**
* Gets the ending node
*
* @return the ending node for the graph
*/
GrammarNode getEndNode() {
return endNode;
}
}
}
|
Added a method 'setBaseURL' to JSGFGrammar.
git-svn-id: a8b04003a33e1d3e001b9d20391fa392a9f62d91@3044 94700074-3cef-4d97-a70e-9c8c206c02f5
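A minimal standalone sketch (an illustration, not part of the Sphinx sources) of the behaviour this change enables: a base URL supplied through setBaseURL() takes precedence, and the configured baseGrammarURL string is only parsed as a fallback inside createGrammar().
import java.net.MalformedURLException;
import java.net.URL;

public class BaseUrlFallbackDemo {
    public static void main(String[] args) throws MalformedURLException {
        URL baseURL = null;               // would normally be supplied through setBaseURL(...)
        String urlString = "file:./";     // PROP_BASE_GRAMMAR_URL_DEFAULT
        if (baseURL == null) {            // same fallback as the patched createGrammar()
            baseURL = new URL(urlString);
        }
        System.out.println("Grammar base URL: " + baseURL);
    }
}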
|
edu/cmu/sphinx/jsapi/JSGFGrammar.java
|
Added a method 'setBaseURL' to JSGFGrammar.
|
|
Java
|
agpl-3.0
|
42f6f6882d70ac0444500dc309247a540beb543e
| 0
|
tobwiens/scheduling,mbenguig/scheduling,zeineb/scheduling,fviale/scheduling,tobwiens/scheduling,ow2-proactive/scheduling,ow2-proactive/scheduling,ShatalovYaroslav/scheduling,fviale/scheduling,ow2-proactive/scheduling,jrochas/scheduling,yinan-liu/scheduling,ow2-proactive/scheduling,paraita/scheduling,ShatalovYaroslav/scheduling,yinan-liu/scheduling,marcocast/scheduling,fviale/scheduling,marcocast/scheduling,tobwiens/scheduling,tobwiens/scheduling,fviale/scheduling,ShatalovYaroslav/scheduling,marcocast/scheduling,ShatalovYaroslav/scheduling,zeineb/scheduling,mbenguig/scheduling,paraita/scheduling,paraita/scheduling,fviale/scheduling,paraita/scheduling,yinan-liu/scheduling,ow2-proactive/scheduling,mbenguig/scheduling,mbenguig/scheduling,yinan-liu/scheduling,zeineb/scheduling,zeineb/scheduling,paraita/scheduling,jrochas/scheduling,fviale/scheduling,jrochas/scheduling,yinan-liu/scheduling,zeineb/scheduling,jrochas/scheduling,ow2-proactive/scheduling,jrochas/scheduling,marcocast/scheduling,ShatalovYaroslav/scheduling,mbenguig/scheduling,zeineb/scheduling,fviale/scheduling,mbenguig/scheduling,ow2-proactive/scheduling,tobwiens/scheduling,ShatalovYaroslav/scheduling,ShatalovYaroslav/scheduling,mbenguig/scheduling,yinan-liu/scheduling,yinan-liu/scheduling,jrochas/scheduling,tobwiens/scheduling,paraita/scheduling,marcocast/scheduling,jrochas/scheduling,paraita/scheduling,zeineb/scheduling,marcocast/scheduling,marcocast/scheduling,tobwiens/scheduling
|
/*
* ProActive Parallel Suite(TM):
* The Open Source library for parallel and distributed
* Workflows & Scheduling, Orchestration, Cloud Automation
* and Big Data Analysis on Enterprise Grids & Clouds.
*
* Copyright (c) 2007 - 2017 ActiveEon
* Contact: contact@activeeon.com
*
* This library is free software: you can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation: version 3 of
* the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* If needed, contact us to obtain a release under GPL Version 2 or 3
* or a different license than the AGPL.
*/
package org.ow2.proactive_grid_cloud_portal.cli;
import java.lang.reflect.Field;
import java.util.Collection;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.ParseException;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import com.google.common.collect.ImmutableList;
/**
 * This class tests that every
 * {@link org.ow2.proactive_grid_cloud_portal.cli.CommandSet.Entry} defined in the
 * {@link CommandSet} class declares a number of expected parameters that matches
 * the number of tokens used in its argNames.
*/
@RunWith(Parameterized.class)
public class CommandSetTest {
private CommandSet.Entry entry;
// field used by reflection with JUnit
private String testName;
public CommandSetTest(CommandSet.Entry entry, String testName) {
this.entry = entry;
this.testName = testName;
}
@Test
public void testThatArgNamesMatchNumberOfArgs() throws ParseException, IllegalAccessException {
int nbArgsBasedOnName = Option.UNINITIALIZED;
String regex = "\\[(.*?)\\]";
Pattern pattern = Pattern.compile(regex);
String optionalArgNames = "";
int optionals = 0;
if (entry.argNames() != null) {
String argNames = entry.argNames();
Matcher matcher = pattern.matcher(argNames);
while (matcher.find()) {
optionals = matcher.group().split(" ").length;
optionalArgNames = matcher.group();
}
if (!argNames.trim().isEmpty()) {
nbArgsBasedOnName = argNames.split(" ").length;
if (entry.hasArgs()) {
nbArgsBasedOnName = nbArgsBasedOnName - optionals;
} else if (entry.hasOptionalArg()) {
nbArgsBasedOnName = optionals;
}
}
if (argNames.contains("...") && !optionalArgNames.contains("|") && !optionalArgNames.contains("...")) {
nbArgsBasedOnName = Option.UNLIMITED_VALUES;
}
}
if (entry.numOfArgs() != nbArgsBasedOnName) {
nbArgsBasedOnName = numberComplexOptionalArgs(entry);
}
Assert.assertEquals("Option '" + entry.longOpt() + "' does not have argNames matching number of args",
entry.numOfArgs(), nbArgsBasedOnName);
}
public int numberComplexOptionalArgs(CommandSet.Entry entry) {
int numberArgs = 0;
final String regex = "(\\\\w+)=\\\"([^\\\"]+)\\\"|([^\\\\s]+)";
final Pattern pattern = Pattern.compile(regex);
final Matcher matcher = pattern.matcher(entry.argNames());
while (matcher.find()) {
numberArgs = matcher.group().split(" ").length;
}
return numberArgs;
}
@Parameterized.Parameters(name = "{1}")
public static Collection<Object[]> data() throws IllegalAccessException {
List<CommandSet.Entry> availableCommands = getAvailableCommands();
Object[][] result = new Object[availableCommands.size()][2];
for (int i = 0; i < availableCommands.size(); i++) {
CommandSet.Entry command = availableCommands.get(i);
String name = command.longOpt();
if (name == null) {
name = command.jsCommand();
}
result[i][0] = command;
result[i][1] = "testThatArgNamesMatchNumberOfArgsForOption" + name;
}
return ImmutableList.copyOf(result);
}
private static ImmutableList<CommandSet.Entry> getAvailableCommands() throws IllegalAccessException {
Class<CommandSet> commandSetClass = CommandSet.class;
Field[] declaredFields = commandSetClass.getFields();
ImmutableList.Builder<CommandSet.Entry> builder = ImmutableList.builder();
for (Field field : declaredFields) {
if (field.getType().isAssignableFrom(CommandSet.Entry.class)) {
builder.add((CommandSet.Entry) field.get(null));
}
}
return builder.build();
}
}
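A self-contained sketch of the counting logic the test applies (the argNames value below is hypothetical, not taken from CommandSet): tokens inside [...] are treated as optional and subtracted from the whitespace-separated total when the option declares hasArgs().
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ArgNamesCountDemo {
    public static void main(String[] args) {
        String argNames = "job-descriptor [variable=value ...]"; // hypothetical argNames value
        Pattern optionalPart = Pattern.compile("\\[(.*?)\\]");   // same optional-group regex as the test
        Matcher matcher = optionalPart.matcher(argNames);
        int optionals = 0;
        while (matcher.find()) {
            optionals = matcher.group().split(" ").length;       // tokens inside the last [...] group
        }
        int total = argNames.split(" ").length;                  // all whitespace-separated tokens
        System.out.println("total=" + total + ", optional=" + optionals + ", required=" + (total - optionals));
    }
}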
|
rest/rest-cli/src/test/java/org/ow2/proactive_grid_cloud_portal/cli/CommandSetTest.java
|
/*
* ProActive Parallel Suite(TM):
* The Open Source library for parallel and distributed
* Workflows & Scheduling, Orchestration, Cloud Automation
* and Big Data Analysis on Enterprise Grids & Clouds.
*
* Copyright (c) 2007 - 2017 ActiveEon
* Contact: contact@activeeon.com
*
* This library is free software: you can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation: version 3 of
* the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* If needed, contact us to obtain a release under GPL Version 2 or 3
* or a different license than the AGPL.
*/
package org.ow2.proactive_grid_cloud_portal.cli;
import java.lang.reflect.Field;
import java.util.Collection;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.ParseException;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import com.google.common.collect.ImmutableList;
/**
 * This class tests that every
 * {@link org.ow2.proactive_grid_cloud_portal.cli.CommandSet.Entry} defined in the
 * {@link CommandSet} class declares a number of expected parameters that matches
 * the number of tokens used in its argNames.
*/
@RunWith(Parameterized.class)
public class CommandSetTest {
private CommandSet.Entry entry;
// field used by reflection with JUnit
private String testName;
public CommandSetTest(CommandSet.Entry entry, String testName) {
this.entry = entry;
this.testName = testName;
}
@Test
public void testThatArgNamesMatchNumberOfArgs() throws ParseException, IllegalAccessException {
int nbArgsBasedOnName = Option.UNINITIALIZED;
String regex = "\\[(.*?)\\]"; // \\[(.*?)\\]
// (\\w+)=\"([^\"]+)\"|([^\\s]+)
// String regex = "([a-zA-Z.-]+)=((?:\"[^\"]+\"|[^ ])+)";
Pattern pattern = Pattern.compile(regex);
String optionalArgNames = "";
int optionals = 0;
if (entry.argNames() != null) {
String argNames = entry.argNames();
Matcher matcher = pattern.matcher(argNames);
while (matcher.find()) {
optionals = matcher.group().split(" ").length;
optionalArgNames = matcher.group();
}
if (!argNames.trim().isEmpty()) {
nbArgsBasedOnName = argNames.split(" ").length;
if (entry.hasArgs()) {
nbArgsBasedOnName = nbArgsBasedOnName - optionals;
} else if (entry.hasOptionalArg()) {
nbArgsBasedOnName = optionals;
}
}
if (argNames.contains("...") && !optionalArgNames.contains("|") && !optionalArgNames.contains("...")) {
nbArgsBasedOnName = Option.UNLIMITED_VALUES;
}
}
if (entry.numOfArgs() != nbArgsBasedOnName) {
nbArgsBasedOnName = numberComplexOptionalArgs(entry);
}
Assert.assertEquals("Option '" + entry.longOpt() + "' does not have argNames matching number of args",
entry.numOfArgs(),
nbArgsBasedOnName);
}
public int numberComplexOptionalArgs(CommandSet.Entry entry) {
int numberArgs = 0;
final String regex = "(\\\\w+)=\\\"([^\\\"]+)\\\"|([^\\\\s]+)";
final Pattern pattern = Pattern.compile(regex);
final Matcher matcher = pattern.matcher(entry.argNames());
while (matcher.find()) {
numberArgs = matcher.group().split(" ").length;
}
return numberArgs;
}
@Parameterized.Parameters(name = "{1}")
public static Collection<Object[]> data() throws IllegalAccessException {
List<CommandSet.Entry> availableCommands = getAvailableCommands();
Object[][] result = new Object[availableCommands.size()][2];
for (int i = 0; i < availableCommands.size(); i++) {
CommandSet.Entry command = availableCommands.get(i);
String name = command.longOpt();
if (name == null) {
name = command.jsCommand();
}
result[i][0] = command;
result[i][1] = "testThatArgNamesMatchNumberOfArgsForOption" + name;
}
return ImmutableList.copyOf(result);
}
private static ImmutableList<CommandSet.Entry> getAvailableCommands() throws IllegalAccessException {
Class<CommandSet> commandSetClass = CommandSet.class;
Field[] declaredFields = commandSetClass.getFields();
ImmutableList.Builder<CommandSet.Entry> builder = ImmutableList.builder();
for (Field field : declaredFields) {
if (field.getType().isAssignableFrom(CommandSet.Entry.class)) {
builder.add((CommandSet.Entry) field.get(null));
}
}
return builder.build();
}
}
|
Fix the SubmitJobCommand number of arguments
|
rest/rest-cli/src/test/java/org/ow2/proactive_grid_cloud_portal/cli/CommandSetTest.java
|
Fix the SubmitJobCommand number of arguments
|
|
Java
|
apache-2.0
|
534dc01cd50f7066c0e27987dd77616403544f16
| 0
|
apache/openwebbeans,apache/openwebbeans,apache/openwebbeans
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.webbeans.lifecycle;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.enterprise.inject.spi.BeanManager;
import org.apache.webbeans.config.BeansDeployer;
import org.apache.webbeans.config.OWBLogConst;
import org.apache.webbeans.config.WebBeansContext;
import org.apache.webbeans.config.WebBeansFinder;
import org.apache.webbeans.container.BeanManagerImpl;
import org.apache.webbeans.container.InjectableBeanManager;
import org.apache.webbeans.container.InjectionResolver;
import org.apache.webbeans.portable.events.discovery.BeforeShutdownImpl;
import org.apache.webbeans.spi.ContainerLifecycle;
import org.apache.webbeans.spi.ContextsService;
import org.apache.webbeans.spi.JNDIService;
import org.apache.webbeans.spi.ScannerService;
import org.apache.webbeans.util.WebBeansConstants;
import org.apache.webbeans.util.WebBeansUtil;
public abstract class AbstractLifeCycle implements ContainerLifecycle
{
protected AtomicBoolean started = new AtomicBoolean(false);
//Logger instance
protected Logger logger;
/**Discover bean classes*/
protected ScannerService scannerService;
protected final ContextsService contextsService;
/**Deploy discovered beans*/
private final BeansDeployer deployer;
/**Used for lookup operations*/
private final JNDIService jndiService;
/**Root container.*/
private final BeanManagerImpl beanManager;
protected final WebBeansContext webBeansContext;
protected AbstractLifeCycle()
{
this(null);
}
protected AbstractLifeCycle(Properties properties)
{
this(properties, WebBeansContext.getInstance());
}
protected AbstractLifeCycle(Properties properties, WebBeansContext webBeansContext)
{
beforeInitApplication(properties);
this.webBeansContext = webBeansContext;
beanManager = this.webBeansContext.getBeanManagerImpl();
deployer = new BeansDeployer(webBeansContext);
jndiService = this.webBeansContext.getService(JNDIService.class);
scannerService = this.webBeansContext.getScannerService();
contextsService = this.webBeansContext.getService(ContextsService.class);
initApplication(properties);
}
public WebBeansContext getWebBeansContext()
{
return webBeansContext;
}
@Override
public BeanManager getBeanManager()
{
return new InjectableBeanManager(beanManager);
}
@Override
public void startApplication(Object startupObject)
{
bootstrapApplication(startupObject);
}
protected synchronized void bootstrapApplication(Object startupObject)
{
if (started.get())
{
return;
}
// Initialize Application Context
logger.info(OWBLogConst.INFO_0005);
long begin = System.currentTimeMillis();
//Before Start
beforeStartApplication(startupObject);
//Load all plugins
webBeansContext.getPluginLoader().startUp();
//Initialize contexts
contextsService.init(startupObject);
//Scanning process
logger.fine("Scanning classpaths for beans artifacts.");
//Scan
scannerService.scan();
//Deploy beans
logger.fine("Deploying scanned beans.");
//Deploy
deployer.deploy(scannerService);
//Let sub-classes perform their actual start
afterStartApplication(startupObject);
if (logger.isLoggable(Level.INFO))
{
logger.log(Level.INFO, OWBLogConst.INFO_0001, Long.toString(System.currentTimeMillis() - begin));
}
started.set(true);
}
@Override
public void stopApplication(Object endObject)
{
logger.fine("OpenWebBeans Container is stopping.");
try
{
//Sub-classes operations
beforeStopApplication(endObject);
//Fire shut down
beanManager.fireLifecycleEvent(new BeforeShutdownImpl());
contextsService.destroy(endObject);
//Unbind BeanManager
jndiService.unbind(WebBeansConstants.WEB_BEANS_MANAGER_JNDI_NAME);
//Free all plugin resources
webBeansContext.getPluginLoader().shutDown();
//Clear extensions
webBeansContext.getExtensionLoader().clear();
//Delete Resolutions Cache
InjectionResolver injectionResolver = webBeansContext.getBeanManagerImpl().getInjectionResolver();
injectionResolver.clearCaches();
//Delete AnnotateTypeCache
webBeansContext.getAnnotatedElementFactory().clear();
//After Stop
afterStopApplication(endObject);
// Clear BeanManager
beanManager.clear();
}
catch (Exception e)
{
if (logger.isLoggable(Level.SEVERE))
{
logger.log(Level.SEVERE, OWBLogConst.ERROR_0021, e);
}
}
finally
{
started.set(false);
// Clear singleton list
WebBeansFinder.clearInstances(WebBeansUtil.getCurrentClassLoader());
}
}
/**
* @return the contextsService
*/
@Override
public ContextsService getContextService()
{
return contextsService;
}
@Override
public void initApplication(Properties properties)
{
afterInitApplication(properties);
}
protected void beforeInitApplication(Properties properties)
{
//Do nothing as default
}
protected void afterInitApplication(Properties properties)
{
//Do nothing as default
}
protected void afterStartApplication(Object startupObject)
{
//Do nothing as default
}
protected void afterStopApplication(Object stopObject)
{
//Do nothing as default
}
protected void beforeStartApplication(Object startupObject)
{
//Do nothing as default
}
protected void beforeStopApplication(Object stopObject)
{
//Do nothing as default
}
}
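A simplified standalone analogue (not the OpenWebBeans API) of the pattern used above: bootstrapApplication() is a template method that refuses to run twice and brackets the container start with overridable before/after hooks.
import java.util.concurrent.atomic.AtomicBoolean;

abstract class MiniLifeCycle {
    private final AtomicBoolean started = new AtomicBoolean(false);

    final void start(Object startupObject) {
        if (started.get()) {
            return;                                              // already started: repeated calls are ignored
        }
        beforeStart(startupObject);
        System.out.println("scanning and deploying beans ...");  // stands in for scan() + deploy()
        afterStart(startupObject);
        started.set(true);
    }

    protected void beforeStart(Object startupObject) { }
    protected void afterStart(Object startupObject) { }
}

class DemoLifeCycle extends MiniLifeCycle {
    @Override protected void beforeStart(Object o) { System.out.println("before start"); }
    @Override protected void afterStart(Object o)  { System.out.println("after start"); }

    public static void main(String[] args) {
        DemoLifeCycle lifeCycle = new DemoLifeCycle();
        lifeCycle.start("app");
        lifeCycle.start("app");                                  // ignored thanks to the started guard
    }
}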
|
webbeans-impl/src/main/java/org/apache/webbeans/lifecycle/AbstractLifeCycle.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.webbeans.lifecycle;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.enterprise.inject.spi.BeanManager;
import org.apache.webbeans.config.BeansDeployer;
import org.apache.webbeans.config.OWBLogConst;
import org.apache.webbeans.config.WebBeansContext;
import org.apache.webbeans.config.WebBeansFinder;
import org.apache.webbeans.container.BeanManagerImpl;
import org.apache.webbeans.container.InjectionResolver;
import org.apache.webbeans.portable.events.discovery.BeforeShutdownImpl;
import org.apache.webbeans.spi.ContainerLifecycle;
import org.apache.webbeans.spi.ContextsService;
import org.apache.webbeans.spi.JNDIService;
import org.apache.webbeans.spi.ScannerService;
import org.apache.webbeans.util.WebBeansConstants;
import org.apache.webbeans.util.WebBeansUtil;
public abstract class AbstractLifeCycle implements ContainerLifecycle
{
protected AtomicBoolean started = new AtomicBoolean(false);
//Logger instance
protected Logger logger;
/**Discover bean classes*/
protected ScannerService scannerService;
protected final ContextsService contextsService;
/**Deploy discovered beans*/
private final BeansDeployer deployer;
/**Used for lookup operations*/
private final JNDIService jndiService;
/**Root container.*/
private final BeanManagerImpl beanManager;
protected final WebBeansContext webBeansContext;
protected AbstractLifeCycle()
{
this(null);
}
protected AbstractLifeCycle(Properties properties)
{
this(properties, WebBeansContext.getInstance());
}
protected AbstractLifeCycle(Properties properties, WebBeansContext webBeansContext)
{
beforeInitApplication(properties);
this.webBeansContext = webBeansContext;
beanManager = this.webBeansContext.getBeanManagerImpl();
deployer = new BeansDeployer(webBeansContext);
jndiService = this.webBeansContext.getService(JNDIService.class);
scannerService = this.webBeansContext.getScannerService();
contextsService = this.webBeansContext.getService(ContextsService.class);
initApplication(properties);
}
public WebBeansContext getWebBeansContext()
{
return webBeansContext;
}
@Override
public BeanManager getBeanManager()
{
return beanManager;
}
@Override
public void startApplication(Object startupObject)
{
bootstrapApplication(startupObject);
}
protected synchronized void bootstrapApplication(Object startupObject)
{
if (started.get())
{
return;
}
// Initialize Application Context
logger.info(OWBLogConst.INFO_0005);
long begin = System.currentTimeMillis();
//Before Start
beforeStartApplication(startupObject);
//Load all plugins
webBeansContext.getPluginLoader().startUp();
//Initialize contexts
contextsService.init(startupObject);
//Scanning process
logger.fine("Scanning classpaths for beans artifacts.");
//Scan
scannerService.scan();
//Deploy beans
logger.fine("Deploying scanned beans.");
//Deploy
deployer.deploy(scannerService);
//Let sub-classes perform their actual start
afterStartApplication(startupObject);
if (logger.isLoggable(Level.INFO))
{
logger.log(Level.INFO, OWBLogConst.INFO_0001, Long.toString(System.currentTimeMillis() - begin));
}
started.set(true);
}
@Override
public void stopApplication(Object endObject)
{
logger.fine("OpenWebBeans Container is stopping.");
try
{
//Sub-classes operations
beforeStopApplication(endObject);
//Fire shut down
beanManager.fireLifecycleEvent(new BeforeShutdownImpl());
contextsService.destroy(endObject);
//Unbind BeanManager
jndiService.unbind(WebBeansConstants.WEB_BEANS_MANAGER_JNDI_NAME);
//Free all plugin resources
webBeansContext.getPluginLoader().shutDown();
//Clear extensions
webBeansContext.getExtensionLoader().clear();
//Delete Resolutions Cache
InjectionResolver injectionResolver = webBeansContext.getBeanManagerImpl().getInjectionResolver();
injectionResolver.clearCaches();
//Delete AnnotateTypeCache
webBeansContext.getAnnotatedElementFactory().clear();
//After Stop
afterStopApplication(endObject);
// Clear BeanManager
beanManager.clear();
}
catch (Exception e)
{
if (logger.isLoggable(Level.SEVERE))
{
logger.log(Level.SEVERE, OWBLogConst.ERROR_0021, e);
}
}
finally
{
started.set(false);
// Clear singleton list
WebBeansFinder.clearInstances(WebBeansUtil.getCurrentClassLoader());
}
}
/**
* @return the contextsService
*/
@Override
public ContextsService getContextService()
{
return contextsService;
}
@Override
public void initApplication(Properties properties)
{
afterInitApplication(properties);
}
protected void beforeInitApplication(Properties properties)
{
//Do nothing as default
}
protected void afterInitApplication(Properties properties)
{
//Do nothing as default
}
protected void afterStartApplication(Object startupObject)
{
//Do nothing as default
}
protected void afterStopApplication(Object stopObject)
{
//Do nothing as default
}
protected void beforeStartApplication(Object startupObject)
{
//Do nothing as default
}
protected void beforeStopApplication(Object stopObject)
{
//Do nothing as default
}
}
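The lifecycle above fixes the bootstrap order (plugins, contexts, classpath scanning, bean deployment) and only exposes before/after hooks to sub-classes. Below is a minimal sketch of a concrete lifecycle, assuming nothing beyond the hooks and the protected JUL logger field shown above; the class name and log messages are illustrative only, not part of OpenWebBeans.
public class MinimalLifeCycle extends AbstractLifeCycle
{
    public MinimalLifeCycle()
    {
        super(null);
        // The base class leaves the protected logger field to sub-classes; plain JUL is enough here.
        this.logger = java.util.logging.Logger.getLogger(MinimalLifeCycle.class.getName());
    }
    @Override
    protected void afterStartApplication(Object startupObject)
    {
        // Runs after scanning and deployment have finished, e.g. to publish the BeanManager.
        logger.info("OpenWebBeans container started for " + startupObject);
    }
    @Override
    protected void beforeStopApplication(Object endObject)
    {
        logger.info("OpenWebBeans container is about to stop for " + endObject);
    }
}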
|
OWB-1082 set serializable BeanManager wrapper into ServletContext and similar 'external' usages
git-svn-id: 6e2e506005f11016269006bf59d22f905406eeba@1684722 13f79535-47bb-0310-9956-ffa450edef68
|
webbeans-impl/src/main/java/org/apache/webbeans/lifecycle/AbstractLifeCycle.java
|
OWB-1082 set serializable BeanManager wrapper into ServletContext and similar 'external' usages
|
|
Java
|
apache-2.0
|
e024c5a64674157478bd91bd7bdd36a9f95cd2ef
| 0
|
daniellavoie/BroadleafCommerce,bijukunjummen/BroadleafCommerce,gengzhengtao/BroadleafCommerce,macielbombonato/BroadleafCommerce,zhaorui1/BroadleafCommerce,sitexa/BroadleafCommerce,lgscofield/BroadleafCommerce,alextiannus/BroadleafCommerce,cloudbearings/BroadleafCommerce,jiman94/BroadleafCommerce-BroadleafCommerce2014,alextiannus/BroadleafCommerce,SerPenTeHoK/BroadleafCommerce,passion1014/metaworks_framework,cloudbearings/BroadleafCommerce,arshadalisoomro/BroadleafCommerce,lgscofield/BroadleafCommerce,wenmangbo/BroadleafCommerce,cengizhanozcan/BroadleafCommerce,macielbombonato/BroadleafCommerce,sitexa/BroadleafCommerce,arshadalisoomro/BroadleafCommerce,cogitoboy/BroadleafCommerce,passion1014/metaworks_framework,daniellavoie/BroadleafCommerce,caosg/BroadleafCommerce,rawbenny/BroadleafCommerce,cogitoboy/BroadleafCommerce,wenmangbo/BroadleafCommerce,trombka/blc-tmp,sitexa/BroadleafCommerce,bijukunjummen/BroadleafCommerce,rawbenny/BroadleafCommerce,TouK/BroadleafCommerce,zhaorui1/BroadleafCommerce,alextiannus/BroadleafCommerce,caosg/BroadleafCommerce,passion1014/metaworks_framework,arshadalisoomro/BroadleafCommerce,shopizer/BroadleafCommerce,gengzhengtao/BroadleafCommerce,liqianggao/BroadleafCommerce,bijukunjummen/BroadleafCommerce,SerPenTeHoK/BroadleafCommerce,zhaorui1/BroadleafCommerce,udayinfy/BroadleafCommerce,sanlingdd/broadleaf,gengzhengtao/BroadleafCommerce,udayinfy/BroadleafCommerce,udayinfy/BroadleafCommerce,cengizhanozcan/BroadleafCommerce,SerPenTeHoK/BroadleafCommerce,daniellavoie/BroadleafCommerce,rawbenny/BroadleafCommerce,ljshj/BroadleafCommerce,cloudbearings/BroadleafCommerce,ljshj/BroadleafCommerce,jiman94/BroadleafCommerce-BroadleafCommerce2014,ljshj/BroadleafCommerce,sanlingdd/broadleaf,liqianggao/BroadleafCommerce,cengizhanozcan/BroadleafCommerce,macielbombonato/BroadleafCommerce,shopizer/BroadleafCommerce,TouK/BroadleafCommerce,liqianggao/BroadleafCommerce,cogitoboy/BroadleafCommerce,lgscofield/BroadleafCommerce,TouK/BroadleafCommerce,trombka/blc-tmp,caosg/BroadleafCommerce,wenmangbo/BroadleafCommerce,trombka/blc-tmp,shopizer/BroadleafCommerce
|
package org.broadleafcommerce.offer.service;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import javax.annotation.Resource;
import org.apache.commons.beanutils.BeanComparator;
import org.apache.commons.collections.ComparatorUtils;
import org.broadleafcommerce.offer.domain.CandidateItemOffer;
import org.broadleafcommerce.offer.domain.CandidateItemOfferImpl;
import org.broadleafcommerce.offer.domain.Offer;
import org.broadleafcommerce.offer.domain.OfferCode;
import org.broadleafcommerce.order.domain.Order;
import org.broadleafcommerce.order.domain.OrderItem;
import org.broadleafcommerce.pricing.service.PricingService;
import org.broadleafcommerce.profile.domain.Customer;
import org.broadleafcommerce.type.OfferDiscountType;
import org.broadleafcommerce.type.OfferType;
import org.broadleafcommerce.util.money.Money;
import org.springframework.stereotype.Service;
/**
* The Class OfferServiceImpl.
*/
@Service("offerService")
public class OfferServiceImpl implements OfferService {
@Resource
private PricingService pricingService;
/*
* (non-Javadoc)
* @see org.broadleafcommerce.offer.service.OfferService#consumeOffer(org.broadleafcommerce.offer.domain.Offer, org.broadleafcommerce.profile.domain.Customer)
*/
@Override
public boolean consumeOffer(Offer offer, Customer customer) {
// TODO Auto-generated method stub
return false;
}
/*
* (non-Javadoc)
* @see org.broadleafcommerce.offer.service.OfferService#applyOffersToOrder(java.util.List, org.broadleafcommerce.order.domain.Order)
*/
@SuppressWarnings("unchecked")
public void applyOffersToOrder(List<Offer> offers, Order order) {
List<Offer> qualifiedOrderOffers = new ArrayList<Offer>();
List<CandidateItemOffer> qualifiedItemOffers = new ArrayList<CandidateItemOffer>();
order.removeAllOffers();
order.setCandidateOffers(new ArrayList<Offer>());
order = pricingService.calculateOrderTotal(order);
List<Offer> offersWithValidDates = removeOutOfDateOffers(offers);
if (offersWithValidDates != null) {
//
// . Pass One:
//
for (Offer offer : offersWithValidDates) {
//
// . Evaluate all offers and compute their discount amount as if they were the only offer on the order
//
if(offer.getType().equals(OfferType.ORDER)){
// TODO: Determine if order qualifies for offer
// Assume for now that all orders qualify
offer = calculateAtomOfferDiscount(offer, order.getSubTotal());
qualifiedOrderOffers.add(offer);
} else if(offer.getType().equals(OfferType.ORDER_ITEM)){
for (OrderItem orderItem : order.getOrderItems()) {
// TODO: Determine if orderItem qualifies for offer
// Assume for now that all orderItems qualify
if(couldOfferApplyToOrderItem(offer, order, orderItem))
qualifiedItemOffers.add(new CandidateItemOfferImpl(orderItem, offer ));
}
} else if(offer.getType().equals(OfferType.FULLFILLMENT_GROUP)){
// TODO: Handle Offer calculation for offer type of fullfillment group
}
}
//
// . Create a sorted list sorted by priority asc then amount desc
//
Collections.sort(qualifiedOrderOffers, ComparatorUtils.reversedComparator(new BeanComparator("discountedPrice")));
Collections.sort(qualifiedOrderOffers, new BeanComparator("priority"));
//
// . Add offers that could be used on the order to the order.candidateOffers and item.candidateOffers lists respectively
//
order.setCandidateOffers(qualifiedOrderOffers);
for (OrderItem orderItem : order.getOrderItems()) {
orderItem.setCandidateItemOffers(qualifiedItemOffers);
}
//
// Pass Two:
//- Iterate through the list above and begin applying ALL of the offers to the order by doing the following:
for(OrderItem orderItem: order.getOrderItems()) {
for (CandidateItemOffer itemOffer : qualifiedItemOffers) {
if(orderItem.getCandidateItemOffers().contains(itemOffer)){
//- Determine the amount that should be discounted for each item
//----- If the items sale price is better than the discounted price, don't apply
if(itemOffer.getDiscountedPrice().greaterThan(orderItem.getSalePrice())){
// TODO: ----- If the offer requires other items, check to see if the items are still unmarked
if(requiresMultipleSkus(itemOffer)){
// TODO: apply offer to other skus
}
// ----- If the item itself has been marked by another discount then don't apply this offer unless the offer's applyDiscountToMarkedItems = true (edge case)
if(!orderItem.isMarkedForOffer() ||
(orderItem.isMarkedForOffer() && itemOffer.getOffer().isApplyDiscountToMarkedItems())){
//----- If the item already has a discount
if(orderItem.isMarkedForOffer()){
// and this offer is stackable, apply on top of the existing offer
if(itemOffer.getOffer().isStackable()){
//----- Create corresponding item adjustments records and if (markItems == true) then mark the items used so that this offer is possible
applyItemOffer(orderItem,itemOffer);
}
// and this offer is not-stackable, don't apply
}else{
//----- Create corresponding item adjustments records and if (markItems == true) then mark the items used so that this offer is possible
applyItemOffer(orderItem,itemOffer);
}
}
}
}
}
}
Money newOrderTotal = pricingService.calculateOrderTotal(order).getSubTotal();
// TODO: How and what do we do with order level offers?
for (Offer offer : qualifiedOrderOffers) {
if(order.getCandidateOffers().contains(offer)){
//- Determine the amount that should be discounted for each item
//----- If the order sale price is better than the discounted price, don't apply
if(newOrderTotal.greaterThan(order.getSubTotal())){
// ----- If the order itself has been marked by another discount then don't apply this offer unless the offer's applyDiscountToMarkedItems = true (edge case)
if(!order.isMarkedForOffer() ||
(order.isMarkedForOffer() && offer.isApplyDiscountToMarkedItems())){
//----- If the order already has a discount
if(order.isMarkedForOffer()){
// and this offer is stackable, apply on top of the existing offer
if(offer.isStackable()){
//----- Create corresponding item adjustments records and if (markItems == true) then mark the items used so that this offer is possible
applyOrderOffer(order,offer);
}
// and this offer is not-stackable, don't apply
}else{
//----- Create corresponding item adjustments records and if (markItems == true) then mark the items used so that this offer is possible
applyOrderOffer(order,offer);
}
}
}
}
}
//**************** PREVIOUS STRATEGY **************************
// order.setCandaditeOffers(offers);
// distributeItemOffers(order, offers);
//
// // At this point, each item may have a list of offers which have been sorted such that the
// // best offer is the first offer in the list.
//
// // Now we need to evaluate the order offers to determine if the order, item, or both should be
// // applied.
//
// // I see order offers having three variations of "stackable".
// // 1. Not-stackable,
// // 2. Stackable on top of item offers,
// // 3. Stackable on top of item and order offers
// // Based on this, we need to do the following:
// // 1. Build list of order offers stackable on top of item offers
// // 2. Build list of order offers stackable on top of just order offers
// // 3. Build list of order offers stackable that are not stackable
// // Next compute the following order totals:
// // E1. orderTotalWithItemOffersOnly = xxx;
// // E2. orderTotalWithItemAndOrderOffers = xxx;
// // E3. orderTotalWithBestOrderOfferOnly = xxx;
// //
// boolean e1wins=true;
// boolean e2wins=false;
// boolean e3wins=false;
// if (e1wins) {
// // TODO: Create ItemAdjustment and add them to each OrderItem for the winning offer on each item
// }
// if (e2wins) {
// // TODO: Create ItemAdjustment and add them to each OrderItem for the winning offer on each item
// // TODO: Create ItemAdjustment records for the winning Order offer(s) and add them to each item
// }
// if (e3wins) {
// // TODO: Create ItemAdjustment records for the winning Order offer(s) and add them to each item
// }
//
//
// // now we can apply the first offer for each item which might be a stacked offer
// // but first, we need to determine if a non-stacked order offer should be applied instead
// // TODO: handle item discount distribution (e.g. applies to maximum of 1 in this order)
// // TODO: compute order total with item discounts
// // TODO: compute order total without item discounts
// // TODO: compute best non-stackable order discount
// // TODO: compute best stackable order discount
// // TODO: compute best non-stackable order discount that works with item discounts
// // TODO: compute best
// // TODO: compute order total with discounts plus stackable order discounts
// // TODO: compute order total with
// //evaluateOffers(order);
//
}
}
private boolean requiresMultipleSkus(CandidateItemOffer itemOffer){
// TODO: Add determination code for offer requiring multiple skus
// Assume offer does not for now
return false;
}
private void applyItemOffer(OrderItem orderItem, CandidateItemOffer itemOffer){
// TODO: Apply item offer
//----- Create corresponding item adjustments records and if (markItems == true) then mark the items used so that this offer is possible
orderItem.setMarkedForOffer(true);
}
private void applyOrderOffer(Order order, Offer offer){
// TODO: Apply order offer
order.setMarkedForOffer(true);
}
private Offer calculateAtomOfferDiscount(Offer offer, Money startingValue){
if(offer.getDiscountType().equals(OfferDiscountType.AMOUNT_OFF)){
offer.setDiscountPrice(startingValue.subtract(offer.getValue()));
}
if(offer.getDiscountType().equals(OfferDiscountType.FIX_PRICE)){
offer.setDiscountPrice(offer.getValue());
}
if(offer.getDiscountType().equals(OfferDiscountType.PERCENT_OFF)){
offer.setDiscountPrice(startingValue.multiply(Money.toAmount((offer.getValue().divide(new BigDecimal("100"))))));
}
return offer;
}
private List<Offer> removeOutOfDateOffers(List<Offer> offers){
Date now = new Date();
for (Offer offer : offers) {
if(offer.getStartDate()!= null && offer.getStartDate().after(now)){
offers.remove(offer);
}else
if(offer.getEndDate()!= null && offer.getEndDate().before(now)){
offers.remove(offer);
}
}
return offers;
}
/**
* Distribute item offers.
*
* @param order the order
* @param offers the offers
*/
// @SuppressWarnings("unchecked")
// private void distributeItemOffers(Order order, List<Offer> offers) {
// for (OrderItem item : order.getOrderItems()) {
// List<Offer> stackableOffersList = null;
// for (Offer offer : offers) {
// if (OfferType.ORDER_ITEM.equals(offer.getType())) {
// if (couldOfferApplyToOrderItem(offer, order, item)) {
// if (offer.isStackable()) {
// if (stackableOffersList == null) {
// stackableOffersList = new ArrayList<Offer>();
// }
// stackableOffersList.add(offer);
// } else {
// ItemOfferImpl candidateOffer = new ItemOfferImpl(offer, item.getRetailPrice(), item.getSalePrice());
// item.addCandidateItemOffer(candidateOffer);
// }
// }
// }
// }
// if (stackableOffersList != null) {
// StackedOffer stackedOffer = new StackedOffer(stackableOffersList, item.getRetailPrice(), item.getSalePrice());
// item.addCandidateItemOffer(stackedOffer);
// }
//
// // Sorts offers by priority then discounted price
// Collections.sort(item.getCandidateItemOffers(), new BeanComparator("priority", new BeanComparator("discountedPrice")));
// }
// }
/**
* Could offer apply to order item.
*
* @param offer the offer
* @param order the order
* @param item the item
*
* @return true, if successful
*/
private boolean couldOfferApplyToOrderItem(Offer offer, Order order, OrderItem item) {
// boolean appliesTo = false;
// boolean appliesWhen = false;
// TODO: Applies to rule should support any combination of the following expressions:
// TODO: // "all items",
// TODO: // "items whose ${reflected property} (eq, ne, in) ${value(s)}"
// TODO: // "items from category "${category}"
// TODO: .........................................
if(offer.getAppliesToRules() == null || offer.getAppliesToRules().equals("")){
// appliesTo = true;
}else{
// TODO: Evaluate rule to determine if this offer can apply to the given item
}
// TODO: if offer might apply to this item, then check the when condition
// TODO: "always"
// TODO: "when order contains $(qty} of item whose ${reflected property} (eq, ne, in) ${value}
if(offer.getAppliesWhenRules() == null || offer.getAppliesWhenRules().equals("")){
// appliesWhen = true;
}else{
// TODO: determine result of applies when rules
}
return true; // appliesTo && appliesWhen; Just return true for now
}
/**
* Choose item offers.
*
* @param order the order
*/
protected void chooseItemOffers(Order order) {
// Loop through offer
// Build list of order items that qualify for offer
// Sort list by item amount
//
}
/* (non-Javadoc)
* @see org.broadleafcommerce.offer.service.OfferService#lookupCodeByOffer(org.broadleafcommerce.offer.domain.Offer)
*/
@Override
public OfferCode lookupCodeByOffer(Offer offer) {
// TODO Auto-generated method stub
return null;
}
/* (non-Javadoc)
* @see org.broadleafcommerce.offer.service.OfferService#lookupOfferByCode(java.lang.String)
*/
@Override
public Offer lookupOfferByCode(String code) {
// TODO Auto-generated method stub
return null;
}
/* (non-Javadoc)
* @see org.broadleafcommerce.offer.service.OfferService#lookupValidOffersForSystem(java.lang.String)
*/
@Override
public List<Offer> lookupValidOffersForSystem(String system) {
// TODO Auto-generated method stub
return null;
}
}
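The pair of Collections.sort calls above leans on sort stability to produce "priority ascending, then discounted price descending". The same ordering can be expressed in one pass with a compound comparator; the helper below is a hypothetical sketch (not part of Broadleaf) that assumes a java.util.Comparator import plus the commons-collections ComparatorUtils and commons-beanutils BeanComparator already imported above.
    @SuppressWarnings("unchecked")
    private void sortByPriorityThenAmount(List<Offer> qualifiedOffers) {
        // Primary key: priority ascending; tie-breaker: discountedPrice descending.
        Comparator chained = ComparatorUtils.chainedComparator(new Comparator[] {
                new BeanComparator("priority"),
                ComparatorUtils.reversedComparator(new BeanComparator("discountedPrice"))
        });
        Collections.sort(qualifiedOffers, chained);
    }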
|
BroadleafCommerce/src-framework/org/broadleafcommerce/offer/service/OfferServiceImpl.java
|
package org.broadleafcommerce.offer.service;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import javax.annotation.Resource;
import org.apache.commons.beanutils.BeanComparator;
import org.broadleafcommerce.offer.domain.CandidateItemOffer;
import org.broadleafcommerce.offer.domain.CandidateItemOfferImpl;
import org.broadleafcommerce.offer.domain.Offer;
import org.broadleafcommerce.offer.domain.OfferCode;
import org.broadleafcommerce.order.domain.Order;
import org.broadleafcommerce.order.domain.OrderItem;
import org.broadleafcommerce.pricing.service.PricingService;
import org.broadleafcommerce.profile.domain.Customer;
import org.broadleafcommerce.type.OfferDiscountType;
import org.broadleafcommerce.type.OfferType;
import org.broadleafcommerce.util.money.Money;
import org.springframework.stereotype.Service;
/**
* The Class OfferServiceImpl.
*/
@Service("offerService")
public class OfferServiceImpl implements OfferService {
@Resource
private PricingService pricingService;
/*
* (non-Javadoc)
* @see org.broadleafcommerce.offer.service.OfferService#consumeOffer(org.broadleafcommerce.offer.domain.Offer, org.broadleafcommerce.profile.domain.Customer)
*/
@Override
public boolean consumeOffer(Offer offer, Customer customer) {
// TODO Auto-generated method stub
return false;
}
/*
* (non-Javadoc)
* @see org.broadleafcommerce.offer.service.OfferService#applyOffersToOrder(java.util.List, org.broadleafcommerce.order.domain.Order)
*/
@SuppressWarnings("unchecked")
public void applyOffersToOrder(List<Offer> offers, Order order) {
List<Offer> qualifiedOrderOffers = new ArrayList<Offer>();
List<CandidateItemOffer> qualifiedItemOffers = new ArrayList<CandidateItemOffer>();
order.removeAllOffers();
order.setCandidateOffers(new ArrayList<Offer>());
order = pricingService.calculateOrderTotal(order);
List<Offer> offersWithValidDates = removeOutOfDateOffers(offers);
if (offersWithValidDates != null) {
//
// . Pass One:
//
for (Offer offer : offersWithValidDates) {
//
// . Evaluate all offers and compute their discount amount as if they were the only offer on the order
//
if(offer.getType().equals(OfferType.ORDER)){
// TODO: Determine if order qualifies for offer
// Assume for now that all orders qualify
offer = calculateAtomOfferDiscount(offer, order.getSubTotal());
qualifiedOrderOffers.add(offer);
}
if(offer.getType().equals(OfferType.ORDER_ITEM)){
for (OrderItem orderItem : order.getOrderItems()) {
// TODO: Determine if orderItem qualifies for offer
// Assume for now that all orderItems qualify
if(couldOfferApplyToOrderItem(offer, order, orderItem))
qualifiedItemOffers.add(new CandidateItemOfferImpl(orderItem, offer ));
}
}
if(offer.getType().equals(OfferType.FULLFILLMENT_GROUP)){
// TODO: Handle Offer calculation for offer type of fullfillment group
}
}
//
// . Create a sorted list sorted by priority asc then amount desc
//
Collections.sort(qualifiedOrderOffers, new BeanComparator("discountedPrice"));
Collections.reverse(qualifiedOrderOffers);
Collections.sort(qualifiedOrderOffers, new BeanComparator("priority"));
Collections.sort(qualifiedItemOffers, new BeanComparator("discountedPrice"));
Collections.reverse(qualifiedItemOffers);
Collections.sort(qualifiedItemOffers, new BeanComparator("priority"));
//
// . Add offers that could be used on the order to the order.candidateOffers and item.candidateOffers lists respectively
//
order.setCandidateOffers(qualifiedOrderOffers);
for (OrderItem orderItem : order.getOrderItems()) {
orderItem.setCandidateItemOffers(qualifiedItemOffers);
}
//
// Pass Two:
//- Iterate through the list above and begin applying ALL of the offers to the order by doing the following:
for(OrderItem orderItem: order.getOrderItems()) {
for (CandidateItemOffer itemOffer : qualifiedItemOffers) {
if(orderItem.getCandidateItemOffers().contains(itemOffer)){
//- Determine the amount that should be discounted for each item
//----- If the items sale price is better than the discounted price, don't apply
if(itemOffer.getDiscountedPrice().greaterThan(orderItem.getSalePrice())){
// TODO: ----- If the offer requires other items, check to see if the items are still unmarked
if(requiresMultipleSkus(itemOffer)){
// TODO: apply offer to other skus
}
// ----- If the item itself has been marked by another discount then don't apply this offer unless the offer's applyDiscountToMarkedItems = true (edge case)
if(!orderItem.isMarkedForOffer() ||
(orderItem.isMarkedForOffer() && itemOffer.getOffer().isApplyDiscountToMarkedItems())){
//----- If the item already has a discount
if(orderItem.isMarkedForOffer()){
// and this offer is stackable, apply on top of the existing offer
if(itemOffer.getOffer().isStackable()){
//----- Create corresponding item adjustments records and if (markItems == true) then mark the items used so that this offer is possible
applyItemOffer(orderItem,itemOffer);
}
// and this offer is not-stackable, don't apply
}else{
//----- Create corresponding item adjustments records and if (markItems == true) then mark the items used so that this offer is possible
applyItemOffer(orderItem,itemOffer);
}
}
}
}
}
}
Money newOrderTotal = pricingService.calculateOrderTotal(order).getSubTotal();
// TODO: How and what do we do with order level offers?
for (Offer offer : qualifiedOrderOffers) {
if(order.getCandidateOffers().contains(offer)){
//- Determine the amount that should be discounted for each item
//----- If the order sale price is better than the discounted price, don't apply
if(newOrderTotal.greaterThan(order.getSubTotal())){
// ----- If the order itself has been marked by another discount then don't apply this offer unless the offer's applyDiscountToMarkedItems = true (edge case)
if(!order.isMarkedForOffer() ||
(order.isMarkedForOffer() && offer.isApplyDiscountToMarkedItems())){
//----- If the order already has a discount
if(order.isMarkedForOffer()){
// and this offer is stackable, apply on top of the existing offer
if(offer.isStackable()){
//----- Create corresponding item adjustments records and if (markItems == true) then mark the items used so that this offer is possible
applyOrderOffer(order,offer);
}
// and this offer is not-stackable, don't apply
}else{
//----- Create corresponding item adjustments records and if (markItems == true) then mark the items used so that this offer is possible
applyOrderOffer(order,offer);
}
}
}
}
}
//**************** PREVIOUS STRATEGY **************************
// order.setCandaditeOffers(offers);
// distributeItemOffers(order, offers);
//
// // At this point, each item may have a list of offers which have been sorted such that the
// // best offer is the first offer in the list.
//
// // Now we need to evaluate the order offers to determine if the order, item, or both should be
// // applied.
//
// // I see order offers having three variations of "stackable".
// // 1. Not-stackable,
// // 2. Stackable on top of item offers,
// // 3. Stackable on top of item and order offers
// // Based on this, we need to do the following:
// // 1. Build list of order offers stackable on top of item offers
// // 2. Build list of order offers stackable on top of just order offers
// // 3. Build list of order offers stackable that are not stackable
// // Next compute the following order totals:
// // E1. orderTotalWithItemOffersOnly = xxx;
// // E2. orderTotalWithItemAndOrderOffers = xxx;
// // E3. orderTotalWithBestOrderOfferOnly = xxx;
// //
// boolean e1wins=true;
// boolean e2wins=false;
// boolean e3wins=false;
// if (e1wins) {
// // TODO: Create ItemAdjustment and add them to each OrderItem for the winning offer on each item
// }
// if (e2wins) {
// // TODO: Create ItemAdjustment and add them to each OrderItem for the winning offer on each item
// // TODO: Create ItemAdjustment records for the winning Order offer(s) and add them to each item
// }
// if (e3wins) {
// // TODO: Create ItemAdjustment records for the winning Order offer(s) and add them to each item
// }
//
//
// // now we can apply the first offer for each item which might be a stacked offer
// // but first, we need to determine if a non-stacked order offer should be applied instead
// // TODO: handle item discount distribution (e.g. applies to maximum of 1 in this order)
// // TODO: compute order total with item discounts
// // TODO: compute order total without item discounts
// // TODO: compute best non-stackable order discount
// // TODO: compute best stackable order discount
// // TODO: compute best non-stackable order discount that works with item discounts
// // TODO: compute best
// // TODO: compute order total with discounts plus stackable order discounts
// // TODO: compute order total with
// //evaluateOffers(order);
//
}
}
private boolean requiresMultipleSkus(CandidateItemOffer itemOffer){
// TODO: Add determination code for offer requiring multiple skus
// Assume offer does not for now
return false;
}
private void applyItemOffer(OrderItem orderItem, CandidateItemOffer itemOffer){
// TODO: Apply item offer
//----- Create corresponding item adjustments records and if (markItems == true) then mark the items used so that this offer is possible
orderItem.setMarkedForOffer(true);
}
private void applyOrderOffer(Order order, Offer offer){
// TODO: Apply order offer
order.setMarkedForOffer(true);
}
private Offer calculateAtomOfferDiscount(Offer offer, Money startingValue){
if(offer.getDiscountType().equals(OfferDiscountType.AMOUNT_OFF)){
offer.setDiscountPrice(startingValue.subtract(offer.getValue()));
}
if(offer.getDiscountType().equals(OfferDiscountType.FIX_PRICE)){
offer.setDiscountPrice(offer.getValue());
}
if(offer.getDiscountType().equals(OfferDiscountType.PERCENT_OFF)){
offer.setDiscountPrice(startingValue.multiply(Money.toAmount((offer.getValue().divide(new BigDecimal("100"))))));
}
return offer;
}
private List<Offer> removeOutOfDateOffers(List<Offer> offers){
Date now = new Date();
for (Offer offer : offers) {
if(offer.getStartDate()!= null && offer.getStartDate().after(now)){
offers.remove(offer);
}else
if(offer.getEndDate()!= null && offer.getEndDate().before(now)){
offers.remove(offer);
}
}
return offers;
}
/**
* Distribute item offers.
*
* @param order the order
* @param offers the offers
*/
// @SuppressWarnings("unchecked")
// private void distributeItemOffers(Order order, List<Offer> offers) {
// for (OrderItem item : order.getOrderItems()) {
// List<Offer> stackableOffersList = null;
// for (Offer offer : offers) {
// if (OfferType.ORDER_ITEM.equals(offer.getType())) {
// if (couldOfferApplyToOrderItem(offer, order, item)) {
// if (offer.isStackable()) {
// if (stackableOffersList == null) {
// stackableOffersList = new ArrayList<Offer>();
// }
// stackableOffersList.add(offer);
// } else {
// ItemOfferImpl candidateOffer = new ItemOfferImpl(offer, item.getRetailPrice(), item.getSalePrice());
// item.addCandidateItemOffer(candidateOffer);
// }
// }
// }
// }
// if (stackableOffersList != null) {
// StackedOffer stackedOffer = new StackedOffer(stackableOffersList, item.getRetailPrice(), item.getSalePrice());
// item.addCandidateItemOffer(stackedOffer);
// }
//
// // Sorts offers by priority then discounted price
// Collections.sort(item.getCandidateItemOffers(), new BeanComparator("priority", new BeanComparator("discountedPrice")));
// }
// }
/**
* Could offer apply to order item.
*
* @param offer the offer
* @param order the order
* @param item the item
*
* @return true, if successful
*/
private boolean couldOfferApplyToOrderItem(Offer offer, Order order, OrderItem item) {
// boolean appliesTo = false;
// boolean appliesWhen = false;
// TODO: Applies to rule should support any combination of the following expressions:
// TODO: // "all items",
// TODO: // "items whose ${reflected property} (eq, ne, in) ${value(s)}"
// TODO: // "items from category "${category}"
// TODO: .........................................
if(offer.getAppliesToRules() == null || offer.getAppliesToRules().equals("")){
// appliesTo = true;
}else{
// TODO: Evaluate rule to determine if this offer can apply to the given item
}
// TODO: if offer might apply to this item, then check the when condition
// TODO: "always"
// TODO: "when order contains $(qty} of item whose ${reflected property} (eq, ne, in) ${value}
if(offer.getAppliesWhenRules() == null || offer.getAppliesWhenRules().equals("")){
// appliesWhen = true;
}else{
// TODO: determine result of applies when rules
}
return true; // appliesTo && appliesWhen; Just return true for now
}
/**
* Choose item offers.
*
* @param order the order
*/
protected void chooseItemOffers(Order order) {
// Loop through offer
// Build list of order items that qualify for offer
// Sort list by item amount
//
}
/* (non-Javadoc)
* @see org.broadleafcommerce.offer.service.OfferService#lookupCodeByOffer(org.broadleafcommerce.offer.domain.Offer)
*/
@Override
public OfferCode lookupCodeByOffer(Offer offer) {
// TODO Auto-generated method stub
return null;
}
/* (non-Javadoc)
* @see org.broadleafcommerce.offer.service.OfferService#lookupOfferByCode(java.lang.String)
*/
@Override
public Offer lookupOfferByCode(String code) {
// TODO Auto-generated method stub
return null;
}
/* (non-Javadoc)
* @see org.broadleafcommerce.offer.service.OfferService#lookupValidOffersForSystem(java.lang.String)
*/
@Override
public List<Offer> lookupValidOffersForSystem(String system) {
// TODO Auto-generated method stub
return null;
}
}
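Both versions of removeOutOfDateOffers shown above call offers.remove(offer) from inside a for-each loop, which throws ConcurrentModificationException on a typical ArrayList. A minimal iterator-based sketch of the same filtering, assuming only the Offer accessors used above and an extra java.util.Iterator import:
    private List<Offer> removeOutOfDateOffers(List<Offer> offers) {
        Date now = new Date();
        Iterator<Offer> iterator = offers.iterator();
        while (iterator.hasNext()) {
            Offer offer = iterator.next();
            boolean notYetStarted = offer.getStartDate() != null && offer.getStartDate().after(now);
            boolean alreadyEnded = offer.getEndDate() != null && offer.getEndDate().before(now);
            if (notYetStarted || alreadyEnded) {
                iterator.remove(); // remove through the iterator to avoid ConcurrentModificationException
            }
        }
        return offers;
    }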
|
Fixed some basic if logic and collections sorting
|
BroadleafCommerce/src-framework/org/broadleafcommerce/offer/service/OfferServiceImpl.java
|
Fixed some basic if logic and collections sorting
|
|
Java
|
apache-2.0
|
a3f1be5d1c878ff2fd2bf9cb9c8150625d426148
| 0
|
lorban/ehcache3,rishabhmonga/ehcache3,aurbroszniowski/ehcache3,chrisdennis/ehcache3,AbfrmBlr/ehcache3,henri-tremblay/ehcache3,AbfrmBlr/ehcache3,jhouserizer/ehcache3,akomakom/ehcache3,alexsnaps/ehcache3,ljacomet/ehcache3,GaryWKeim/ehcache3,anthonydahanne/ehcache3,albinsuresh/ehcache3,cljohnso/ehcache3,cschanck/ehcache3,cschanck/ehcache3,cljohnso/ehcache3,chrisdennis/ehcache3,albinsuresh/ehcache3,rkavanap/ehcache3,GaryWKeim/ehcache3,ehcache/ehcache3,aurbroszniowski/ehcache3,jhouserizer/ehcache3,rkavanap/ehcache3,ljacomet/ehcache3,kedar031/ehcache3,ehcache/ehcache3,lorban/ehcache3
|
/*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehcache.management.providers;
import org.terracotta.management.call.Parameter;
import org.terracotta.management.capabilities.Capability;
import org.terracotta.management.capabilities.context.CapabilityContext;
import org.terracotta.management.capabilities.descriptors.Descriptor;
import org.terracotta.management.context.Context;
import org.terracotta.management.stats.Statistic;
import java.util.Collection;
import java.util.Map;
/**
 * Interface to a provider of management capabilities for a certain object class.
*
* @author Ludovic Orban
*/
public interface ManagementProvider<T> {
/**
* The class of managed objects.
*
* @return a class.
*/
Class<T> managedType();
/**
* Register an object for management in the current provider.
*
* @param managedObject the object to manage.
*/
void register(T managedObject);
/**
* Unregister a managed object from the current provider.
*
* @param managedObject the managed object.
*/
void unregister(T managedObject);
/**
* Get the set of capability descriptors the current provider provides.
*
* @return the set of capability descriptors.
*/
Collection<Descriptor> getDescriptors();
/**
* Get the context that the provided capabilities need to run.
*
* @return the context requirements.
*/
CapabilityContext getCapabilityContext();
/**
* @return The full capability of this management provider
*/
Capability getCapability();
/**
* @return The name of this capability
*/
String getCapabilityName();
/**
* Collect statistics, if the provider supports this.
*
* @param context the context.
* @param statisticNames the statistic names to collect.
   * @param since The Unix time in ms from which statistics should be returned, for sample-based statistics.
* @return the statistic map, the key being the statistic names.
*/
Map<String, Statistic<?, ?>> collectStatistics(Context context, Collection<String> statisticNames, long since);
/**
* Call an action, if the provider supports this.
*
* @param context the context.
* @param methodName the method name.
* @param parameters the action method's parameters (objects and class names)
* @param returnType The expected return type
* @return the action's return value.
*/
<T> T callAction(Context context, String methodName, Class<T> returnType, Parameter... parameters);
/**
   * Check whether this management provider supports the given context
*
* @param context The management context, passed from the {@link org.ehcache.management.ManagementRegistry} methods
* @return true if the context is supported by this management provider
*/
boolean supports(Context context);
/**
* Closes the management provider. Called when cache manager is closing.
*/
void close();
}
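Most providers only implement a subset of these operations, so a small adapter base class with no-op defaults keeps concrete providers short. The sketch below assumes only the interface above; the adapter name and its defaults are illustrative, not part of Ehcache, and the imports are the same ones used by the interface.
public abstract class ManagementProviderAdapter<T> implements ManagementProvider<T> {
  private final Class<T> type;
  protected ManagementProviderAdapter(Class<T> type) {
    this.type = type;
  }
  @Override
  public Class<T> managedType() {
    return type;
  }
  @Override
  public void register(T managedObject) {
    // no-op by default; override to start tracking the managed object
  }
  @Override
  public void unregister(T managedObject) {
    // no-op by default
  }
  @Override
  public Map<String, Statistic<?, ?>> collectStatistics(Context context, Collection<String> statisticNames, long since) {
    return java.util.Collections.emptyMap(); // no statistics unless a subclass provides them
  }
  @Override
  public <V> V callAction(Context context, String methodName, Class<V> returnType, Parameter... parameters) {
    throw new UnsupportedOperationException("actions are not supported by " + getCapabilityName());
  }
  @Override
  public boolean supports(Context context) {
    return false; // subclasses decide which contexts they manage
  }
  @Override
  public void close() {
    // nothing to release by default
  }
  // getDescriptors(), getCapabilityContext(), getCapability() and getCapabilityName()
  // remain abstract: they describe the capability itself and have no sensible default.
}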
|
management/src/main/java/org/ehcache/management/providers/ManagementProvider.java
|
/*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ehcache.management.providers;
import org.terracotta.management.call.Parameter;
import org.terracotta.management.capabilities.Capability;
import org.terracotta.management.capabilities.context.CapabilityContext;
import org.terracotta.management.capabilities.descriptors.Descriptor;
import org.terracotta.management.context.Context;
import org.terracotta.management.stats.Statistic;
import java.util.Collection;
import java.util.Map;
/**
 * Interface to a provider of management capabilities for a certain object class.
*
* @author Ludovic Orban
*/
public interface ManagementProvider<T> {
/**
* The class of managed objects.
*
* @return a class.
*/
Class<T> managedType();
/**
* Register an object for management in the current provider.
*
* @param managedObject the object to manage.
*/
void register(T managedObject);
/**
* Unregister a managed object from the current provider.
*
* @param managedObject the managed object.
*/
void unregister(T managedObject);
/**
* Get the set of capability descriptors the current provider provides.
*
* @return the set of capability descriptors.
*/
Collection<Descriptor> getDescriptors();
/**
* Get the context that the provided capabilities need to run.
*
* @return the context requirements.
*/
CapabilityContext getCapabilityContext();
/**
* @return The full capability of this management provider
*/
Capability getCapability();
/**
* @return The name of this capability
*/
String getCapabilityName();
/**
* Collect statistics, if the provider supports this.
*
* @param context the context.
* @param statisticNames the statistic names to collect.
   * @param since The Unix time in ms from which statistics should be returned, for sample-based statistics.
* @return the statistic map, the key being the statistic namesø.
*/
Map<String, Statistic<?, ?>> collectStatistics(Context context, Collection<String> statisticNames, long since);
/**
* Call an action, if the provider supports this.
*
* @param context the context.
* @param methodName the method name.
* @param parameters the action method's parameters (objects and class names)
* @param returnType The expected return type
* @return the action's return value.
*/
<T> T callAction(Context context, String methodName, Class<T> returnType, Parameter... parameters);
/**
   * Check whether this management provider supports the given context
*
* @param context The management context, passed from the {@link org.ehcache.management.ManagementRegistry} methods
* @return true if the context is supported by this management provider
*/
boolean supports(Context context);
/**
* Closes the management provider. Called when cache manager is closing.
*/
void close();
}
|
Issue #691 Remove rogue character from javadoc
|
management/src/main/java/org/ehcache/management/providers/ManagementProvider.java
|
Issue #691 Remove rogue character from javadoc
|
|
Java
|
apache-2.0
|
a4970c5fc0eef4b8e4ca7692def72e6efa5e9968
| 0
|
mysisl/blade,mysisl/blade,mysisl/blade
|
/**
* Copyright (c) 2015-2016, Chill Zhuang 庄骞 (smallchill@163.com).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.smallchill.common.vo;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
import com.smallchill.core.constant.ConstCache;
import com.smallchill.core.interfaces.ILoader;
import com.smallchill.core.plugins.dao.Db;
import com.smallchill.core.toolbox.Func;
import com.smallchill.core.toolbox.Record;
import com.smallchill.core.toolbox.kit.CacheKit;
import com.smallchill.core.toolbox.kit.CollectionKit;
import com.smallchill.core.toolbox.kit.StrKit;
public class ShiroUser implements Serializable {
private static final long serialVersionUID = 6847303349754497231L;
	private Object id;// primary key
	private Object deptId;// department id
	private String deptName;// department name
	private String loginName;// login account
	private String name;// name
	private List<String> roleList;// role set
	private String roles;// role set (comma-joined)
	private Object subDepts;// child department set
	private Object subRoles;// child role set
	private Object subUsers;// child account set
public ShiroUser(Object id, Object deptId, String loginName, String name, List<String> roleList) {
this.id = id;
this.deptId = deptId;
this.deptName = Func.getDeptName(deptId);
this.loginName = loginName;
this.name = name;
this.roleList = roleList;
this.roles = CollectionKit.join(roleList.toArray(), ",");
		// Recursively look up the ids of all child departments
String deptSql;
String subDepts = null;
if (Func.isOracle()) {
deptSql = "select wm_concat(ID) subDepts from (select ID,PID,SIMPLENAME from TFW_DEPT start with ID in (#{join(deptIds)}) connect by prior ID=PID order by ID) a where a.ID not in (#{join(deptIds)})";
subDepts = Db.init().queryStr(deptSql, Record.create().set("deptIds", deptId.toString().split(",")));
} else {
String[] arr = deptId.toString().split(",");
StringBuilder sb = new StringBuilder();
for (String deptid : arr) {
deptSql = "select queryChildren(#{deptid},'tfw_dept') as subdepts";
String str = Db.init().queryStr(deptSql, Record.create().set("deptid", deptid));
sb.append(str).append(",");
}
subDepts = StrKit.removeSuffix(sb.toString(), ",");
}
this.subDepts = subDepts;
		// Recursively look up the ids of all child roles
String roleSql;
String subRoles = null;
if (Func.isOracle()) {
roleSql = "select wm_concat(ID) subRoles from (select ID,PID,NAME from TFW_ROLE start with ID in (#{join(roleIds)}) connect by prior ID=PID order by ID) a where a.ID not in (#{join(roleIds)})";
subRoles = Db.init().queryStr(roleSql, Record.create().set("roleIds", roleList));
} else {
StringBuilder sb = new StringBuilder();
for (String roleid : roleList) {
roleSql = "SELECT queryChildren(#{deptid},'tfw_role') as subroles";
String str = Db.init().queryStr(roleSql, Record.create().set("deptid", roleid));
sb.append(str).append(",");
}
subRoles = StrKit.removeSuffix(sb.toString(), ",");
}
this.subRoles = subRoles;
		// Look up the account ids that correspond to the child roles
List<Map<String, Object>> listUser = CacheKit.get(ConstCache.USER_CACHE, "user_all_list", new ILoader() {
@Override
public Object load() {
return Db.init().selectList("SELECT * FROM TFW_USER");
}
});
String[] subrolestr = Func.format(this.subRoles).split(",");
StringBuilder sbUser = new StringBuilder();
for (Map<String, Object> map : listUser) {
for (String str : subrolestr) {
if (Func.format(map.get("ROLEID")).indexOf(str) >= 0 && (("," + sbUser.toString() + ",").indexOf("," + Func.format(map.get("ID")) + ",") == -1)) {
Func.builder(sbUser, Func.format(map.get("ID")) + ",");
}
}
}
this.subUsers = StrKit.removeSuffix(sbUser.toString(), ",");
}
public Object getId() {
return id;
}
public void setId(Object id) {
this.id = id;
}
public Object getDeptId() {
return deptId;
}
public void setDeptId(Object deptId) {
this.deptId = deptId;
}
public String getLoginName() {
return loginName;
}
public void setLoginName(String loginName) {
this.loginName = loginName;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<String> getRoleList() {
return roleList;
}
public void setRoleList(List<String> roleList) {
this.roleList = roleList;
}
public String getRoles() {
return roles;
}
public void setRoles(String roles) {
this.roles = roles;
}
public String getDeptName() {
return deptName;
}
public void setDeptName(String deptName) {
this.deptName = deptName;
}
public Object getSubDepts() {
return subDepts;
}
public void setSubDepts(Object subDepts) {
this.subDepts = subDepts;
}
public Object getSubRoles() {
return subRoles;
}
public void setSubRoles(Object subRoles) {
this.subRoles = subRoles;
}
public Object getSubUsers() {
return subUsers;
}
public void setSubUsers(Object subUsers) {
this.subUsers = subUsers;
}
}
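In the constructor above, the ROLEID check uses a plain substring search (indexOf), so a sub-role id such as "1" would also match "11", and duplicates are filtered by wrapping the accumulated id string in commas. Below is a sketch of the same collection step using sets; the helper is hypothetical (not part of the original class), assumes ROLEID is a comma-separated list of role ids as the original lookup implies, and needs the extra java.util imports (Arrays, HashSet, LinkedHashSet, Set).
	private static String collectSubUserIds(List<Map<String, Object>> users, String[] subRoleIds) {
		Set<String> roleIdSet = new HashSet<String>(Arrays.asList(subRoleIds));
		Set<String> userIds = new LinkedHashSet<String>();// first-seen order, no duplicates
		for (Map<String, Object> user : users) {
			for (String roleId : String.valueOf(user.get("ROLEID")).split(",")) {
				if (roleIdSet.contains(roleId.trim())) {
					userIds.add(String.valueOf(user.get("ID")));
					break;
				}
			}
		}
		StringBuilder joined = new StringBuilder();
		for (String id : userIds) {
			if (joined.length() > 0) {
				joined.append(",");
			}
			joined.append(id);
		}
		return joined.toString();
	}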
|
src/main/java/com/smallchill/common/vo/ShiroUser.java
|
/**
* Copyright (c) 2015-2016, Chill Zhuang 庄骞 (smallchill@163.com).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.smallchill.common.vo;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
import com.smallchill.core.constant.ConstCache;
import com.smallchill.core.interfaces.ILoader;
import com.smallchill.core.plugins.dao.Db;
import com.smallchill.core.toolbox.Func;
import com.smallchill.core.toolbox.Record;
import com.smallchill.core.toolbox.kit.CacheKit;
import com.smallchill.core.toolbox.kit.CollectionKit;
import com.smallchill.core.toolbox.kit.StrKit;
public class ShiroUser implements Serializable {
private static final long serialVersionUID = 6847303349754497231L;
	private Object id;// primary key
	private Object deptId;// department id
	private String deptName;// department name
	private String loginName;// login account
	private String name;// name
	private List<String> roleList;// role set
	private String roles;// role set (comma-joined)
	private Object subDepts;// child department set
	private Object subRoles;// child role set
	private Object subUsers;// child account set
public ShiroUser(Object id, Object deptId, String loginName, String name, List<String> roleList) {
this.id = id;
this.deptId = deptId;
this.deptName = Func.getDeptName(deptId);
this.loginName = loginName;
this.name = name;
this.roleList = roleList;
this.roles = CollectionKit.join(roleList.toArray(), ",");
		// Recursively look up the ids of all child departments
String deptSql;
String subDepts = null;
if (Func.isOracle()) {
deptSql = "select wm_concat(ID) subDepts from (select ID,PID,SIMPLENAME from TFW_DEPT start with ID in (#{join(deptIds)}) connect by prior ID=PID order by ID) a where a.ID not in (#{join(deptIds)})";
subDepts = Db.init().queryStr(deptSql, Record.create().set("deptIds", deptId.toString().split(",")));
} else {
String[] arr = deptId.toString().split(",");
StringBuilder sb = new StringBuilder();
for (String deptid : arr) {
deptSql = "select queryChildren(#{deptid},'tfw_dept') as subdepts";
String str = Db.init().queryStr(deptSql, Record.create().set("deptid", deptid));
sb.append(str).append(",");
}
subDepts = StrKit.removeSuffix(sb.toString(), ",");
}
this.subDepts = subDepts;
		// Recursively look up the ids of all child roles
String roleSql;
String subRoles = null;
if (Func.isOracle()) {
roleSql = "select wm_concat(ID) subRoles from (select ID,PID,NAME from TFW_ROLE start with ID in (#{join(roleIds)}) connect by prior ID=PID order by ID) a where a.ID not in (#{join(roleIds)})";
subRoles = Db.init().queryStr(roleSql, Record.create().set("roleIds", roleList));
} else {
StringBuilder sb = new StringBuilder();
for (String roleid : roleList) {
roleSql = "SELECT queryChildren(#{deptid},'tfw_role') as subroles";
String str = Db.init().queryStr(roleSql, Record.create().set("deptid", roleid));
sb.append(str).append(",");
}
subRoles = StrKit.removeSuffix(sb.toString(), ",");
}
this.subRoles = subRoles;
		// Look up the account ids that correspond to the child roles
List<Map<String, Object>> listUser = CacheKit.get(ConstCache.USER_CACHE, "user_all_list", new ILoader() {
@Override
public Object load() {
return Db.init().selectList("SELECT * FROM TFW_USER");
}
});
String[] subrolestr = Func.format(this.subRoles).split(",");
StringBuilder sbUser = new StringBuilder();
for (Map<String, Object> map : listUser) {
for (String str : subrolestr) {
if (Func.format(map.get("ROLEID")).indexOf(str) >= 0 && (("," + sbUser.toString() + ",").indexOf("," + Func.format(map.get("ID")) + ",") == -1)) {
Func.builder(sbUser, Func.format(map.get("ID")) + ",");
}
}
}
this.subUsers = StrKit.removeSuffix(sbUser.toString(), ",");
}
public Object getId() {
return id;
}
public void setId(Object id) {
this.id = id;
}
public Object getDeptId() {
return deptId;
}
public void setDeptId(Object deptId) {
this.deptId = deptId;
}
public String getLoginName() {
return loginName;
}
public void setLoginName(String loginName) {
this.loginName = loginName;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public List<String> getRoleList() {
return roleList;
}
public void setRoleList(List<String> roleList) {
this.roleList = roleList;
}
public String getRoles() {
return roles;
}
public void setRoles(String roles) {
this.roles = roles;
}
public String getDeptName() {
return deptName;
}
public void setDeptName(String deptName) {
this.deptName = deptName;
}
public Object getSubDepts() {
return subDepts;
}
public void setSubDepts(Object subDepts) {
this.subDepts = subDepts;
}
public Object getSubRoles() {
return subRoles;
}
public void setSubRoles(Object subRoles) {
this.subRoles = subRoles;
}
public Object getSubUsers() {
return subUsers;
}
public void setSubUsers(Object subUsers) {
this.subUsers = subUsers;
}
/**
	 * The value returned here is used as the default <shiro:principal/> output.
*/
@Override
public String toString() {
return loginName;
}
}
|
Optimize shiroUser
|
src/main/java/com/smallchill/common/vo/ShiroUser.java
|
Optimize shiroUser
|
|
Java
|
apache-2.0
|
8b3922c30e7fc339001ea0ed6b50c134bd1fa7a5
| 0
|
jangorecki/h2o-3,jangorecki/h2o-3,YzPaul3/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,YzPaul3/h2o-3,michalkurka/h2o-3,mathemage/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,mathemage/h2o-3,spennihana/h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,YzPaul3/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,michalkurka/h2o-3,h2oai/h2o-dev,YzPaul3/h2o-3,spennihana/h2o-3,mathemage/h2o-3,jangorecki/h2o-3,mathemage/h2o-3,h2oai/h2o-3,mathemage/h2o-3,spennihana/h2o-3,michalkurka/h2o-3,h2oai/h2o-3,mathemage/h2o-3,h2oai/h2o-3,h2oai/h2o-3,YzPaul3/h2o-3,spennihana/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,h2oai/h2o-dev,spennihana/h2o-3,YzPaul3/h2o-3,YzPaul3/h2o-3,h2oai/h2o-3,h2oai/h2o-dev,spennihana/h2o-3,spennihana/h2o-3,jangorecki/h2o-3,jangorecki/h2o-3,mathemage/h2o-3,jangorecki/h2o-3,jangorecki/h2o-3,h2oai/h2o-3
|
package hex;
import hex.genmodel.GenModel;
import hex.genmodel.easy.EasyPredictModelWrapper;
import hex.genmodel.easy.RowData;
import hex.genmodel.easy.exception.PredictException;
import hex.genmodel.easy.prediction.*;
import org.joda.time.DateTime;
import water.*;
import water.api.StreamWriter;
import water.codegen.CodeGenerator;
import water.codegen.CodeGeneratorPipeline;
import water.exceptions.JCodeSB;
import water.fvec.*;
import water.util.*;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Random;
import static hex.ModelMetricsMultinomial.getHitRatioTable;
/**
* A Model models reality (hopefully).
* A model can be used to 'score' a row (make a prediction), or a collection of
* rows on any compatible dataset - meaning the row has all the columns with the
 * same names as used to build the model, and any categorical columns can
* be adapted.
*/
public abstract class Model<M extends Model<M,P,O>, P extends Model.Parameters, O extends Model.Output> extends Lockable<M> {
public interface DeepFeatures {
Frame scoreAutoEncoder(Frame frame, Key destination_key, boolean reconstruction_error_per_feature);
Frame scoreDeepFeatures(Frame frame, final int layer);
}
public interface GLRMArchetypes {
Frame scoreReconstruction(Frame frame, Key destination_key, boolean reverse_transform);
Frame scoreArchetypes(Frame frame, Key destination_key, boolean reverse_transform);
}
public interface LeafNodeAssignment {
Frame scoreLeafNodeAssignment(Frame frame, Key destination_key);
}
/**
* Default threshold for assigning class labels to the target class (for binomial models)
* @return threshold in 0...1
*/
public final double defaultThreshold() {
if (_output.nclasses() != 2 || _output._training_metrics == null)
return 0.5;
if (_output._validation_metrics != null && ((ModelMetricsBinomial)_output._validation_metrics)._auc != null)
return ((ModelMetricsBinomial)_output._validation_metrics)._auc.defaultThreshold();
if (((ModelMetricsBinomial)_output._training_metrics)._auc != null)
return ((ModelMetricsBinomial)_output._training_metrics)._auc.defaultThreshold();
return 0.5;
}
public final boolean isSupervised() { return _output.isSupervised(); }
/** Model-specific parameter class. Each model sub-class contains
* instance of one of these containing its builder parameters, with
* model-specific parameters. E.g. KMeansModel extends Model and has a
* KMeansParameters extending Model.Parameters; sample parameters include K,
* whether or not to normalize, max iterations and the initial random seed.
*
* <p>The non-transient fields are input parameters to the model-building
* process, and are considered "first class citizens" by the front-end - the
* front-end will cache Parameters (in the browser, in JavaScript, on disk)
* and rebuild Parameter instances from those caches.
*
   * WARNING: Model Parameters are not immutable objects and ModelBuilder can modify
   * them!
*/
public abstract static class Parameters extends Iced<Parameters> {
/** Maximal number of supported levels in response. */
public static final int MAX_SUPPORTED_LEVELS = 1000;
/** The short name, used in making Keys. e.g. "GBM" */
abstract public String algoName();
/** The pretty algo name for this Model (e.g., Gradient Boosting Method, rather than GBM).*/
abstract public String fullName();
/** The Java class name for this Model (e.g., hex.tree.gbm.GBM, rather than GBM).*/
abstract public String javaName();
public Key<Frame> _train; // User-Key of the Frame the Model is trained on
public Key<Frame> _valid; // User-Key of the Frame the Model is validated on, if any
public int _nfolds = 0;
public boolean _keep_cross_validation_predictions = false;
public boolean _parallelize_cross_validation = true;
public enum FoldAssignmentScheme {
AUTO, Random, Modulo, Stratified
}
protected long nFoldSeed() { return new Random().nextLong(); }
public FoldAssignmentScheme _fold_assignment = FoldAssignmentScheme.AUTO;
public Distribution.Family _distribution = Distribution.Family.AUTO;
public double _tweedie_power = 1.5;
public double _quantile_alpha = 0.5;
protected double defaultStoppingTolerance() { return 1e-3; }
abstract public long progressUnits();
// TODO: This field belongs in the front-end column-selection process and
// NOT in the parameters - because this requires all model-builders to have
// column strip/ignore code.
public String[] _ignored_columns;// column names to ignore for training
public boolean _ignore_const_cols; // True if dropping constant cols
public String _weights_column;
public String _offset_column;
public String _fold_column;
// Scoring a model on a dataset is not free; sometimes it is THE limiting
// factor to model building. By default, partially built models are only
// scored every so many major model iterations - throttled to limit scoring
// costs to less than 10% of the build time. This flag forces scoring for
// every iteration, allowing e.g. more fine-grained progress reporting.
public boolean _score_each_iteration;
/**
* Maximum allowed runtime in seconds for model training. Use 0 to disable.
*/
public double _max_runtime_secs = 0;
/**
* Early stopping based on convergence of stopping_metric.
* Stop if simple moving average of the stopping_metric does not improve by stopping_tolerance for
* k scoring events.
* Can only trigger after at least 2k scoring events. Use 0 to disable.
*/
public int _stopping_rounds = 0;
/**
* Metric to use for convergence checking, only for _stopping_rounds > 0.
*/
public ScoreKeeper.StoppingMetric _stopping_metric = ScoreKeeper.StoppingMetric.AUTO;
/**
* Relative tolerance for metric-based stopping criterion: stop if relative improvement is not at least this much.
*/
public double _stopping_tolerance = defaultStoppingTolerance();
/** Supervised models have an expected response they get to train with! */
public String _response_column; // response column name
/** Should all classes be over/under-sampled to balance the class
* distribution? */
public boolean _balance_classes = false;
/** When classes are being balanced, limit the resulting dataset size to
* the specified multiple of the original dataset size. Maximum relative
* size of the training data after balancing class counts (can be less
* than 1.0) */
public float _max_after_balance_size = 5.0f;
/**
* Desired over/under-sampling ratios per class (lexicographic order).
* Only when balance_classes is enabled.
* If not specified, they will be automatically computed to obtain class balance during training.
*/
public float[] _class_sampling_factors;
/** For classification models, the maximum size (in terms of classes) of
* the confusion matrix for it to be printed. This option is meant to
* avoid printing extremely large confusion matrices. */
public int _max_confusion_matrix_size = 20;
/**
* A model key associated with a previously trained Deep Learning
* model. This option allows users to build a new model as a
* continuation of a previously generated model.
*/
public Key<? extends Model> _checkpoint;
// Public no-arg constructor for reflective creation
public Parameters() { _ignore_const_cols = defaultDropConsCols(); }
/** @return the training frame instance */
public final Frame train() { return _train==null ? null : _train.get(); }
/** @return the validation frame instance, or null
* if a validation frame was not specified */
public final Frame valid() { return _valid==null ? null : _valid.get(); }
/** Read-Lock both training and validation User frames. */
public void read_lock_frames(Job job) {
Frame tr = train();
if (tr != null)
tr.read_lock(job._key);
if (_valid != null && !_train.equals(_valid))
_valid.get().read_lock(job._key);
}
/** Read-UnLock both training and validation User frames. This method is
* called on crash-cleanup paths, so it handles the case where the frames
* are not actually locked. */
public void read_unlock_frames(Job job) {
Frame tr = train();
if( tr != null ) tr.unlock(job._key,false);
if( _valid != null && !_train.equals(_valid) )
valid().unlock(job._key,false);
}
// Override in subclasses to change the default; e.g. true in GLM
protected boolean defaultDropConsCols() { return true; }
/** Type of missing columns during adaptation between train/test datasets
* Overload this method for models that have sparse data handling - a zero
* will preserve the sparseness. Otherwise, NaN is used.
* @return real-valued number (can be NaN) */
public double missingColumnsType() { return Double.NaN; }
public boolean hasCheckpoint() { return _checkpoint != null; }
// FIXME: this is really horrible hack, Model.Parameters has method checksum_impl,
// but not checksum, the API is totally random :(
public long checksum() {
return checksum_impl();
}
/**
* Compute a checksum based on all non-transient non-static ice-able assignable fields (incl. inherited ones) which have @API annotations.
* Sort the fields first, since reflection gives us the fields in random order and we don't want the checksum to be affected by the field order.
* NOTE: if a field is added to a Parameters class the checksum will differ even when all the previous parameters have the same value. If
* a client wants backward compatibility they will need to compare parameter values explicitly.
*
* The method is motivated by standard hash implementation `hash = hash * P + value` but we use high prime numbers in random order.
* @return checksum
*/
protected long checksum_impl() {
long xs = 0x600DL;
int count = 0;
Field[] fields = Weaver.getWovenFields(this.getClass());
Arrays.sort(fields,
new Comparator<Field>() {
public int compare(Field field1, Field field2) {
return field1.getName().compareTo(field2.getName());
}
});
for (Field f : fields) {
final long P = MathUtils.PRIMES[count % MathUtils.PRIMES.length];
Class<?> c = f.getType();
if (c.isArray()) {
try {
f.setAccessible(true);
if (f.get(this) != null) {
if (c.getComponentType() == Integer.TYPE){
int[] arr = (int[]) f.get(this);
xs = xs * P + (long) Arrays.hashCode(arr);
} else if (c.getComponentType() == Float.TYPE) {
float[] arr = (float[]) f.get(this);
xs = xs * P + (long) Arrays.hashCode(arr);
} else if (c.getComponentType() == Double.TYPE) {
double[] arr = (double[]) f.get(this);
xs = xs * P + (long) Arrays.hashCode(arr);
} else if (c.getComponentType() == Long.TYPE){
long[] arr = (long[]) f.get(this);
xs = xs * P + (long) Arrays.hashCode(arr);
} else {
Object[] arr = (Object[]) f.get(this);
xs = xs * P + (long) Arrays.deepHashCode(arr);
} //else lead to ClassCastException
} else {
xs = xs * P;
}
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
} catch (ClassCastException t) {
throw H2O.fail(); //no support yet for int[][] etc.
}
} else {
try {
f.setAccessible(true);
Object value = f.get(this);
if (value != null) {
xs = xs * P + (long)(value.hashCode());
} else {
xs = xs * P + P;
}
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
count++;
}
xs ^= (train() == null ? 43 : train().checksum()) * (valid() == null ? 17 : valid().checksum());
return xs;
}
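// For illustration only: the accumulation rule above is the classic polynomial hash
//   xs <- xs * P + h(field)   with P cycling through MathUtils.PRIMES,
// applied to the alphabetically sorted woven fields, so two Parameters instances get the
// same checksum only if every field hashes identically in the same order.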
}
public P _parms; // TODO: move things around so that this can be protected
public String [] _warnings = new String[0];
public void addWarning(String s){
_warnings = Arrays.copyOf(_warnings,_warnings.length+1);
_warnings[_warnings.length-1] = s;
}
/** Model-specific output class. Each model sub-class contains an instance
* of one of these containing its "output": the pieces of the model needed
* for scoring. E.g. KMeansModel has a KMeansOutput extending Model.Output
* which contains the cluster centers. The output also includes the names,
* domains and other fields which are determined at training time. */
public abstract static class Output extends Iced {
/** Names of the columns used in the model; these are used to match up with scoring data
* columns. The last name is the response column name (if any). */
public String _names[];
/** List of Keys to cross-validation models (non-null iff _parms._nfolds > 1 or _parms._fold_column != null) **/
public Key _cross_validation_models[];
/** List of Keys to cross-validation predictions (if requested) **/
public Key _cross_validation_predictions[];
public Output(){this(false,false,false);}
public Output(boolean hasWeights, boolean hasOffset, boolean hasFold) {
_hasWeights = hasWeights;
_hasOffset = hasOffset;
_hasFold = hasFold;
}
/** Any final prep-work just before model-building starts, but after the
* user has clicked "go". E.g., converting a response column to a categorical
* touches the entire column (can be expensive), makes a parallel vec
* (Key/Data leak management issues), and might throw IAE if there are too
* many classes. */
public Output( ModelBuilder b ) {
if( b == null ) {
_hasOffset = false;
_hasWeights = false;
_hasFold = false;
return;
}
_job = b._job;
_isSupervised = b.isSupervised();
if( b.error_count() > 0 )
throw new IllegalArgumentException(b.validationErrors());
// Capture the data "shape" the model is valid on
_names = b._train.names ();
_domains= b._train.domains();
_hasOffset = b.hasOffsetCol();
_hasWeights = b.hasWeightCol();
_hasFold = b.hasFoldCol();
_distribution = b._distribution;
_priorClassDist = b._priorClassDist;
}
/** Returns number of input features (OK for most supervised methods, need to override for unsupervised!) */
public int nfeatures() {
return _names.length - (_hasOffset?1:0) - (_hasWeights?1:0) - (_hasFold?1:0) - (isSupervised()?1:0);
}
/** Categorical/factor mappings, per column. Null for non-categorical cols.
* Columns match the post-init cleanup columns. The last column holds the
* response col categoricals for SupervisedModels. */
public String _domains[][];
/** List of all the associated ModelMetrics objects, so we can delete them
* when we delete this model. */
Key[] _model_metrics = new Key[0];
/** Job info: final status (canceled, crashed), build time */
public Job _job;
/**
* Training set metrics obtained during model training
*/
public ModelMetrics _training_metrics;
/**
* Validation set metrics obtained during model training (if a validation data set was specified)
*/
public ModelMetrics _validation_metrics;
/**
* Cross-Validation metrics obtained during model training
*/
public ModelMetrics _cross_validation_metrics;
/**
* User-facing model summary - Display model type, complexity, size and other useful stats
*/
public TwoDimTable _model_summary;
/**
* User-facing model scoring history - 2D table with modeling accuracy as a function of time/trees/epochs/iterations, etc.
*/
public TwoDimTable _scoring_history;
protected boolean _isSupervised;
public boolean isSupervised() { return _isSupervised; }
// The response column (if any) is always the last entry in _names; the flags below record
// which optional columns are present.
protected final boolean _hasOffset; // weights and offset are kept at designated position in the names array
protected final boolean _hasWeights;// only need to know if we have them
protected final boolean _hasFold;// only need to know if we have them
public boolean hasOffset () { return _hasOffset;}
public boolean hasWeights () { return _hasWeights;}
public boolean hasFold () { return _hasFold;}
public String responseName() { return isSupervised()?_names[responseIdx()]:null;}
public String weightsName () { return _hasWeights ?_names[weightsIdx()]:null;}
public String offsetName () { return _hasOffset ?_names[offsetIdx()]:null;}
public String foldName () { return _hasFold ?_names[foldIdx()]:null;}
// Vec layout is [c1,c2,...,cn,w?,o?,r], cn are predictor cols, r is response, w and o are weights and offset, both are optional
public int weightsIdx () {
if(!_hasWeights) return -1;
return _names.length - (isSupervised()?1:0) - (hasOffset()?1:0) - 1 - (hasFold()?1:0);
}
public int offsetIdx () {
if(!_hasOffset) return -1;
return _names.length - (isSupervised()?1:0) - (hasFold()?1:0) - 1;
}
public int foldIdx () {
if(!_hasFold) return -1;
return _names.length - (isSupervised()?1:0) - 1;
}
public int responseIdx () {
if(!isSupervised()) return -1;
return _names.length-1;
}
/** The names of the levels for a categorical response column. */
public String[] classNames() { assert isSupervised();
return _domains[_domains.length-1];
}
/** Is this model a classification model? (v. a regression or clustering model) */
public boolean isClassifier() { return isSupervised() && nclasses() > 1; }
/** Is this model a binomial classification model? (v. a regression or clustering model) */
public boolean isBinomialClassifier() { return isSupervised() && nclasses() == 2; }
public int nclasses() {
assert isSupervised();
String cns[] = classNames();
return cns==null ? 1 : cns.length;
}
public double [] _distribution;
public double [] _modelClassDist;
public double [] _priorClassDist;
// Note: some algorithms MUST redefine this method to return other model categories
public ModelCategory getModelCategory() {
if(isSupervised())
return (isClassifier() ?
(nclasses() > 2 ? ModelCategory.Multinomial : ModelCategory.Binomial) :
ModelCategory.Regression);
return ModelCategory.Unknown;
}
public boolean isAutoencoder() { return false; } // Override in DeepLearning and so on.
public synchronized ModelMetrics addModelMetrics(ModelMetrics mm) {
DKV.put(mm);
for( Key key : _model_metrics ) // Dup removal
if( key==mm._key ) return mm;
_model_metrics = Arrays.copyOf(_model_metrics, _model_metrics.length + 1);
_model_metrics[_model_metrics.length - 1] = mm._key;
return mm; // Flow coding
}
public synchronized void clearModelMetrics() { _model_metrics = new Key[0]; }
protected long checksum_impl() {
return (null == _names ? 13 : Arrays.hashCode(_names)) *
(null == _domains ? 17 : Arrays.deepHashCode(_domains)) *
getModelCategory().ordinal();
}
public void printTwoDimTables(StringBuilder sb, Object o) {
for (Field f : Weaver.getWovenFields(o.getClass())) {
Class<?> c = f.getType();
if (c.isAssignableFrom(TwoDimTable.class)) {
try {
f.setAccessible(true);
TwoDimTable t = (TwoDimTable) f.get(this);
if (t != null) sb.append(t.toString(1,false /*don't print the full table if too long*/));
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
}
}
@Override public String toString() {
StringBuilder sb = new StringBuilder();
if (_training_metrics!=null) sb.append(_training_metrics.toString());
if (_validation_metrics!=null) sb.append(_validation_metrics.toString());
if (_cross_validation_metrics!=null) sb.append(_cross_validation_metrics.toString());
printTwoDimTables(sb, this);
return sb.toString();
}
} // Output
protected String[][] scoringDomains() {return _output._domains;}
public O _output; // TODO: move things around so that this can be protected
public ModelMetrics addMetrics(ModelMetrics mm) { return _output.addModelMetrics(mm); }
public abstract ModelMetrics.MetricBuilder makeMetricBuilder(String[] domain);
/** Full constructor */
public Model( Key selfKey, P parms, O output) {
super(selfKey);
_parms = parms ; assert parms != null;
_output = output; // Output may not be set yet (it can be filled in later by the ModelBuilder)
}
/**
* Deviance of given distribution function at predicted value f
* @param w observation weight
* @param y (actual) response
* @param f (predicted) response in original response space
* @return deviance value
*/
public double deviance(double w, double y, double f) {
return new Distribution(Distribution.Family.gaussian).deviance(w, y, f);
}
protected ScoringInfo[] scoringInfo;
public ScoringInfo[] scoring_history() { return scoringInfo; }
/**
* Fill a ScoringInfo with data from the ModelMetrics for this model.
* @param scoringInfo
*/
public void fillScoringInfo(ScoringInfo scoringInfo) {
scoringInfo.is_classification = this._output.isClassifier();
scoringInfo.is_autoencoder = _output.isAutoencoder();
scoringInfo.scored_train = new ScoreKeeper(this._output._training_metrics);
scoringInfo.scored_valid = new ScoreKeeper(this._output._validation_metrics);
scoringInfo.scored_xval = new ScoreKeeper(this._output._cross_validation_metrics);
scoringInfo.validation = _output._validation_metrics != null;
scoringInfo.cross_validation = _output._cross_validation_metrics != null;
if (this._output.isBinomialClassifier()) {
scoringInfo.training_AUC = ((ModelMetricsBinomial)this._output._training_metrics)._auc;
scoringInfo.validation_AUC = this._output._validation_metrics == null ? null : ((ModelMetricsBinomial)this._output._validation_metrics)._auc;
}
}
// return the most up-to-date model metrics
public ScoringInfo last_scored() { return scoringInfo == null ? null : scoringInfo[scoringInfo.length-1]; }
// Lower is better
public float loss() {
switch (_parms._stopping_metric) {
case MSE:
return (float) mse();
case logloss:
return (float) logloss();
case deviance:
return (float) deviance();
case misclassification:
return (float) classification_error();
case AUC:
return (float)(1-auc());
case AUTO:
default:
return (float) (_output.isClassifier() ? logloss() : _output.isAutoencoder() ? mse() : deviance());
}
} // loss()
public int compareTo(M o) {
if (o._output.isClassifier() != _output.isClassifier()) throw new UnsupportedOperationException("Cannot compare classifier against regressor.");
if (o._output.isClassifier()) {
if (o._output.nclasses() != _output.nclasses())
throw new UnsupportedOperationException("Cannot compare models with different number of classes.");
}
return (loss() < o.loss() ? -1 : loss() > o.loss() ? 1 : 0);
}
public double classification_error() {
if (scoringInfo == null) return Double.NaN;
return last_scored().validation ? last_scored().scored_valid._classError : last_scored().scored_train._classError;
}
public double mse() {
if (scoringInfo == null) return Double.NaN;
return last_scored().validation ? last_scored().scored_valid._mse : last_scored().scored_train._mse;
}
public double auc() {
if (scoringInfo == null) return Double.NaN;
return last_scored().validation ? last_scored().scored_valid._AUC : last_scored().scored_train._AUC;
}
public double deviance() {
if (scoringInfo == null) return Double.NaN;
return last_scored().validation ? last_scored().scored_valid._mean_residual_deviance : last_scored().scored_train._mean_residual_deviance;
}
public double logloss() {
if (scoringInfo == null) return Double.NaN;
return last_scored().validation ? last_scored().scored_valid._logloss : last_scored().scored_train._logloss;
}
/** Adapt a Test/Validation Frame to be compatible with a Training Frame. The
* intention here is that ModelBuilders can assume the test set has the same
* count of columns, and within each factor column the same set of
* same-numbered levels. Extra levels are renumbered past those in the
* Train set but will still be present in the Test set, thus requiring
* range-checking.
*
* This routine is used before model building (with no Model made yet) to
* check for compatible datasets, and also used to prepare a large dataset
* for scoring (with a Model).
*
* Adaptation does the following things:
* - Remove any "extra" Vecs appearing only in the test and not the train
* - Insert any "missing" Vecs appearing only in the train and not the test
* with all NAs ({@link Parameters#missingColumnsType()}). This will issue a warning,
* and if the "expensive" flag is false won't actually make the replacement
* column but instead will bail out of the whole adaptation (but
* will continue looking for more warnings).
* - If all columns are missing, issue an error.
* - Renumber matching cat levels to match the Train levels; this might make
* "holes" in the Test set cat levels, if some are not in the Test set.
* - Extra Test levels are renumbered past the end of the Train set, hence
* the train and test levels match up to all the train levels; there might
* be extra Test levels past that.
* - For all mis-matched levels, issue a warning.
*
* The {@code test} frame is updated in-place to be compatible, by altering
* the names and Vecs; make a defensive copy if you do not want it modified.
* There is a fast-path cutout if the test set is already compatible. Since
* the test set is conditionally modified (extra CategoricalWrappedVecs may be
* added), it is recommended to use a Scope enter/exit to track Vec lifetimes.
*
* @param test Testing Frame, updated in-place
* @param expensive Try hard to adapt; this might involve the creation of
* whole Vecs and thus get expensive. If {@code false}, then only adapt if
* there are no warnings or errors; otherwise only the messages are produced.
* Created Vecs have to be deleted by the caller (e.g. Scope.enter/exit).
* @return Array of warnings; zero length (never null) for no warnings.
* Throws {@code IllegalArgumentException} if no columns are in common, or
* if any factor column has no levels in common.
*/
public String[] adaptTestForTrain( Frame test, boolean expensive, boolean computeMetrics) {
return adaptTestForTrain(_output._names, _output.weightsName(), _output.offsetName(), _output.foldName(), _output.responseName(), _output._domains, test, _parms.missingColumnsType(), expensive, computeMetrics);
}
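// Illustrative usage sketch (not from the original source): adapting a hypothetical test
// frame before scoring, with a Scope tracking any Vecs created during adaptation:
//   Scope.enter();
//   try {
//     String[] warns = model.adaptTestForTrain(test, true /*expensive*/, true /*computeMetrics*/);
//     for (String w : warns) Log.warn(w);
//     Frame preds = model.score(test); // test is now column/domain compatible with the model
//   } finally {
//     Scope.exit();
//   }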
/**
* @param names Training column names
* @param weights Name of column with observation weights, weights are NOT filled in if missing in test frame
* @param offset Name of column with offset; if not null (i.e. the model was trained with an offset), the offset column MUST be present in the test data as well, otherwise we cannot score and an IAE is thrown.
* @param fold Name of the fold column, if any; a missing fold column is substituted with a column of 0s
* @param response Name of response column, response is NOT filled in if missing in test frame
* @param domains Training column levels
* @param missing Substitute for missing columns; usually NaN
* */
public static String[] adaptTestForTrain(String[] names, String weights, String offset, String fold, String response, String[][] domains, Frame test, double missing, boolean expensive, boolean computeMetrics) throws IllegalArgumentException {
if( test == null) return new String[0];
// Fast path cutout: already compatible
String[][] tdomains = test.domains();
if( names == test._names && domains == tdomains )
return new String[0];
// Fast path cutout: already compatible but needs work to test
if( Arrays.equals(names,test._names) && Arrays.deepEquals(domains,tdomains) )
return new String[0];
// Build the validation set to be compatible with the training set.
// Toss out extra columns, complain about missing ones, remap categoricals
ArrayList<String> msgs = new ArrayList<>();
Vec vvecs[] = new Vec[names.length];
int good = 0; // Any matching column names, at all?
int convNaN = 0;
for( int i=0; i<names.length; i++ ) {
Vec vec = test.vec(names[i]); // Search in the given validation set
// For supervised problems, if the test set has no response, then we don't fill that in with NAs.
boolean isResponse = response != null && names[i].equals(response);
boolean isWeights = weights != null && names[i].equals(weights);
boolean isOffset = offset != null && names[i].equals(offset);
boolean isFold = fold != null && names[i].equals(fold);
if(vec == null && isResponse && computeMetrics)
throw new IllegalArgumentException("Test/Validation dataset is missing response vector '" + response + "'");
if(vec == null && isOffset)
throw new IllegalArgumentException("Test/Validation dataset is missing offset vector '" + offset + "'");
if(vec == null && isWeights && computeMetrics && expensive) {
vec = test.anyVec().makeCon(1);
msgs.add(H2O.technote(1, "Test/Validation dataset is missing the weights column '" + names[i] + "' (needed because a response was found and metrics are to be computed): substituting in a column of 1s"));
//throw new IllegalArgumentException(H2O.technote(1, "Test dataset is missing weights vector '" + weights + "' (needed because a response was found and metrics are to be computed)."));
}
// If a training set column is missing in the validation set, complain and fill in with NAs.
if( vec == null) {
String str = null;
if( expensive ) {
if (isFold) {
str = "Test/Validation dataset is missing fold column '" + names[i] + "': substituting in a column of 0s";
vec = test.anyVec().makeCon(0);
} else {
str = "Test/Validation dataset is missing training column '" + names[i] + "': substituting in a column of NAs";
vec = test.anyVec().makeCon(missing);
convNaN++;
}
vec.setDomain(domains[i]);
}
msgs.add(str);
}
if( vec != null ) { // I have a column with a matching name
if( domains[i] != null ) { // Model expects a categorical
if (vec.isString())
vec = VecUtils.stringToCategorical(vec); //turn a String column into a categorical column (we don't delete the original vec here)
if( expensive && vec.domain() != domains[i] && !Arrays.equals(vec.domain(),domains[i]) ) { // Result needs to be the same categorical
CategoricalWrappedVec evec;
try {
evec = vec.adaptTo(domains[i]); // Convert to categorical or throw IAE
} catch( NumberFormatException nfe ) {
throw new IllegalArgumentException("Test/Validation dataset has a non-categorical column '"+names[i]+"' which is categorical in the training data");
}
String[] ds = evec.domain();
assert ds != null && ds.length >= domains[i].length;
if( isResponse && vec.domain() != null && ds.length == domains[i].length+vec.domain().length )
throw new IllegalArgumentException("Test/Validation dataset has a categorical response column '"+names[i]+"' with no levels in common with the model");
if (ds.length > domains[i].length)
msgs.add("Test/Validation dataset column '" + names[i] + "' has levels not trained on: " + Arrays.toString(Arrays.copyOfRange(ds, domains[i].length, ds.length)));
vec = evec; good++;
} else {
good++;
}
} else if( vec.isCategorical() ) {
throw new IllegalArgumentException("Test/Validation dataset has categorical column '"+names[i]+"' which is real-valued in the training data");
} else {
good++; // Assumed compatible; not checking e.g. Strings vs UUID
}
}
vvecs[i] = vec;
}
if( good == convNaN )
throw new IllegalArgumentException("Test/Validation dataset has no columns in common with the training set");
if( good == names.length || (response != null && test.find(response) == -1 && good == names.length - 1) ) // Only update if got something for all columns
test.restructure(names,vvecs,good);
return msgs.toArray(new String[msgs.size()]);
}
/**
* Bulk score the frame, and auto-name the resulting predictions frame.
* @see #score(Frame, String)
* @param fr frame which should be scored
* @return A new frame containing the predicted values. For classification it
* contains a column with prediction and distribution for all
* response classes. For regression it contains only one column with
* predicted values.
* @throws IllegalArgumentException
*/
public Frame score(Frame fr) throws IllegalArgumentException {
return score(fr, null);
}
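// Illustrative sketch (not from the original source): the returned predictions Frame lives
// in the DKV and must be deleted by the caller once it is no longer needed:
//   Frame preds = model.score(testFrame);
//   // ... inspect preds ...
//   preds.delete();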
/** Bulk score the frame {@code fr}, producing a Frame result; the 1st
* Vec is the predicted class, the remaining Vecs are the probability
* distributions. For Regression (single-class) models, the 1st and only
* Vec is the prediction value. The result is in the DKV; caller is
* responsible for deleting.
*
* @param fr frame which should be scored
* @return A new frame containing the predicted values. For classification it
* contains a column with prediction and distribution for all
* response classes. For regression it contains only one column with
* predicted values.
* @throws IllegalArgumentException
*/
public Frame score(Frame fr, String destination_key) throws IllegalArgumentException {
Frame adaptFr = new Frame(fr);
boolean computeMetrics = (!isSupervised() || adaptFr.find(_output.responseName()) != -1);
adaptTestForTrain(adaptFr,true, computeMetrics); // Adapt
Frame output = predictScoreImpl(fr, adaptFr, destination_key); // Predict & Score
// Log modest confusion matrices
Vec predicted = output.vecs()[0]; // Modeled/predicted response
String mdomain[] = predicted.domain(); // Domain of predictions (union of test and train)
// Output is in the model's domain, but needs to be mapped to the scored
// dataset's domain.
if(_output.isClassifier() && computeMetrics) {
// assert(mdomain != null); // label must be categorical
ModelMetrics mm = ModelMetrics.getFromDKV(this,fr);
ConfusionMatrix cm = mm.cm();
if (cm != null && cm._domain != null) //don't print table for regression
if( cm._cm.length < _parms._max_confusion_matrix_size/*Print size limitation*/ ) {
Log.info(cm.table().toString(1));
}
if (mm.hr() != null) {
Log.info(getHitRatioTable(mm.hr()));
}
Vec actual = fr.vec(_output.responseName());
if( actual != null ) { // Predict does not have an actual, scoring does
String sdomain[] = actual.domain(); // Scored/test domain; can be null
if (sdomain != null && mdomain != sdomain && !Arrays.equals(mdomain, sdomain))
output.replace(0, new CategoricalWrappedVec(actual.group().addVec(), actual._rowLayout, sdomain, predicted._key));
}
}
cleanup_adapt(adaptFr, fr);
return output;
}
// Remove temp keys. TODO: Really should use Scope but Scope does not
// currently allow nested-key-keepers.
static protected void cleanup_adapt( Frame adaptFr, Frame fr ) {
Key[] keys = adaptFr.keys();
for( int i=0; i<keys.length; i++ )
if( fr.find(keys[i]) != -1 ) // Exists in the original frame?
keys[i] = null; // Do not delete it
adaptFr.delete();
}
/** Score an already adapted frame. Returns a new Frame with new result
* vectors, all in the DKV. Caller responsible for deleting. Input is
* already adapted to the Model's domain, so the output is also. Also
* computes the metrics for this frame.
*
* @param adaptFrm Already adapted frame
* @return A Frame containing the prediction column, and class distribution
*/
protected Frame predictScoreImpl(Frame fr, Frame adaptFrm, String destination_key) {
final boolean computeMetrics = (!isSupervised() || adaptFrm.find(_output.responseName()) != -1);
// Build up the names & domains.
final int nc = _output.nclasses();
final int ncols = nc==1?1:nc+1; // Regression has 1 predict col; classification also has class distribution
String[] names = new String[ncols];
String[][] domains = new String[ncols][];
names[0] = "predict";
for(int i = 1; i < names.length; ++i) {
names[i] = _output.classNames()[i - 1];
// turn integer class labels such as 0, 1, etc. into p0, p1, etc.
try {
Integer.valueOf(names[i]);
names[i] = "p" + names[i];
} catch (Throwable t) {
// do nothing, non-integer names are fine already
}
}
domains[0] = nc==1 ? null : !computeMetrics ? _output._domains[_output._domains.length-1] : adaptFrm.lastVec().domain();
// Score the dataset, building the class distribution & predictions
BigScore bs = new BigScore(domains[0],ncols,adaptFrm.means(),_output.hasWeights() && adaptFrm.find(_output.weightsName()) >= 0,computeMetrics, true /*make preds*/).doAll(ncols, Vec.T_NUM, adaptFrm);
if (computeMetrics)
bs._mb.makeModelMetrics(this, fr, adaptFrm, bs.outputFrame());
return bs.outputFrame((null == destination_key ? Key.make() : Key.make(destination_key)), names, domains);
}
/** Score an already adapted frame. Returns a MetricBuilder that can be used to make a model metrics.
* @param adaptFrm Already adapted frame
* @return MetricBuilder
*/
protected ModelMetrics.MetricBuilder scoreMetrics(Frame adaptFrm) {
final boolean computeMetrics = (!isSupervised() || adaptFrm.find(_output.responseName()) != -1);
// Build up the names & domains.
final int nc = _output.nclasses();
final int ncols = nc==1?1:nc+1; // Regression has 1 predict col; classification also has class distribution
String[] names = new String[ncols];
String[][] domains = new String[ncols][];
names[0] = "predict";
for(int i = 1; i < names.length; ++i) {
names[i] = _output.classNames()[i - 1];
// turn integer class labels such as 0, 1, etc. into p0, p1, etc.
try {
Integer.valueOf(names[i]);
names[i] = "p" + names[i];
} catch (Throwable t) {
// do nothing, non-integer names are fine already
}
}
domains[0] = nc==1 ? null : !computeMetrics ? _output._domains[_output._domains.length-1] : adaptFrm.lastVec().domain();
// Score the dataset, building the class distribution & predictions
BigScore bs = new BigScore(domains[0],ncols,adaptFrm.means(),_output.hasWeights() && adaptFrm.find(_output.weightsName()) >= 0,computeMetrics, false /*no preds*/).doAll(adaptFrm);
return bs._mb;
}
private class BigScore extends MRTask<BigScore> {
final String[] _domain; // Prediction domain; union of test and train classes
final int _npredcols; // Number of columns in prediction; nclasses+1 - can be less than the prediction domain
ModelMetrics.MetricBuilder _mb;
final double[] _mean; // Column means of test frame
final boolean _computeMetrics; // Whether to compute metrics for this frame
final boolean _hasWeights;
final boolean _makePreds;
BigScore( String[] domain, int ncols, double[] mean, boolean testHasWeights, boolean computeMetrics, boolean makePreds ) {
_domain = domain; _npredcols = ncols; _mean = mean; _computeMetrics = computeMetrics; _makePreds = makePreds;
if(_output._hasWeights && _computeMetrics && !testHasWeights)
throw new IllegalArgumentException("Missing weights when computing validation metrics.");
_hasWeights = testHasWeights;
}
@Override public void map( Chunk chks[], NewChunk cpreds[] ) {
if (isCancelled()) return;
Chunk weightsChunk = _hasWeights && _computeMetrics ? chks[_output.weightsIdx()] : new C0DChunk(1, chks[0]._len);
Chunk offsetChunk = _output.hasOffset() ? chks[_output.offsetIdx()] : new C0DChunk(0, chks[0]._len);
Chunk responseChunk = null;
double [] tmp = new double[_output.nfeatures()];
float [] actual = null;
_mb = Model.this.makeMetricBuilder(_domain);
if (_computeMetrics) {
if (isSupervised()) {
actual = new float[1];
responseChunk = chks[_output.responseIdx()];
} else
actual = new float[chks.length];
}
double[] preds = _mb._work; // Sized for the union of test and train classes
int len = chks[0]._len;
for (int row = 0; row < len; row++) {
double weight = weightsChunk.atd(row);
if (weight == 0) {
if (_makePreds) {
for (int c = 0; c < _npredcols; c++) // Output predictions; sized for train only (excludes extra test classes)
cpreds[c].addNum(0);
}
continue;
}
double offset = offsetChunk.atd(row);
double [] p = score0(chks, weight, offset, row, tmp, preds);
if (_computeMetrics) {
if(isSupervised()) {
actual[0] = (float)responseChunk.atd(row);
} else {
for(int i = 0; i < actual.length; ++i)
actual[i] = (float)chks[i].atd(row);
}
_mb.perRow(preds, actual, weight, offset, Model.this);
}
if (_makePreds) {
for (int c = 0; c < _npredcols; c++) // Output predictions; sized for train only (excludes extra test classes)
cpreds[c].addNum(p[c]);
}
}
}
@Override public void reduce( BigScore bs ) { if(_mb != null)_mb.reduce(bs._mb); }
@Override protected void postGlobal() { if(_mb != null)_mb.postGlobal(); }
}
/** Bulk scoring API for one row. Chunks are all compatible with the model,
* and expect the last Chunks are for the final distribution and prediction.
* Default method is to just load the data into the tmp array, then call
* subclass scoring logic. */
public double[] score0( Chunk chks[], int row_in_chunk, double[] tmp, double[] preds ) {
return score0(chks, 1, 0, row_in_chunk, tmp, preds);
}
public double[] score0( Chunk chks[], double weight, double offset, int row_in_chunk, double[] tmp, double[] preds ) {
assert(_output.nfeatures() == tmp.length);
for( int i=0; i< tmp.length; i++ )
tmp[i] = chks[i].atd(row_in_chunk);
double [] scored = score0(tmp, preds, weight, offset);
if(isSupervised()) {
// Correct probabilities obtained from training on oversampled data back to original distribution
// C.f. http://gking.harvard.edu/files/0s.pdf Eq.(27)
if( _output.isClassifier()) {
if (_parms._balance_classes)
GenModel.correctProbabilities(scored, _output._priorClassDist, _output._modelClassDist);
//assign label at the very end (after potentially correcting probabilities)
scored[0] = hex.genmodel.GenModel.getPrediction(scored, _output._priorClassDist, tmp, defaultThreshold());
}
}
return scored;
}
/** Subclasses implement the scoring logic. The data is pre-loaded into a
* re-used temp array, in the order the model expects. The predictions are
* loaded into the re-used temp array, which is also returned. */
protected abstract double[] score0(double data[/*ncols*/], double preds[/*nclasses+1*/]);
/**Override scoring logic for models that handle weight/offset**/
protected double[] score0(double data[/*ncols*/], double preds[/*nclasses+1*/], double weight, double offset) {
assert (weight == 1 && offset == 0) : "Override this method for non-trivial weight/offset!";
return score0(data, preds);
}
// Version where the user has just ponied-up an array of data to be scored.
// Data must be in proper order. Handy for JUnit tests.
public double score(double[] data){ return ArrayUtils.maxIndex(score0(data, new double[_output.nclasses()])); }
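// Illustrative sketch (not from the original source): hand-scoring a single row in a unit
// test; the array must hold the predictor values in the same order as _output._names:
//   double[] row = new double[model._output.nfeatures()];
//   // ... fill row with predictor values ...
//   double winner = model.score(row); // for classifiers, the index of the most likely class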
@Override protected Futures remove_impl( Futures fs ) {
if (_output._model_metrics != null)
for( Key k : _output._model_metrics )
k.remove(fs);
return super.remove_impl(fs);
}
/** Write out K/V pairs, in this case model metrics. */
@Override protected AutoBuffer writeAll_impl(AutoBuffer ab) {
if (_output._model_metrics != null)
for( Key k : _output._model_metrics )
ab.putKey(k);
return super.writeAll_impl(ab);
}
@Override protected Keyed readAll_impl(AutoBuffer ab, Futures fs) {
if (_output._model_metrics != null)
for( Key k : _output._model_metrics )
ab.getKey(k,fs); // Load model metrics
return super.readAll_impl(ab,fs);
}
@Override protected long checksum_impl() { return _parms.checksum_impl() * _output.checksum_impl(); }
// ==========================================================================
/** Return a String which is a valid Java program representing a class that
* implements the Model. The Java is of the form:
* <pre>
* class UUIDxxxxModel {
* public static final String NAMES[] = { ....column names... }
* public static final String DOMAINS[][] = { ....domain names... }
* // Pass in data in a double[], pre-aligned to the Model's requirements.
* // Jam predictions into the preds[] array; preds[0] is reserved for the
* // main prediction (class for classifiers or value for regression),
* // and remaining columns hold a probability distribution for classifiers.
* double[] predict( double data[], double preds[] );
* double[] map( HashMap < String,Double > row, double data[] );
* // Does the mapping lookup for every row, no allocation
* double[] predict( HashMap < String,Double > row, double data[], double preds[] );
* // Allocates a double[] for every row
* double[] predict( HashMap < String,Double > row, double preds[] );
* // Allocates a double[] and a double[] for every row
* double[] predict( HashMap < String,Double > row );
* }
* </pre>
*/
public final String toJava(boolean preview, boolean verboseCode) {
// 32k buffer by default
ByteArrayOutputStream os = new ByteArrayOutputStream(Short.MAX_VALUE);
// We do not need to close BAOS
/* ignore returned stream */ toJava(os, preview, verboseCode);
return os.toString();
}
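// Illustrative sketch (hypothetical file handling, not from the original source): exporting
// the generated POJO source to disk for offline compilation against h2o-genmodel.jar:
//   String pojo = model.toJava(false /*full source*/, true /*verbose*/);
//   java.nio.file.Files.write(java.nio.file.Paths.get(model._key + ".java"), pojo.getBytes());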
public final SBPrintStream toJava(OutputStream os, boolean preview, boolean verboseCode) {
if (preview /* && toJavaCheckTooBig() */) {
os = new LineLimitOutputStreamWrapper(os, 1000);
}
return toJava(new SBPrintStream(os), preview, verboseCode);
}
protected SBPrintStream toJava(SBPrintStream sb, boolean isGeneratingPreview, boolean verboseCode) {
CodeGeneratorPipeline fileCtx = new CodeGeneratorPipeline(); // preserve file context
String modelName = JCodeGen.toJavaId(_key.toString());
// HEADER
sb.p("/*").nl();
sb.p(" Licensed under the Apache License, Version 2.0").nl();
sb.p(" http://www.apache.org/licenses/LICENSE-2.0.html").nl();
sb.nl();
sb.p(" AUTOGENERATED BY H2O at ").p(new DateTime().toString()).nl();
sb.p(" ").p(H2O.ABV.projectVersion()).nl();
sb.p(" ").nl();
sb.p(" Standalone prediction code with sample test data for ").p(this.getClass().getSimpleName()).p(" named ").p(modelName)
.nl();
sb.nl();
sb.p(" How to download, compile and execute:").nl();
sb.p(" mkdir tmpdir").nl();
sb.p(" cd tmpdir").nl();
sb.p(" curl http:/").p(H2O.SELF.toString()).p("/3/h2o-genmodel.jar > h2o-genmodel.jar").nl();
sb.p(" curl http:/").p(H2O.SELF.toString()).p("/3/Models.java/").pobj(_key).p(" > ").p(modelName).p(".java").nl();
sb.p(" javac -cp h2o-genmodel.jar -J-Xmx2g -J-XX:MaxPermSize=128m ").p(modelName).p(".java").nl();
// Intentionally disabled since there is no main method in generated code
// sb.p("// java -cp h2o-genmodel.jar:. -Xmx2g -XX:MaxPermSize=256m -XX:ReservedCodeCacheSize=256m ").p(modelName).nl();
sb.nl();
sb.p(" (Note: Try java argument -XX:+PrintCompilation to show runtime JIT compiler behavior.)").nl();
if (_parms._offset_column != null) {
sb.nl();
sb.nl();
sb.nl();
sb.p(" NOTE: Java model export does not support offset_column.").nl();
sb.nl();
Log.warn("Java model export does not support offset_column.");
}
if (isGeneratingPreview && toJavaCheckTooBig()) {
sb.nl();
sb.nl();
sb.nl();
sb.p(" NOTE: Java model is too large to preview, please download as shown above.").nl();
sb.nl();
return sb;
}
sb.p("*/").nl();
sb.p("import java.util.Map;").nl();
sb.p("import hex.genmodel.GenModel;").nl();
sb.p("import hex.genmodel.annotations.ModelPojo;").nl();
sb.nl();
String algo = this.getClass().getSimpleName().toLowerCase().replace("model", "");
sb.p("@ModelPojo(name=\"").p(modelName).p("\", algorithm=\"").p(algo).p("\")").nl();
sb.p("public class ").p(modelName).p(" extends GenModel {").nl().ii(1);
sb.ip("public hex.ModelCategory getModelCategory() { return hex.ModelCategory." + _output
.getModelCategory() + "; }").nl();
toJavaInit(sb, fileCtx).nl();
toJavaNAMES(sb, fileCtx);
toJavaNCLASSES(sb);
toJavaDOMAINS(sb, fileCtx);
toJavaPROB(sb);
toJavaSuper(modelName, sb); //
sb.p(" public String getUUID() { return Long.toString("+checksum()+"L); }").nl();
toJavaPredict(sb, fileCtx, verboseCode);
sb.p("}").nl().di(1);
fileCtx.generate(sb); // Append file context
sb.nl();
return sb;
}
/** Generate implementation for super class. */
protected SBPrintStream toJavaSuper(String modelName, SBPrintStream sb) {
return sb.nl().ip("public " + modelName + "() { super(NAMES,DOMAINS); }").nl();
}
private SBPrintStream toJavaNAMES(SBPrintStream sb, CodeGeneratorPipeline fileCtx) {
final String modelName = JCodeGen.toJavaId(_key.toString());
final String namesHolderClassName = "NamesHolder_"+modelName;
sb.i().p("// ").p("Names of columns used by model.").nl();
sb.i().p("public static final String[] NAMES = "+namesHolderClassName+".VALUES;").nl();
// Generate class which fills the names into array
fileCtx.add(new CodeGenerator() {
@Override
public void generate(JCodeSB out) {
out.i().p("// The class representing training column names").nl();
JCodeGen.toClassWithArray(out, null, namesHolderClassName,
Arrays.copyOf(_output._names, _output.nfeatures()));
}
});
return sb;
}
protected SBPrintStream toJavaNCLASSES(SBPrintStream sb ) {
return _output.isClassifier() ? JCodeGen.toStaticVar(sb, "NCLASSES",
_output.nclasses(),
"Number of output classes included in training data response column.")
: sb;
}
private SBPrintStream toJavaDOMAINS(SBPrintStream sb, CodeGeneratorPipeline fileCtx) {
String modelName = JCodeGen.toJavaId(_key.toString());
sb.nl();
sb.ip("// Column domains. The last array contains domain of response column.").nl();
sb.ip("public static final String[][] DOMAINS = new String[][] {").nl();
String [][] domains = scoringDomains();
for (int i=0; i< domains.length; i++) {
final int idx = i;
final String[] dom = domains[i];
final String colInfoClazz = modelName+"_ColInfo_"+i;
sb.i(1).p("/* ").p(_output._names[i]).p(" */ ");
if (dom != null) sb.p(colInfoClazz).p(".VALUES"); else sb.p("null");
if (i!=domains.length-1) sb.p(',');
sb.nl();
// Right now do not generate the class representing column
// since it does not hold any interesting information except String array holding domain
if (dom != null) {
fileCtx.add(new CodeGenerator() {
@Override
public void generate(JCodeSB out) {
out.ip("// The class representing column ").p(_output._names[idx]).nl();
JCodeGen.toClassWithArray(out, null, colInfoClazz, dom);
}
}
);
}
}
return sb.ip("};").nl();
}
protected SBPrintStream toJavaPROB(SBPrintStream sb) {
if(isSupervised()) {
JCodeGen.toStaticVar(sb, "PRIOR_CLASS_DISTRIB", _output._priorClassDist, "Prior class distribution");
JCodeGen.toStaticVar(sb, "MODEL_CLASS_DISTRIB", _output._modelClassDist, "Class distribution used for model building");
}
return sb;
}
protected boolean toJavaCheckTooBig() {
Log.warn("toJavaCheckTooBig must be overridden for this model type to render it in the browser");
return true;
}
// Override in subclasses to provide some top-level model-specific goodness
protected SBPrintStream toJavaInit(SBPrintStream sb, CodeGeneratorPipeline fileContext) { return sb; }
// Override in subclasses to provide some inside 'predict' call goodness
// Method returns code which should be appended into generated top level class after
// predict method.
protected void toJavaPredictBody(SBPrintStream body,
CodeGeneratorPipeline classCtx,
CodeGeneratorPipeline fileCtx,
boolean verboseCode) {
throw new IllegalArgumentException("This model type does not support conversion to Java");
}
// Wrapper around the main predict call, including the signature and return value
private SBPrintStream toJavaPredict(SBPrintStream ccsb,
CodeGeneratorPipeline fileCtx,
boolean verboseCode) { // ccsb = classContext
ccsb.nl();
ccsb.ip("// Pass in data in a double[], pre-aligned to the Model's requirements.").nl();
ccsb.ip("// Jam predictions into the preds[] array; preds[0] is reserved for the").nl();
ccsb.ip("// main prediction (class for classifiers or value for regression),").nl();
ccsb.ip("// and remaining columns hold a probability distribution for classifiers.").nl();
ccsb.ip("public final double[] score0( double[] data, double[] preds ) {").nl();
CodeGeneratorPipeline classCtx = new CodeGeneratorPipeline(); //new SB().ii(1);
toJavaPredictBody(ccsb.ii(1), classCtx, fileCtx, verboseCode);
ccsb.ip("return preds;").nl();
ccsb.di(1).ip("}").nl();
// Output class context
classCtx.generate(ccsb.ii(1));
ccsb.di(1);
return ccsb;
}
// Convenience method for testing: build Java, convert it to a class &
// execute it: compare the results of the new class's (JIT'd) scoring with
// the built-in (interpreted) scoring on this dataset. Returns true if all
// is well, false if there are any mismatches. Throws if there is any error
// (typically an AssertionError or unable to compile the POJO).
public boolean testJavaScoring( Frame data, Frame model_predictions, double rel_epsilon) {
assert data.numRows()==model_predictions.numRows();
final Frame fr = new Frame(data);
boolean computeMetrics = data.find(_output.responseName()) != -1;
try {
String[] warns = adaptTestForTrain(fr,true, computeMetrics);
if( warns.length > 0 )
System.err.println(Arrays.toString(warns));
// Output is in the model's domain, but needs to be mapped to the scored
// dataset's domain.
int[] omap = null;
if( _output.isClassifier() ) {
Vec actual = fr.vec(_output.responseName());
String sdomain[] = actual == null ? null : actual.domain(); // Scored/test domain; can be null
String mdomain[] = model_predictions.vec(0).domain(); // Domain of predictions (union of test and train)
if( sdomain != null && mdomain != sdomain && !Arrays.equals(mdomain, sdomain)) {
omap = CategoricalWrappedVec.computeMap(mdomain,sdomain); // Map from model-domain to scoring-domain
}
}
String modelName = JCodeGen.toJavaId(_key.toString());
boolean preview = false;
String java_text = toJava(preview, true);
GenModel genmodel;
try {
Class clz = JCodeGen.compile(modelName,java_text);
genmodel = (GenModel)clz.newInstance();
} catch (Exception e) {
throw H2O.fail("Internal POJO compilation failed",e);
}
Vec[] dvecs = fr.vecs();
Vec[] pvecs = model_predictions.vecs();
double features [] = MemoryManager.malloc8d(genmodel._names.length);
double predictions[] = MemoryManager.malloc8d(genmodel.nclasses() + 1);
// Compare predictions, counting mis-predicts
int totalMiss = 0;
int miss = 0;
for( int row=0; row<fr.numRows(); row++ ) { // For all rows, single-threaded
// Native Java API
for (int col = 0; col < features.length; col++) // Build feature set
features[col] = dvecs[col].at(row);
genmodel.score0(features, predictions); // POJO predictions
for (int col = 0; col < pvecs.length; col++) { // Compare predictions
double d = pvecs[col].at(row); // Load internal scoring predictions
if (col == 0 && omap != null) d = omap[(int) d]; // map categorical response to scoring domain
if (!MathUtils.compare(predictions[col], d, 1e-15, rel_epsilon)) {
if (miss++ < 10)
System.err.println("Predictions mismatch, row " + row + ", col " + model_predictions._names[col] + ", internal prediction=" + d + ", POJO prediction=" + predictions[col]);
}
}
totalMiss = miss;
}
// EasyPredict API
EasyPredictModelWrapper epmw = new EasyPredictModelWrapper(genmodel);
RowData rowData = new RowData();
for( int row=0; row<fr.numRows(); row++ ) { // For all rows, single-threaded
if (genmodel.getModelCategory() == ModelCategory.AutoEncoder) continue;
for( int col=0; col<features.length; col++ ) {
double val = dvecs[col].at(row);
rowData.put(
genmodel._names[col],
genmodel._domains[col] == null ? (Double) val
: (int)val < genmodel._domains[col].length ? genmodel._domains[col][(int)val] : "UnknownLevel");
}
AbstractPrediction p;
try { p=epmw.predict(rowData); }
catch (PredictException e) { continue; }
for (int col = 0; col < pvecs.length; col++) { // Compare predictions
double d = pvecs[col].at(row); // Load internal scoring predictions
if (col == 0 && omap != null) d = omap[(int) d]; // map categorical response to scoring domain
double d2 = Double.NaN;
switch( genmodel.getModelCategory()) {
case Clustering: d2 = ((ClusteringModelPrediction) p).cluster; break;
case Regression: d2 = ((RegressionModelPrediction) p).value; break;
case Binomial: BinomialModelPrediction bmp = ( BinomialModelPrediction) p;
d2 = (col==0) ? bmp.labelIndex : bmp.classProbabilities[col-1]; break;
case Multinomial: MultinomialModelPrediction mmp = (MultinomialModelPrediction) p;
d2 = (col==0) ? mmp.labelIndex : mmp.classProbabilities[col-1]; break;
}
if( !MathUtils.compare(d2, d, 1e-15, rel_epsilon) ) {
miss++;
System.err.println("EasyPredict Predictions mismatch, row " + row + ", col " + model_predictions._names[col] + ", internal prediction=" + d + ", POJO prediction=" + predictions[col]);
}
totalMiss = miss;
}
}
if (totalMiss != 0) System.err.println("Number of mismatches: " + totalMiss);
return totalMiss==0;
} finally {
cleanup_adapt(fr, data); // Remove temp keys.
}
}
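// Illustrative test sketch (not from the original source; testFrame and the 1e-6 tolerance
// are assumptions): verify that POJO scoring matches in-H2O scoring for a dataset:
//   Frame preds = model.score(testFrame);
//   try {
//     Assert.assertTrue(model.testJavaScoring(testFrame, preds, 1e-6));
//   } finally {
//     preds.delete();
//   }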
public void deleteCrossValidationModels( ) {
if (_output._cross_validation_models != null) {
for (Key k : _output._cross_validation_models) {
Model m = DKV.getGet(k);
if (m!=null) m.delete(); //delete all subparts
}
}
}
@Override public String toString() {
return _output.toString();
}
/** Model stream writer - output Java code representation of model. */
public class JavaModelStreamWriter extends StreamWriter {
/** Show only preview */
private final boolean preview;
public JavaModelStreamWriter(boolean preview) {
this.preview = preview;
}
@Override
public void writeTo(OutputStream os) {
toJava(os, preview, true);
}
}
@Override public Class<water.api.KeyV3.ModelKeyV3> makeSchema() { return water.api.KeyV3.ModelKeyV3.class; }
}
|
h2o-core/src/main/java/hex/Model.java
|
package hex;
import hex.genmodel.GenModel;
import hex.genmodel.easy.EasyPredictModelWrapper;
import hex.genmodel.easy.RowData;
import hex.genmodel.easy.exception.PredictException;
import hex.genmodel.easy.prediction.*;
import org.joda.time.DateTime;
import water.*;
import water.api.StreamWriter;
import water.codegen.CodeGenerator;
import water.codegen.CodeGeneratorPipeline;
import water.exceptions.JCodeSB;
import water.fvec.*;
import water.util.*;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Random;
import static hex.ModelMetricsMultinomial.getHitRatioTable;
/**
* A Model models reality (hopefully).
* A model can be used to 'score' a row (make a prediction), or a collection of
* rows on any compatible dataset - meaning the row has all the columns with the
* same names as used to build the model, and any categorical columns can
* be adapted.
*/
public abstract class Model<M extends Model<M,P,O>, P extends Model.Parameters, O extends Model.Output> extends Lockable<M> {
public interface DeepFeatures {
Frame scoreAutoEncoder(Frame frame, Key destination_key, boolean reconstruction_error_per_feature);
Frame scoreDeepFeatures(Frame frame, final int layer);
}
public interface GLRMArchetypes {
Frame scoreReconstruction(Frame frame, Key destination_key, boolean reverse_transform);
Frame scoreArchetypes(Frame frame, Key destination_key, boolean reverse_transform);
}
public interface LeafNodeAssignment {
Frame scoreLeafNodeAssignment(Frame frame, Key destination_key);
}
/**
* Default threshold for assigning class labels to the target class (for binomial models)
* @return threshold in 0...1
*/
public final double defaultThreshold() {
if (_output.nclasses() != 2 || _output._training_metrics == null)
return 0.5;
if (_output._validation_metrics != null && ((ModelMetricsBinomial)_output._validation_metrics)._auc != null)
return ((ModelMetricsBinomial)_output._validation_metrics)._auc.defaultThreshold();
if (((ModelMetricsBinomial)_output._training_metrics)._auc != null)
return ((ModelMetricsBinomial)_output._training_metrics)._auc.defaultThreshold();
return 0.5;
}
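// Illustrative sketch (not from the original source): applying the default threshold to a
// binomial prediction row, assuming preds[2] holds the probability of the positive class:
//   double p1 = preds[2];
//   long label = (p1 >= model.defaultThreshold()) ? 1 : 0;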
public final boolean isSupervised() { return _output.isSupervised(); }
/** Model-specific parameter class. Each model sub-class contains
* an instance of one of these containing its builder parameters, with
* model-specific parameters. E.g. KMeansModel extends Model and has a
* KMeansParameters extending Model.Parameters; sample parameters include K,
* whether or not to normalize, max iterations and the initial random seed.
*
* <p>The non-transient fields are input parameters to the model-building
* process, and are considered "first class citizens" by the front-end - the
* front-end will cache Parameters (in the browser, in JavaScript, on disk)
* and rebuild Parameter instances from those caches.
*
* WARNING: Model Parameters is not an immutable object and the ModelBuilder
* can modify it!
*/
public abstract static class Parameters extends Iced<Parameters> {
/** Maximal number of supported levels in response. */
public static final int MAX_SUPPORTED_LEVELS = 1000;
/** The short name, used in making Keys. e.g. "GBM" */
abstract public String algoName();
/** The pretty algo name for this Model (e.g., Gradient Boosting Method, rather than GBM).*/
abstract public String fullName();
/** The Java class name for this Model (e.g., hex.tree.gbm.GBM, rather than GBM).*/
abstract public String javaName();
public Key<Frame> _train; // User-Key of the Frame the Model is trained on
public Key<Frame> _valid; // User-Key of the Frame the Model is validated on, if any
public int _nfolds = 0;
public boolean _keep_cross_validation_predictions = false;
public boolean _parallelize_cross_validation = true;
public enum FoldAssignmentScheme {
AUTO, Random, Modulo, Stratified
}
protected long nFoldSeed() { return new Random().nextLong(); }
public FoldAssignmentScheme _fold_assignment = FoldAssignmentScheme.AUTO;
public Distribution.Family _distribution = Distribution.Family.AUTO;
public double _tweedie_power = 1.5;
public double _quantile_alpha = 0.5;
protected double defaultStoppingTolerance() { return 1e-3; }
abstract public long progressUnits();
// TODO: This field belongs in the front-end column-selection process and
// NOT in the parameters - because this requires all model-builders to have
// column strip/ignore code.
public String[] _ignored_columns;// column names to ignore for training
public boolean _ignore_const_cols; // True if dropping constant cols
public String _weights_column;
public String _offset_column;
public String _fold_column;
// Scoring a model on a dataset is not free; sometimes it is THE limiting
// factor to model building. By default, partially built models are only
// scored every so many major model iterations - throttled to limit scoring
// costs to less than 10% of the build time. This flag forces scoring for
// every iteration, allowing e.g. more fine-grained progress reporting.
public boolean _score_each_iteration;
/**
* Maximum allowed runtime in seconds for model training. Use 0 to disable.
*/
public double _max_runtime_secs = 0;
/**
* Early stopping based on convergence of stopping_metric.
* Stop if simple moving average of the stopping_metric does not improve by stopping_tolerance for
* k scoring events.
* Can only trigger after at least 2k scoring events. Use 0 to disable.
*/
public int _stopping_rounds = 0;
/**
* Metric to use for convergence checking, only for _stopping_rounds > 0.
*/
public ScoreKeeper.StoppingMetric _stopping_metric = ScoreKeeper.StoppingMetric.AUTO;
/**
* Relative tolerance for metric-based stopping criterion: stop if relative improvement is not at least this much.
*/
public double _stopping_tolerance = defaultStoppingTolerance();
/** Supervised models have an expected response they get to train with! */
public String _response_column; // response column name
/** Should all classes be over/under-sampled to balance the class
* distribution? */
public boolean _balance_classes = false;
/** When classes are being balanced, limit the resulting dataset size to
* the specified multiple of the original dataset size. Maximum relative
* size of the training data after balancing class counts (can be less
* than 1.0) */
public float _max_after_balance_size = 5.0f;
/**
* Desired over/under-sampling ratios per class (lexicographic order).
* Only when balance_classes is enabled.
* If not specified, they will be automatically computed to obtain class balance during training.
*/
public float[] _class_sampling_factors;
/** For classification models, the maximum size (in terms of classes) of
* the confusion matrix for it to be printed. This option is meant to
* avoid printing extremely large confusion matrices. */
public int _max_confusion_matrix_size = 20;
/**
* A model key associated with a previously trained Deep Learning
* model. This option allows users to build a new model as a
* continuation of a previously generated model.
*/
public Key<? extends Model> _checkpoint;
// Public no-arg constructor for reflective creation
public Parameters() { _ignore_const_cols = defaultDropConsCols(); }
/** @return the training frame instance */
public final Frame train() { return _train==null ? null : _train.get(); }
/** @return the validation frame instance, or null
* if a validation frame was not specified */
public final Frame valid() { return _valid==null ? null : _valid.get(); }
/** Read-Lock both training and validation User frames. */
public void read_lock_frames(Job job) {
Frame tr = train();
if (tr != null)
tr.read_lock(job._key);
if (_valid != null && !_train.equals(_valid))
_valid.get().read_lock(job._key);
}
/** Read-UnLock both training and validation User frames. This method is
* called on crash-cleanup paths, so it handles the case where the frames
* are not actually locked. */
public void read_unlock_frames(Job job) {
Frame tr = train();
if( tr != null ) tr.unlock(job._key,false);
if( _valid != null && !_train.equals(_valid) )
valid().unlock(job._key,false);
}
// Override in subclasses to change the default; e.g. true in GLM
protected boolean defaultDropConsCols() { return true; }
/** Type of missing columns during adaptation between train/test datasets
* Overload this method for models that have sparse data handling - a zero
* will preserve the sparseness. Otherwise, NaN is used.
* @return real-valued number (can be NaN) */
public double missingColumnsType() { return Double.NaN; }
public boolean hasCheckpoint() { return _checkpoint != null; }
// FIXME: this is really horrible hack, Model.Parameters has method checksum_impl,
// but not checksum, the API is totally random :(
public long checksum() {
return checksum_impl();
}
/**
* Compute a checksum based on all non-transient non-static ice-able assignable fields (incl. inherited ones) which have @API annotations.
* Sort the fields first, since reflection gives us the fields in random order and we don't want the checksum to be affected by the field order.
* NOTE: if a field is added to a Parameters class the checksum will differ even when all the previous parameters have the same value. If
* a client wants backward compatibility they will need to compare parameter values explicitly.
*
* The method is motivated by standard hash implementation `hash = hash * P + value` but we use high prime numbers in random order.
* @return checksum
*/
protected long checksum_impl() {
long xs = 0x600DL;
int count = 0;
Field[] fields = Weaver.getWovenFields(this.getClass());
Arrays.sort(fields,
new Comparator<Field>() {
public int compare(Field field1, Field field2) {
return field1.getName().compareTo(field2.getName());
}
});
for (Field f : fields) {
final long P = MathUtils.PRIMES[count % MathUtils.PRIMES.length];
Class<?> c = f.getType();
if (c.isArray()) {
try {
f.setAccessible(true);
if (f.get(this) != null) {
if (c.getComponentType() == Integer.TYPE){
int[] arr = (int[]) f.get(this);
xs = xs * P + (long) Arrays.hashCode(arr);
} else if (c.getComponentType() == Float.TYPE) {
float[] arr = (float[]) f.get(this);
xs = xs * P + (long) Arrays.hashCode(arr);
} else if (c.getComponentType() == Double.TYPE) {
double[] arr = (double[]) f.get(this);
xs = xs * P + (long) Arrays.hashCode(arr);
} else if (c.getComponentType() == Long.TYPE){
long[] arr = (long[]) f.get(this);
xs = xs * P + (long) Arrays.hashCode(arr);
} else {
Object[] arr = (Object[]) f.get(this);
xs = xs * P + (long) Arrays.deepHashCode(arr);
} //else lead to ClassCastException
} else {
xs = xs * P;
}
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
} catch (ClassCastException t) {
throw H2O.fail(); //no support yet for int[][] etc.
}
} else {
try {
f.setAccessible(true);
Object value = f.get(this);
if (value != null) {
xs = xs * P + (long)(value.hashCode());
} else {
xs = xs * P + P;
}
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
count++;
}
xs ^= (train() == null ? 43 : train().checksum()) * (valid() == null ? 17 : valid().checksum());
return xs;
}
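// A minimal usage sketch (hypothetical variables): two Parameters instances of the same
// concrete subclass with identical field values yield equal checksums, so
//   boolean sameSettings = (p1.checksum() == p2.checksum());
// can be used to detect whether any hyper-parameter changed between two runs.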
}
public P _parms; // TODO: move things around so that this can be protected
public String [] _warnings = new String[0];
public void addWarning(String s){
_warnings = Arrays.copyOf(_warnings,_warnings.length+1);
_warnings[_warnings.length-1] = s;
}
/** Model-specific output class. Each model sub-class contains an instance
* of one of these containing its "output": the pieces of the model needed
* for scoring. E.g. KMeansModel has a KMeansOutput extending Model.Output
* which contains the cluster centers. The output also includes the names,
* domains and other fields which are determined at training time. */
public abstract static class Output extends Iced {
/** Names of the columns used in the model; they are matched up with the scoring data
 * columns. The last name is the response column name (if any). */
public String _names[];
/** List of Keys to cross-validation models (non-null iff _parms._nfolds > 1 or _parms._fold_column != null) **/
public Key _cross_validation_models[];
/** List of Keys to cross-validation predictions (if requested) **/
public Key _cross_validation_predictions[];
public Output(){this(false,false,false);}
public Output(boolean hasWeights, boolean hasOffset, boolean hasFold) {
_hasWeights = hasWeights;
_hasOffset = hasOffset;
_hasFold = hasFold;
}
/** Any final prep-work just before model-building starts, but after the
 * user has clicked "go". E.g., converting a response column to a categorical
 * touches the entire column (can be expensive), makes a parallel vec
 * (Key/Data leak management issues), and might throw IAE if there are too
 * many classes. */
public Output( ModelBuilder b ) {
if( b == null ) {       // Guard before dereferencing b
_hasOffset = false;
_hasWeights = false;
_hasFold = false;
return;
}
_job = b._job;
_isSupervised = b.isSupervised();
if( b.error_count() > 0 )
throw new IllegalArgumentException(b.validationErrors());
// Capture the data "shape" the model is valid on
_names = b._train.names ();
_domains= b._train.domains();
_hasOffset = b.hasOffsetCol();
_hasWeights = b.hasWeightCol();
_hasFold = b.hasFoldCol();
_distribution = b._distribution;
_priorClassDist = b._priorClassDist;
}
/** Returns number of input features (OK for most supervised methods, need to override for unsupervised!) */
public int nfeatures() {
return _names.length - (_hasOffset?1:0) - (_hasWeights?1:0) - (_hasFold?1:0) - (isSupervised()?1:0);
}
/** Categorical/factor mappings, per column. Null for non-categorical cols.
* Columns match the post-init cleanup columns. The last column holds the
* response col categoricals for SupervisedModels. */
public String _domains[][];
/** List of all the associated ModelMetrics objects, so we can delete them
* when we delete this model. */
Key[] _model_metrics = new Key[0];
/** Job info: final status (canceled, crashed), build time */
public Job _job;
/**
* Training set metrics obtained during model training
*/
public ModelMetrics _training_metrics;
/**
* Validation set metrics obtained during model training (if a validation data set was specified)
*/
public ModelMetrics _validation_metrics;
/**
* Cross-Validation metrics obtained during model training
*/
public ModelMetrics _cross_validation_metrics;
/**
* User-facing model summary - Display model type, complexity, size and other useful stats
*/
public TwoDimTable _model_summary;
/**
* User-facing model scoring history - 2D table with modeling accuracy as a function of time/trees/epochs/iterations, etc.
*/
public TwoDimTable _scoring_history;
protected boolean _isSupervised;
public boolean isSupervised() { return _isSupervised; }
// Weights, offset and fold columns are kept at designated positions in the names array;
// the response column (if any) is always the last column. We only need to know whether
// each of these special columns is present.
protected final boolean _hasOffset;
protected final boolean _hasWeights;
protected final boolean _hasFold;
public boolean hasOffset () { return _hasOffset;}
public boolean hasWeights () { return _hasWeights;}
public boolean hasFold () { return _hasFold;}
public String responseName() { return isSupervised()?_names[responseIdx()]:null;}
public String weightsName () { return _hasWeights ?_names[weightsIdx()]:null;}
public String offsetName () { return _hasOffset ?_names[offsetIdx()]:null;}
public String foldName () { return _hasFold ?_names[foldIdx()]:null;}
// Vec layout is [c1,c2,...,cn,w?,o?,f?,r]: cn are predictor cols; w, o and f are the optional
// weights, offset and fold columns; r is the response column (supervised models only)
public int weightsIdx () {
if(!_hasWeights) return -1;
return _names.length - (isSupervised()?1:0) - (hasOffset()?1:0) - 1 - (hasFold()?1:0);
}
public int offsetIdx () {
if(!_hasOffset) return -1;
return _names.length - (isSupervised()?1:0) - (hasFold()?1:0) - 1;
}
public int foldIdx () {
if(!_hasFold) return -1;
return _names.length - (isSupervised()?1:0) - 1;
}
public int responseIdx () {
if(!isSupervised()) return -1;
return _names.length-1;
}
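// Worked example (hypothetical column names): for a supervised frame with names
// ["x1","x2","weights","offset","fold","response"] and all three special columns present,
// weightsIdx()==2, offsetIdx()==3, foldIdx()==4 and responseIdx()==5, matching the
// [c1..cn,w?,o?,f?,r] layout noted above.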
/** The names of the levels for a categorical response column. */
public String[] classNames() { assert isSupervised();
return _domains[_domains.length-1];
}
/** Is this model a classification model? (v. a regression or clustering model) */
public boolean isClassifier() { return isSupervised() && nclasses() > 1; }
/** Is this model a binomial classification model? (v. a regression or clustering model) */
public boolean isBinomialClassifier() { return isSupervised() && nclasses() == 2; }
public int nclasses() {
assert isSupervised();
String cns[] = classNames();
return cns==null ? 1 : cns.length;
}
public double [] _distribution;
public double [] _modelClassDist;
public double [] _priorClassDist;
// Note: some algorithms MUST redefine this method to return other model categories
public ModelCategory getModelCategory() {
if(isSupervised())
return (isClassifier() ?
(nclasses() > 2 ? ModelCategory.Multinomial : ModelCategory.Binomial) :
ModelCategory.Regression);
return ModelCategory.Unknown;
}
public boolean isAutoencoder() { return false; } // Override in DeepLearning and so on.
public synchronized ModelMetrics addModelMetrics(ModelMetrics mm) {
DKV.put(mm);
for( Key key : _model_metrics ) // Dup removal
if( key==mm._key ) return mm;
_model_metrics = Arrays.copyOf(_model_metrics, _model_metrics.length + 1);
_model_metrics[_model_metrics.length - 1] = mm._key;
return mm; // Flow coding
}
public synchronized void clearModelMetrics() { _model_metrics = new Key[0]; }
protected long checksum_impl() {
return (null == _names ? 13 : Arrays.hashCode(_names)) *
(null == _domains ? 17 : Arrays.deepHashCode(_domains)) *
getModelCategory().ordinal();
}
public void printTwoDimTables(StringBuilder sb, Object o) {
for (Field f : Weaver.getWovenFields(o.getClass())) {
Class<?> c = f.getType();
if (c.isAssignableFrom(TwoDimTable.class)) {
try {
TwoDimTable t = (TwoDimTable) f.get(this);
f.setAccessible(true);
if (t != null) sb.append(t.toString(1,false /*don't print the full table if too long*/));
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
}
}
@Override public String toString() {
StringBuilder sb = new StringBuilder();
if (_training_metrics!=null) sb.append(_training_metrics.toString());
if (_validation_metrics!=null) sb.append(_validation_metrics.toString());
if (_cross_validation_metrics!=null) sb.append(_cross_validation_metrics.toString());
printTwoDimTables(sb, this);
return sb.toString();
}
} // Output
protected String[][] scoringDomains() {return _output._domains;}
public O _output; // TODO: move things around so that this can be protected
public ModelMetrics addMetrics(ModelMetrics mm) { return _output.addModelMetrics(mm); }
public abstract ModelMetrics.MetricBuilder makeMetricBuilder(String[] domain);
/** Full constructor */
public Model( Key selfKey, P parms, O output) {
super(selfKey);
_parms = parms ; assert parms != null;
_output = output; // Output may legitimately be null here, so no assert on it
}
/**
 * Deviance of the given distribution function at predicted value f.
 * The default implementation below uses the Gaussian distribution.
 * @param w observation weight
 * @param y (actual) response
 * @param f (predicted) response in original response space
 * @return deviance
 */
public double deviance(double w, double y, double f) {
return new Distribution(Distribution.Family.gaussian).deviance(w, y, f);
}
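// For the Gaussian default above the deviance is (assuming the usual definition) the weighted
// squared error, i.e. deviance(w, y, f) == w * (y - f) * (y - f).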
protected ScoringInfo[] scoringInfo;
public ScoringInfo[] scoring_history() { return scoringInfo; }
/**
 * Fill a ScoringInfo with data from the ModelMetrics for this model.
 * @param scoringInfo the ScoringInfo instance to populate
 */
public void fillScoringInfo(ScoringInfo scoringInfo) {
scoringInfo.is_classification = this._output.isClassifier();
scoringInfo.is_autoencoder = _output.isAutoencoder();
scoringInfo.scored_train = new ScoreKeeper(this._output._training_metrics);
scoringInfo.scored_valid = new ScoreKeeper(this._output._validation_metrics);
scoringInfo.scored_xval = new ScoreKeeper(this._output._cross_validation_metrics);
scoringInfo.validation = _output._validation_metrics != null;
scoringInfo.cross_validation = _output._cross_validation_metrics != null;
if (this._output.isBinomialClassifier()) {
scoringInfo.training_AUC = ((ModelMetricsBinomial)this._output._training_metrics)._auc;
scoringInfo.validation_AUC = this._output._validation_metrics == null ? null : ((ModelMetricsBinomial)this._output._validation_metrics)._auc;
}
}
// return the most up-to-date model metrics
public ScoringInfo last_scored() { return scoringInfo == null ? null : scoringInfo[scoringInfo.length-1]; }
// Lower is better
public float loss() {
switch (_parms._stopping_metric) {
case MSE:
return (float) mse();
case logloss:
return (float) logloss();
case deviance:
return (float) deviance();
case misclassification:
return (float) classification_error();
case AUC:
return (float)(1-auc());
case AUTO:
default:
return (float) (_output.isClassifier() ? logloss() : _output.isAutoencoder() ? mse() : deviance());
}
} // loss()
public int compareTo(M o) {
if (o._output.isClassifier() != _output.isClassifier()) throw new UnsupportedOperationException("Cannot compare classifier against regressor.");
if (o._output.isClassifier()) {
if (o._output.nclasses() != _output.nclasses())
throw new UnsupportedOperationException("Cannot compare models with different number of classes.");
}
return (loss() < o.loss() ? -1 : loss() > o.loss() ? 1 : 0);
}
public double classification_error() {
if (scoringInfo == null) return Double.NaN;
return last_scored().validation ? last_scored().scored_valid._classError : last_scored().scored_train._classError;
}
public double mse() {
if (scoringInfo == null) return Double.NaN;
return last_scored().validation ? last_scored().scored_valid._mse : last_scored().scored_train._mse;
}
public double auc() {
if (scoringInfo == null) return Double.NaN;
return last_scored().validation ? last_scored().scored_valid._AUC : last_scored().scored_train._AUC;
}
public double deviance() {
if (scoringInfo == null) return Double.NaN;
return last_scored().validation ? last_scored().scored_valid._mean_residual_deviance : last_scored().scored_train._mean_residual_deviance;
}
public double logloss() {
if (scoringInfo == null) return Double.NaN;
return last_scored().validation ? last_scored().scored_valid._logloss : last_scored().scored_train._logloss;
}
/** Adapt a Test/Validation Frame to be compatible for a Training Frame. The
* intention here is that ModelBuilders can assume the test set has the same
* count of columns, and within each factor column the same set of
* same-numbered levels. Extra levels are renumbered past those in the
* Train set but will still be present in the Test set, thus requiring
* range-checking.
*
* This routine is used before model building (with no Model made yet) to
* check for compatible datasets, and also used to prepare a large dataset
* for scoring (with a Model).
*
* Adaption does the following things:
* - Remove any "extra" Vecs appearing only in the test and not the train
* - Insert any "missing" Vecs appearing only in the train and not the test
*   with all NAs (see {@code missingColumnsType}). This will issue a warning,
*   and if the "expensive" flag is false it won't actually make the replacement
*   column but instead will bail out on the whole adaption (but
*   will continue looking for more warnings).
* - If all columns are missing, issue an error.
* - Renumber matching cat levels to match the Train levels; this might make
* "holes" in the Test set cat levels, if some are not in the Test set.
* - Extra Test levels are renumbered past the end of the Train set, hence
* the train and test levels match up to all the train levels; there might
* be extra Test levels past that.
* - For all mis-matched levels, issue a warning.
*
* The {@code test} frame is updated in-place to be compatible, by altering
* the names and Vecs; make a defensive copy if you do not want it modified.
* There is a fast-path cutout if the test set is already compatible. Since
* the test set is conditionally modified, with extra CategoricalWrappedVecs
* optionally added, it is recommended to use a Scope enter/exit to track Vec lifetimes.
*
* @param test Testing Frame, updated in-place
* @param expensive Try hard to adapt; this might involve the creation of
* whole Vecs and thus get expensive. If {@code false}, then only adapt if
* no warnings and errors; otherwise just the messages are produced.
* Created Vecs have to be deleted by the caller (e.g. Scope.enter/exit).
* @return Array of warnings; zero length (never null) for no warnings.
* Throws {@code IllegalArgumentException} if no columns are in common, or
* if any factor column has no levels in common.
*/
public String[] adaptTestForTrain( Frame test, boolean expensive, boolean computeMetrics) {
return adaptTestForTrain(_output._names, _output.weightsName(), _output.offsetName(), _output.foldName(), _output.responseName(), _output._domains, test, _parms.missingColumnsType(), expensive, computeMetrics);
}
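// A minimal usage sketch (hypothetical 'model' and 'test' variables): adapt a defensive copy
// before scoring, so the caller's frame stays unmodified as recommended above.
//   Frame adapted = new Frame(test);
//   String[] warns = model.adaptTestForTrain(adapted, true /*expensive*/, true /*computeMetrics*/);
//   for (String w : warns) Log.warn(w);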
/**
* @param names Training column names
* @param weights Name of column with observation weights; weights are NOT filled in if missing in the test frame
* @param offset Name of column with offset; if not null (i.e. the model was trained with an offset), the offset MUST be present in the test data as well, otherwise we cannot score and an IAE is thrown.
* @param fold Name of the cross-validation fold column, if any
* @param response Name of response column; the response is NOT filled in if missing in the test frame
* @param domains Training column levels
* @param missing Substitute for missing columns; usually NaN
* */
public static String[] adaptTestForTrain(String[] names, String weights, String offset, String fold, String response, String[][] domains, Frame test, double missing, boolean expensive, boolean computeMetrics) throws IllegalArgumentException {
if( test == null) return new String[0];
// Fast path cutout: already compatible
String[][] tdomains = test.domains();
if( names == test._names && domains == tdomains )
return new String[0];
// Fast path cutout: already compatible but needs work to test
if( Arrays.equals(names,test._names) && Arrays.deepEquals(domains,tdomains) )
return new String[0];
// Build the validation set to be compatible with the training set.
// Toss out extra columns, complain about missing ones, remap categoricals
ArrayList<String> msgs = new ArrayList<>();
Vec vvecs[] = new Vec[names.length];
int good = 0; // Any matching column names, at all?
int convNaN = 0;
for( int i=0; i<names.length; i++ ) {
Vec vec = test.vec(names[i]); // Search in the given validation set
// For supervised problems, if the test set has no response, then we don't fill that in with NAs.
boolean isResponse = response != null && names[i].equals(response);
boolean isWeights = weights != null && names[i].equals(weights);
boolean isOffset = offset != null && names[i].equals(offset);
boolean isFold = fold != null && names[i].equals(fold);
if(vec == null && isResponse && computeMetrics)
throw new IllegalArgumentException("Test/Validation dataset is missing response vector '" + response + "'");
if(vec == null && isOffset)
throw new IllegalArgumentException("Test/Validation dataset is missing offset vector '" + offset + "'");
if(vec == null && isWeights && computeMetrics && expensive) {
vec = test.anyVec().makeCon(1);
msgs.add(H2O.technote(1, "Test/Validation dataset is missing the weights column '" + names[i] + "' (needed because a response was found and metrics are to be computed): substituting in a column of 1s"));
//throw new IllegalArgumentException(H2O.technote(1, "Test dataset is missing weights vector '" + weights + "' (needed because a response was found and metrics are to be computed)."));
}
// If a training set column is missing in the validation set, complain and fill in with NAs.
if( vec == null) {
String str = null;
if( expensive ) {
if (isFold) {
str = "Test/Validation dataset is missing fold column '" + names[i] + "': substituting in a column of 0s";
vec = test.anyVec().makeCon(0);
} else {
str = "Test/Validation dataset is missing training column '" + names[i] + "': substituting in a column of NAs";
vec = test.anyVec().makeCon(missing);
convNaN++;
}
vec.setDomain(domains[i]);
}
msgs.add(str);
}
if( vec != null ) { // I have a column with a matching name
if( domains[i] != null ) { // Model expects a categorical
if (vec.isString())
vec = VecUtils.stringToCategorical(vec); //turn a String column into a categorical column (we don't delete the original vec here)
if( vec.domain() != domains[i] && !Arrays.equals(vec.domain(),domains[i]) ) { // Result needs to be the same categorical
CategoricalWrappedVec evec;
try {
evec = vec.adaptTo(domains[i]); // Convert to categorical or throw IAE
} catch( NumberFormatException nfe ) {
throw new IllegalArgumentException("Test/Validation dataset has a non-categorical column '"+names[i]+"' which is categorical in the training data");
}
String[] ds = evec.domain();
assert ds != null && ds.length >= domains[i].length;
if( isResponse && vec.domain() != null && ds.length == domains[i].length+vec.domain().length )
throw new IllegalArgumentException("Test/Validation dataset has a categorical response column '"+names[i]+"' with no levels in common with the model");
if (ds.length > domains[i].length)
msgs.add("Test/Validation dataset column '" + names[i] + "' has levels not trained on: " + Arrays.toString(Arrays.copyOfRange(ds, domains[i].length, ds.length)));
if (expensive) { vec = evec; good++; } // Keep it
else { evec.remove(); vec = null; } // No leaking if not-expensive
} else {
good++;
}
} else if( vec.isCategorical() ) {
throw new IllegalArgumentException("Test/Validation dataset has categorical column '"+names[i]+"' which is real-valued in the training data");
} else {
good++; // Assumed compatible; not checking e.g. Strings vs UUID
}
}
vvecs[i] = vec;
}
if( good == convNaN )
throw new IllegalArgumentException("Test/Validation dataset has no columns in common with the training set");
if( good == names.length || (response != null && test.find(response) == -1 && good == names.length - 1) ) // Only update if got something for all columns
test.restructure(names,vvecs,good);
return msgs.toArray(new String[msgs.size()]);
}
/**
* Bulk score the frame, and auto-name the resulting predictions frame.
* @see #score(Frame, String)
* @param fr frame which should be scored
* @return A new frame containing the predicted values. For classification it
*         contains a prediction column and the class distribution over all
*         response classes. For regression it contains only one column with
*         the predicted values.
* @throws IllegalArgumentException
*/
public Frame score(Frame fr) throws IllegalArgumentException {
return score(fr, null);
}
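// A minimal usage sketch (hypothetical 'model' and 'test' variables): the returned frame lives
// in the DKV and the caller owns it.
//   Frame preds = model.score(test);
//   try {
//     double first = preds.vec(0).at(0);   // class index for classifiers, value for regression
//   } finally {
//     preds.delete();                      // caller is responsible for deleting the result
//   }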
/** Bulk score the frame {@code fr}, producing a Frame result; the 1st
* Vec is the predicted class, the remaining Vecs are the probability
* distributions. For Regression (single-class) models, the 1st and only
* Vec is the prediction value. The result is in the DKV; caller is
* responsible for deleting.
*
* @param fr frame which should be scored
* @return A new frame containing the predicted values. For classification it
*         contains a prediction column and the class distribution over all
*         response classes. For regression it contains only one column with
*         the predicted values.
* @throws IllegalArgumentException
*/
public Frame score(Frame fr, String destination_key) throws IllegalArgumentException {
Frame adaptFr = new Frame(fr);
boolean computeMetrics = (!isSupervised() || adaptFr.find(_output.responseName()) != -1);
adaptTestForTrain(adaptFr,true, computeMetrics); // Adapt
Frame output = predictScoreImpl(fr, adaptFr, destination_key); // Predict & Score
// Log modest confusion matrices
Vec predicted = output.vecs()[0]; // Modeled/predicted response
String mdomain[] = predicted.domain(); // Domain of predictions (union of test and train)
// Output is in the model's domain, but needs to be mapped to the scored
// dataset's domain.
if(_output.isClassifier() && computeMetrics) {
// assert(mdomain != null); // label must be categorical
ModelMetrics mm = ModelMetrics.getFromDKV(this,fr);
ConfusionMatrix cm = mm.cm();
if (cm != null && cm._domain != null) //don't print table for regression
if( cm._cm.length < _parms._max_confusion_matrix_size/*Print size limitation*/ ) {
Log.info(cm.table().toString(1));
}
if (mm.hr() != null) {
Log.info(getHitRatioTable(mm.hr()));
}
Vec actual = fr.vec(_output.responseName());
if( actual != null ) { // Predict does not have an actual, scoring does
String sdomain[] = actual.domain(); // Scored/test domain; can be null
if (sdomain != null && mdomain != sdomain && !Arrays.equals(mdomain, sdomain))
output.replace(0, new CategoricalWrappedVec(actual.group().addVec(), actual._rowLayout, sdomain, predicted._key));
}
}
cleanup_adapt(adaptFr, fr);
return output;
}
// Remove temp keys. TODO: Really should use Scope but Scope does not
// currently allow nested-key-keepers.
static protected void cleanup_adapt( Frame adaptFr, Frame fr ) {
Key[] keys = adaptFr.keys();
for( int i=0; i<keys.length; i++ )
if( fr.find(keys[i]) != -1 ) // Exists in the original frame?
keys[i] = null; // Do not delete it
adaptFr.delete();
}
/** Score an already adapted frame. Returns a new Frame with new result
* vectors, all in the DKV. Caller responsible for deleting. Input is
* already adapted to the Model's domain, so the output is also. Also
* computes the metrics for this frame.
*
* @param adaptFrm Already adapted frame
* @return A Frame containing the prediction column, and class distribution
*/
protected Frame predictScoreImpl(Frame fr, Frame adaptFrm, String destination_key) {
final boolean computeMetrics = (!isSupervised() || adaptFrm.find(_output.responseName()) != -1);
// Build up the names & domains.
final int nc = _output.nclasses();
final int ncols = nc==1?1:nc+1; // Regression has 1 predict col; classification also has class distribution
String[] names = new String[ncols];
String[][] domains = new String[ncols][];
names[0] = "predict";
for(int i = 1; i < names.length; ++i) {
names[i] = _output.classNames()[i - 1];
// turn integer class labels such as 0, 1, etc. into p0, p1, etc.
try {
Integer.valueOf(names[i]);
names[i] = "p" + names[i];
} catch (Throwable t) {
// do nothing, non-integer names are fine already
}
}
domains[0] = nc==1 ? null : !computeMetrics ? _output._domains[_output._domains.length-1] : adaptFrm.lastVec().domain();
// Score the dataset, building the class distribution & predictions
BigScore bs = new BigScore(domains[0],ncols,adaptFrm.means(),_output.hasWeights() && adaptFrm.find(_output.weightsName()) >= 0,computeMetrics, true /*make preds*/).doAll(ncols, Vec.T_NUM, adaptFrm);
if (computeMetrics)
bs._mb.makeModelMetrics(this, fr, adaptFrm, bs.outputFrame());
return bs.outputFrame((null == destination_key ? Key.make() : Key.make(destination_key)), names, domains);
}
/** Score an already adapted frame. Returns a MetricBuilder that can be used to make a model metrics.
* @param adaptFrm Already adapted frame
* @return MetricBuilder
*/
protected ModelMetrics.MetricBuilder scoreMetrics(Frame adaptFrm) {
final boolean computeMetrics = (!isSupervised() || adaptFrm.find(_output.responseName()) != -1);
// Build up the names & domains.
final int nc = _output.nclasses();
final int ncols = nc==1?1:nc+1; // Regression has 1 predict col; classification also has class distribution
String[] names = new String[ncols];
String[][] domains = new String[ncols][];
names[0] = "predict";
for(int i = 1; i < names.length; ++i) {
names[i] = _output.classNames()[i - 1];
// turn integer class labels such as 0, 1, etc. into p0, p1, etc.
try {
Integer.valueOf(names[i]);
names[i] = "p" + names[i];
} catch (Throwable t) {
// do nothing, non-integer names are fine already
}
}
domains[0] = nc==1 ? null : !computeMetrics ? _output._domains[_output._domains.length-1] : adaptFrm.lastVec().domain();
// Score the dataset, building the class distribution & predictions
BigScore bs = new BigScore(domains[0],ncols,adaptFrm.means(),_output.hasWeights() && adaptFrm.find(_output.weightsName()) >= 0,computeMetrics, false /*no preds*/).doAll(adaptFrm);
return bs._mb;
}
private class BigScore extends MRTask<BigScore> {
final String[] _domain; // Prediction domain; union of test and train classes
final int _npredcols; // Number of columns in prediction; nclasses+1 - can be less than the prediction domain
ModelMetrics.MetricBuilder _mb;
final double[] _mean; // Column means of test frame
final boolean _computeMetrics; // Whether to also compute model metrics while scoring
final boolean _hasWeights;
final boolean _makePreds;
BigScore( String[] domain, int ncols, double[] mean, boolean testHasWeights, boolean computeMetrics, boolean makePreds ) {
_domain = domain; _npredcols = ncols; _mean = mean; _computeMetrics = computeMetrics; _makePreds = makePreds;
if(_output._hasWeights && _computeMetrics && !testHasWeights)
throw new IllegalArgumentException("Missing weights when computing validation metrics.");
_hasWeights = testHasWeights;
}
@Override public void map( Chunk chks[], NewChunk cpreds[] ) {
if (isCancelled()) return;
Chunk weightsChunk = _hasWeights && _computeMetrics ? chks[_output.weightsIdx()] : new C0DChunk(1, chks[0]._len);
Chunk offsetChunk = _output.hasOffset() ? chks[_output.offsetIdx()] : new C0DChunk(0, chks[0]._len);
Chunk responseChunk = null;
double [] tmp = new double[_output.nfeatures()];
float [] actual = null;
_mb = Model.this.makeMetricBuilder(_domain);
if (_computeMetrics) {
if (isSupervised()) {
actual = new float[1];
responseChunk = chks[_output.responseIdx()];
} else
actual = new float[chks.length];
}
double[] preds = _mb._work; // Sized for the union of test and train classes
int len = chks[0]._len;
for (int row = 0; row < len; row++) {
double weight = weightsChunk.atd(row);
if (weight == 0) {
if (_makePreds) {
for (int c = 0; c < _npredcols; c++) // Output predictions; sized for train only (excludes extra test classes)
cpreds[c].addNum(0);
}
continue;
}
double offset = offsetChunk.atd(row);
double [] p = score0(chks, weight, offset, row, tmp, preds);
if (_computeMetrics) {
if(isSupervised()) {
actual[0] = (float)responseChunk.atd(row);
} else {
for(int i = 0; i < actual.length; ++i)
actual[i] = (float)chks[i].atd(row);
}
_mb.perRow(preds, actual, weight, offset, Model.this);
}
if (_makePreds) {
for (int c = 0; c < _npredcols; c++) // Output predictions; sized for train only (excludes extra test classes)
cpreds[c].addNum(p[c]);
}
}
}
@Override public void reduce( BigScore bs ) { if(_mb != null)_mb.reduce(bs._mb); }
@Override protected void postGlobal() { if(_mb != null)_mb.postGlobal(); }
}
/** Bulk scoring API for one row. Chunks are all compatible with the model,
* and expect the last Chunks are for the final distribution and prediction.
* Default method is to just load the data into the tmp array, then call
* subclass scoring logic. */
public double[] score0( Chunk chks[], int row_in_chunk, double[] tmp, double[] preds ) {
return score0(chks, 1, 0, row_in_chunk, tmp, preds);
}
public double[] score0( Chunk chks[], double weight, double offset, int row_in_chunk, double[] tmp, double[] preds ) {
assert(_output.nfeatures() == tmp.length);
for( int i=0; i< tmp.length; i++ )
tmp[i] = chks[i].atd(row_in_chunk);
double [] scored = score0(tmp, preds, weight, offset);
if(isSupervised()) {
// Correct probabilities obtained from training on oversampled data back to original distribution
// C.f. http://gking.harvard.edu/files/0s.pdf Eq.(27)
if( _output.isClassifier()) {
if (_parms._balance_classes)
GenModel.correctProbabilities(scored, _output._priorClassDist, _output._modelClassDist);
//assign label at the very end (after potentially correcting probabilities)
scored[0] = hex.genmodel.GenModel.getPrediction(scored, _output._priorClassDist, tmp, defaultThreshold());
}
}
return scored;
}
/** Subclasses implement the scoring logic. The data is pre-loaded into a
* re-used temp array, in the order the model expects. The predictions are
* loaded into the re-used temp array, which is also returned. */
protected abstract double[] score0(double data[/*ncols*/], double preds[/*nclasses+1*/]);
/**Override scoring logic for models that handle weight/offset**/
protected double[] score0(double data[/*ncols*/], double preds[/*nclasses+1*/], double weight, double offset) {
assert (weight == 1 && offset == 0) : "Override this method for non-trivial weight/offset!";
return score0(data, preds);
}
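// A minimal sketch (not from this source) of the abstract score0 in a hypothetical regression
// subclass that always predicts a stored training mean (field name invented):
//   @Override protected double[] score0(double data[], double preds[]) {
//     preds[0] = _output._meanResponse;   // hypothetical field
//     return preds;
//   }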
// Version where the user has just ponied-up an array of data to be scored.
// Data must be in proper order. Handy for JUnit tests.
public double score(double[] data){ return ArrayUtils.maxIndex(score0(data, new double[_output.nclasses()])); }
@Override protected Futures remove_impl( Futures fs ) {
if (_output._model_metrics != null)
for( Key k : _output._model_metrics )
k.remove(fs);
return super.remove_impl(fs);
}
/** Write out K/V pairs, in this case model metrics. */
@Override protected AutoBuffer writeAll_impl(AutoBuffer ab) {
if (_output._model_metrics != null)
for( Key k : _output._model_metrics )
ab.putKey(k);
return super.writeAll_impl(ab);
}
@Override protected Keyed readAll_impl(AutoBuffer ab, Futures fs) {
if (_output._model_metrics != null)
for( Key k : _output._model_metrics )
ab.getKey(k,fs); // Load model metrics
return super.readAll_impl(ab,fs);
}
@Override protected long checksum_impl() { return _parms.checksum_impl() * _output.checksum_impl(); }
// ==========================================================================
/** Return a String which is a valid Java program representing a class that
* implements the Model. The Java is of the form:
* <pre>
* class UUIDxxxxModel {
* public static final String NAMES[] = { ....column names... }
* public static final String DOMAINS[][] = { ....domain names... }
* // Pass in data in a double[], pre-aligned to the Model's requirements.
* // Jam predictions into the preds[] array; preds[0] is reserved for the
* // main prediction (class for classifiers or value for regression),
* // and remaining columns hold a probability distribution for classifiers.
* double[] predict( double data[], double preds[] );
* double[] map( HashMap < String,Double > row, double data[] );
* // Does the mapping lookup for every row, no allocation
* double[] predict( HashMap < String,Double > row, double data[], double preds[] );
* // Allocates a double[] for every row
* double[] predict( HashMap < String,Double > row, double preds[] );
* // Allocates a double[] and a double[] for every row
* double[] predict( HashMap < String,Double > row );
* }
* </pre>
*/
public final String toJava(boolean preview, boolean verboseCode) {
// 32k buffer by default
ByteArrayOutputStream os = new ByteArrayOutputStream(Short.MAX_VALUE);
// We do not need to close BAOS
/* ignore returned stream */ toJava(os, preview, verboseCode);
return os.toString();
}
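// A minimal usage sketch (hypothetical 'model' variable): writing the full, non-preview POJO
// source to a file using the OutputStream overload below.
//   try (FileOutputStream fos = new FileOutputStream("MyModel.java")) {
//     model.toJava(fos, false /*preview*/, true /*verboseCode*/);
//   }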
public final SBPrintStream toJava(OutputStream os, boolean preview, boolean verboseCode) {
if (preview /* && toJavaCheckTooBig() */) {
os = new LineLimitOutputStreamWrapper(os, 1000);
}
return toJava(new SBPrintStream(os), preview, verboseCode);
}
protected SBPrintStream toJava(SBPrintStream sb, boolean isGeneratingPreview, boolean verboseCode) {
CodeGeneratorPipeline fileCtx = new CodeGeneratorPipeline(); // preserve file context
String modelName = JCodeGen.toJavaId(_key.toString());
// HEADER
sb.p("/*").nl();
sb.p(" Licensed under the Apache License, Version 2.0").nl();
sb.p(" http://www.apache.org/licenses/LICENSE-2.0.html").nl();
sb.nl();
sb.p(" AUTOGENERATED BY H2O at ").p(new DateTime().toString()).nl();
sb.p(" ").p(H2O.ABV.projectVersion()).nl();
sb.p(" ").nl();
sb.p(" Standalone prediction code with sample test data for ").p(this.getClass().getSimpleName()).p(" named ").p(modelName)
.nl();
sb.nl();
sb.p(" How to download, compile and execute:").nl();
sb.p(" mkdir tmpdir").nl();
sb.p(" cd tmpdir").nl();
sb.p(" curl http:/").p(H2O.SELF.toString()).p("/3/h2o-genmodel.jar > h2o-genmodel.jar").nl();
sb.p(" curl http:/").p(H2O.SELF.toString()).p("/3/Models.java/").pobj(_key).p(" > ").p(modelName).p(".java").nl();
sb.p(" javac -cp h2o-genmodel.jar -J-Xmx2g -J-XX:MaxPermSize=128m ").p(modelName).p(".java").nl();
// Intentionally disabled since there is no main method in generated code
// sb.p("// java -cp h2o-genmodel.jar:. -Xmx2g -XX:MaxPermSize=256m -XX:ReservedCodeCacheSize=256m ").p(modelName).nl();
sb.nl();
sb.p(" (Note: Try java argument -XX:+PrintCompilation to show runtime JIT compiler behavior.)").nl();
if (_parms._offset_column != null) {
sb.nl();
sb.nl();
sb.nl();
sb.p(" NOTE: Java model export does not support offset_column.").nl();
sb.nl();
Log.warn("Java model export does not support offset_column.");
}
if (isGeneratingPreview && toJavaCheckTooBig()) {
sb.nl();
sb.nl();
sb.nl();
sb.p(" NOTE: Java model is too large to preview, please download as shown above.").nl();
sb.nl();
return sb;
}
sb.p("*/").nl();
sb.p("import java.util.Map;").nl();
sb.p("import hex.genmodel.GenModel;").nl();
sb.p("import hex.genmodel.annotations.ModelPojo;").nl();
sb.nl();
String algo = this.getClass().getSimpleName().toLowerCase().replace("model", "");
sb.p("@ModelPojo(name=\"").p(modelName).p("\", algorithm=\"").p(algo).p("\")").nl();
sb.p("public class ").p(modelName).p(" extends GenModel {").nl().ii(1);
sb.ip("public hex.ModelCategory getModelCategory() { return hex.ModelCategory." + _output
.getModelCategory() + "; }").nl();
toJavaInit(sb, fileCtx).nl();
toJavaNAMES(sb, fileCtx);
toJavaNCLASSES(sb);
toJavaDOMAINS(sb, fileCtx);
toJavaPROB(sb);
toJavaSuper(modelName, sb); //
sb.p(" public String getUUID() { return Long.toString("+checksum()+"L); }").nl();
toJavaPredict(sb, fileCtx, verboseCode);
sb.p("}").nl().di(1);
fileCtx.generate(sb); // Append file context
sb.nl();
return sb;
}
/** Generate implementation for super class. */
protected SBPrintStream toJavaSuper(String modelName, SBPrintStream sb) {
return sb.nl().ip("public " + modelName + "() { super(NAMES,DOMAINS); }").nl();
}
private SBPrintStream toJavaNAMES(SBPrintStream sb, CodeGeneratorPipeline fileCtx) {
final String modelName = JCodeGen.toJavaId(_key.toString());
final String namesHolderClassName = "NamesHolder_"+modelName;
sb.i().p("// ").p("Names of columns used by model.").nl();
sb.i().p("public static final String[] NAMES = "+namesHolderClassName+".VALUES;").nl();
// Generate class which fills the names into array
fileCtx.add(new CodeGenerator() {
@Override
public void generate(JCodeSB out) {
out.i().p("// The class representing training column names").nl();
JCodeGen.toClassWithArray(out, null, namesHolderClassName,
Arrays.copyOf(_output._names, _output.nfeatures()));
}
});
return sb;
}
protected SBPrintStream toJavaNCLASSES(SBPrintStream sb ) {
return _output.isClassifier() ? JCodeGen.toStaticVar(sb, "NCLASSES",
_output.nclasses(),
"Number of output classes included in training data response column.")
: sb;
}
private SBPrintStream toJavaDOMAINS(SBPrintStream sb, CodeGeneratorPipeline fileCtx) {
String modelName = JCodeGen.toJavaId(_key.toString());
sb.nl();
sb.ip("// Column domains. The last array contains domain of response column.").nl();
sb.ip("public static final String[][] DOMAINS = new String[][] {").nl();
String [][] domains = scoringDomains();
for (int i=0; i< domains.length; i++) {
final int idx = i;
final String[] dom = domains[i];
final String colInfoClazz = modelName+"_ColInfo_"+i;
sb.i(1).p("/* ").p(_output._names[i]).p(" */ ");
if (dom != null) sb.p(colInfoClazz).p(".VALUES"); else sb.p("null");
if (i!=domains.length-1) sb.p(',');
sb.nl();
// Right now do not generate the class representing column
// since it does not hold any interesting information except String array holding domain
if (dom != null) {
fileCtx.add(new CodeGenerator() {
@Override
public void generate(JCodeSB out) {
out.ip("// The class representing column ").p(_output._names[idx]).nl();
JCodeGen.toClassWithArray(out, null, colInfoClazz, dom);
}
}
);
}
}
return sb.ip("};").nl();
}
protected SBPrintStream toJavaPROB(SBPrintStream sb) {
if(isSupervised()) {
JCodeGen.toStaticVar(sb, "PRIOR_CLASS_DISTRIB", _output._priorClassDist, "Prior class distribution");
JCodeGen.toStaticVar(sb, "MODEL_CLASS_DISTRIB", _output._modelClassDist, "Class distribution used for model building");
}
return sb;
}
protected boolean toJavaCheckTooBig() {
Log.warn("toJavaCheckTooBig must be overridden for this model type to render it in the browser");
return true;
}
// Override in subclasses to provide some top-level model-specific goodness
protected SBPrintStream toJavaInit(SBPrintStream sb, CodeGeneratorPipeline fileContext) { return sb; }
// Override in subclasses to provide some inside 'predict' call goodness
// Method returns code which should be appended into generated top level class after
// predict method.
protected void toJavaPredictBody(SBPrintStream body,
CodeGeneratorPipeline classCtx,
CodeGeneratorPipeline fileCtx,
boolean verboseCode) {
throw new IllegalArgumentException("This model type does not support conversion to Java");
}
// Wrapper around the main predict call, including the signature and return value
private SBPrintStream toJavaPredict(SBPrintStream ccsb,
CodeGeneratorPipeline fileCtx,
boolean verboseCode) { // ccsb = classContext
ccsb.nl();
ccsb.ip("// Pass in data in a double[], pre-aligned to the Model's requirements.").nl();
ccsb.ip("// Jam predictions into the preds[] array; preds[0] is reserved for the").nl();
ccsb.ip("// main prediction (class for classifiers or value for regression),").nl();
ccsb.ip("// and remaining columns hold a probability distribution for classifiers.").nl();
ccsb.ip("public final double[] score0( double[] data, double[] preds ) {").nl();
CodeGeneratorPipeline classCtx = new CodeGeneratorPipeline(); //new SB().ii(1);
toJavaPredictBody(ccsb.ii(1), classCtx, fileCtx, verboseCode);
ccsb.ip("return preds;").nl();
ccsb.di(1).ip("}").nl();
// Output class context
classCtx.generate(ccsb.ii(1));
ccsb.di(1);
return ccsb;
}
// Convenience method for testing: build Java, convert it to a class &
// execute it: compare the results of the new class's (JIT'd) scoring with
// the built-in (interpreted) scoring on this dataset. Returns true if all
// is well, false if there are any mismatches. Throws if there is any error
// (typically an AssertionError or a failure to compile the POJO).
public boolean testJavaScoring( Frame data, Frame model_predictions, double rel_epsilon) {
assert data.numRows()==model_predictions.numRows();
final Frame fr = new Frame(data);
boolean computeMetrics = data.find(_output.responseName()) != -1;
try {
String[] warns = adaptTestForTrain(fr,true, computeMetrics);
if( warns.length > 0 )
System.err.println(Arrays.toString(warns));
// Output is in the model's domain, but needs to be mapped to the scored
// dataset's domain.
int[] omap = null;
if( _output.isClassifier() ) {
Vec actual = fr.vec(_output.responseName());
String sdomain[] = actual == null ? null : actual.domain(); // Scored/test domain; can be null
String mdomain[] = model_predictions.vec(0).domain(); // Domain of predictions (union of test and train)
if( sdomain != null && mdomain != sdomain && !Arrays.equals(mdomain, sdomain)) {
omap = CategoricalWrappedVec.computeMap(mdomain,sdomain); // Map from model-domain to scoring-domain
}
}
String modelName = JCodeGen.toJavaId(_key.toString());
boolean preview = false;
String java_text = toJava(preview, true);
GenModel genmodel;
try {
Class clz = JCodeGen.compile(modelName,java_text);
genmodel = (GenModel)clz.newInstance();
} catch (Exception e) {
throw H2O.fail("Internal POJO compilation failed",e);
}
Vec[] dvecs = fr.vecs();
Vec[] pvecs = model_predictions.vecs();
double features [] = MemoryManager.malloc8d(genmodel._names.length);
double predictions[] = MemoryManager.malloc8d(genmodel.nclasses() + 1);
// Compare predictions, counting mis-predicts
int totalMiss = 0;
int miss = 0;
for( int row=0; row<fr.numRows(); row++ ) { // For all rows, single-threaded
// Native Java API
for (int col = 0; col < features.length; col++) // Build feature set
features[col] = dvecs[col].at(row);
genmodel.score0(features, predictions); // POJO predictions
for (int col = 0; col < pvecs.length; col++) { // Compare predictions
double d = pvecs[col].at(row); // Load internal scoring predictions
if (col == 0 && omap != null) d = omap[(int) d]; // map categorical response to scoring domain
if (!MathUtils.compare(predictions[col], d, 1e-15, rel_epsilon)) {
if (miss++ < 10)
System.err.println("Predictions mismatch, row " + row + ", col " + model_predictions._names[col] + ", internal prediction=" + d + ", POJO prediction=" + predictions[col]);
}
}
totalMiss = miss;
}
// EasyPredict API
EasyPredictModelWrapper epmw = new EasyPredictModelWrapper(genmodel);
RowData rowData = new RowData();
for( int row=0; row<fr.numRows(); row++ ) { // For all rows, single-threaded
if (genmodel.getModelCategory() == ModelCategory.AutoEncoder) continue;
for( int col=0; col<features.length; col++ ) {
double val = dvecs[col].at(row);
rowData.put(
genmodel._names[col],
genmodel._domains[col] == null ? (Double) val
: (int)val < genmodel._domains[col].length ? genmodel._domains[col][(int)val] : "UnknownLevel");
}
AbstractPrediction p;
try { p=epmw.predict(rowData); }
catch (PredictException e) { continue; }
for (int col = 0; col < pvecs.length; col++) { // Compare predictions
double d = pvecs[col].at(row); // Load internal scoring predictions
if (col == 0 && omap != null) d = omap[(int) d]; // map categorical response to scoring domain
double d2 = Double.NaN;
switch( genmodel.getModelCategory()) {
case Clustering: d2 = ((ClusteringModelPrediction) p).cluster; break;
case Regression: d2 = ((RegressionModelPrediction) p).value; break;
case Binomial: BinomialModelPrediction bmp = ( BinomialModelPrediction) p;
d2 = (col==0) ? bmp.labelIndex : bmp.classProbabilities[col-1]; break;
case Multinomial: MultinomialModelPrediction mmp = (MultinomialModelPrediction) p;
d2 = (col==0) ? mmp.labelIndex : mmp.classProbabilities[col-1]; break;
}
if( !MathUtils.compare(d2, d, 1e-15, rel_epsilon) ) {
miss++;
System.err.println("EasyPredict Predictions mismatch, row " + row + ", col " + model_predictions._names[col] + ", internal prediction=" + d + ", POJO prediction=" + predictions[col]);
}
totalMiss = miss;
}
}
if (totalMiss != 0) System.err.println("Number of mismatches: " + totalMiss);
return totalMiss==0;
} finally {
cleanup_adapt(fr, data); // Remove temp keys.
}
}
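// A minimal usage sketch (hypothetical 'model' and 'test' variables, JUnit assumed): verify
// that the generated POJO agrees with in-H2O scoring within a relative tolerance.
//   Frame preds = model.score(test);
//   try {
//     Assert.assertTrue(model.testJavaScoring(test, preds, 1e-15));
//   } finally {
//     preds.delete();
//   }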
public void deleteCrossValidationModels( ) {
if (_output._cross_validation_models != null) {
for (Key k : _output._cross_validation_models) {
Model m = DKV.getGet(k);
if (m!=null) m.delete(); //delete all subparts
}
}
}
@Override public String toString() {
return _output.toString();
}
/** Model stream writer - output Java code representation of model. */
public class JavaModelStreamWriter extends StreamWriter {
/** Show only preview */
private final boolean preview;
public JavaModelStreamWriter(boolean preview) {
this.preview = preview;
}
@Override
public void writeTo(OutputStream os) {
toJava(os, preview, true);
}
}
@Override public Class<water.api.KeyV3.ModelKeyV3> makeSchema() { return water.api.KeyV3.ModelKeyV3.class; }
}
|
Only do categorical adaptation of levels if expensive=true.
|
h2o-core/src/main/java/hex/Model.java
|
Only do categorical adaptation of levels if expensive=true.
|
|
Java
|
apache-2.0
|
24eccd4b66f77c5b54b8a76ac372ad205a84d1f7
| 0
|
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
|
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.actionSystem.impl;
import com.intellij.ide.DataManager;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.actionSystem.ActionGroup;
import com.intellij.openapi.actionSystem.ActionPopupMenu;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationActivationListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.ui.JBPopupMenu;
import com.intellij.openapi.util.Getter;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.impl.InternalDecorator;
import com.intellij.ui.ComponentUtil;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.PopupMenuEvent;
import javax.swing.event.PopupMenuListener;
import java.awt.*;
/**
* @author Anton Katilin
* @author Vladimir Kondratyev
*/
final class ActionPopupMenuImpl implements ActionPopupMenu, ApplicationActivationListener {
private static final Logger LOG = Logger.getInstance(ActionPopupMenuImpl.class);
private final Application myApp;
private final MyMenu myMenu;
private final ActionManagerImpl myManager;
private Getter<? extends DataContext> myDataContextProvider;
private MessageBusConnection myConnection;
private IdeFrame myFrame;
private boolean myIsToolWindowContextMenu;
ActionPopupMenuImpl(@NotNull String place, @NotNull ActionGroup group,
@NotNull ActionManagerImpl actionManager,
@Nullable PresentationFactory factory) {
myManager = actionManager;
myMenu = new MyMenu(place, group, factory);
myApp = ApplicationManager.getApplication();
}
@NotNull
@Override
public JPopupMenu getComponent() {
return myMenu;
}
@Override
@NotNull
public String getPlace() {
return myMenu.myPlace;
}
@NotNull
@Override
public ActionGroup getActionGroup() {
return myMenu.myGroup;
}
void setDataContextProvider(@NotNull Getter<? extends DataContext> dataContextProvider) {
myDataContextProvider = dataContextProvider;
}
@Override
public void setTargetComponent(@NotNull JComponent component) {
myDataContextProvider = () -> DataManager.getInstance().getDataContext(component);
myIsToolWindowContextMenu = ComponentUtil
.getParentOfType((Class<? extends InternalDecorator>)InternalDecorator.class, (Component)component) != null;
}
boolean isToolWindowContextMenu() {
return myIsToolWindowContextMenu;
}
private class MyMenu extends JBPopupMenu {
@NotNull
private final String myPlace;
@NotNull
private final ActionGroup myGroup;
private DataContext myContext;
private final PresentationFactory myPresentationFactory;
MyMenu(@NotNull String place, @NotNull ActionGroup group, @Nullable PresentationFactory factory) {
myPlace = place;
myGroup = group;
myPresentationFactory = factory != null ? factory : new MenuItemPresentationFactory();
addPopupMenuListener(new MyPopupMenuListener());
}
@Override
public void show(final Component component, int x, int y) {
if (!component.isShowing()) {
throw new IllegalArgumentException("component must be shown on the screen (" + component + ")");
}
removeAll();
// Fill the menu; only after filling does it have a non-zero size.
int x2 = Math.max(0, Math.min(x, component.getWidth() - 1)); // fit x into [0, width-1]
int y2 = Math.max(0, Math.min(y, component.getHeight() - 1)); // fit y into [0, height-1]
myContext = myDataContextProvider != null ? myDataContextProvider.get() : DataManager.getInstance().getDataContext(component, x2, y2);
long time = -System.currentTimeMillis();
Utils.fillMenu(myGroup, this, true, myPresentationFactory, myContext, myPlace, false, LaterInvocator.isInModalContext(), false);
time += System.currentTimeMillis();
if (time > 1000) LOG.warn(time + "ms to fill popup menu " + myPlace);
if (getComponentCount() == 0) {
LOG.warn("no components in popup menu " + myPlace);
return;
}
if (myApp != null) {
if (myApp.isActive()) {
Component frame = UIUtil.findUltimateParent(component);
if (frame instanceof IdeFrame) {
myFrame = (IdeFrame)frame;
}
myConnection = myApp.getMessageBus().connect();
myConnection.subscribe(ApplicationActivationListener.TOPIC, ActionPopupMenuImpl.this);
}
}
assert component.isShowing() : "Component: " + component;
super.show(component, x, y);
}
@Override
public void setVisible(boolean b) {
super.setVisible(b);
if (!b) ReflectionUtil.resetField(this, "invoker");
}
private class MyPopupMenuListener implements PopupMenuListener {
@Override
public void popupMenuCanceled(PopupMenuEvent e) {
disposeMenu();
}
@Override
public void popupMenuWillBecomeInvisible(PopupMenuEvent e) {
disposeMenu();
}
private void disposeMenu() {
myManager.removeActionPopup(ActionPopupMenuImpl.this);
removeAll();
if (myConnection != null) {
myConnection.disconnect();
}
}
@Override
public void popupMenuWillBecomeVisible(PopupMenuEvent e) {
removeAll();
Utils.fillMenu(myGroup, MyMenu.this, !UISettings.getInstance().getDisableMnemonics(), myPresentationFactory, myContext, myPlace, false,
LaterInvocator.isInModalContext(), false);
myManager.addActionPopup(ActionPopupMenuImpl.this);
}
}
}
@Override
public void applicationDeactivated(@NotNull IdeFrame ideFrame) {
if (myFrame == ideFrame) {
myMenu.setVisible(false);
}
}
}
|
platform/platform-impl/src/com/intellij/openapi/actionSystem/impl/ActionPopupMenuImpl.java
|
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.actionSystem.impl;
import com.intellij.ide.DataManager;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.actionSystem.ActionGroup;
import com.intellij.openapi.actionSystem.ActionPopupMenu;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationActivationListener;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.openapi.ui.JBPopupMenu;
import com.intellij.openapi.util.Getter;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.impl.InternalDecorator;
import com.intellij.ui.ComponentUtil;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.PopupMenuEvent;
import javax.swing.event.PopupMenuListener;
import java.awt.*;
/**
* @author Anton Katilin
* @author Vladimir Kondratyev
*/
final class ActionPopupMenuImpl implements ActionPopupMenu, ApplicationActivationListener {
private final Application myApp;
private final MyMenu myMenu;
private final ActionManagerImpl myManager;
private Getter<? extends DataContext> myDataContextProvider;
private MessageBusConnection myConnection;
private IdeFrame myFrame;
private boolean myIsToolWindowContextMenu;
ActionPopupMenuImpl(@NotNull String place, @NotNull ActionGroup group,
@NotNull ActionManagerImpl actionManager,
@Nullable PresentationFactory factory) {
myManager = actionManager;
myMenu = new MyMenu(place, group, factory);
myApp = ApplicationManager.getApplication();
}
@NotNull
@Override
public JPopupMenu getComponent() {
return myMenu;
}
@Override
@NotNull
public String getPlace() {
return myMenu.myPlace;
}
@NotNull
@Override
public ActionGroup getActionGroup() {
return myMenu.myGroup;
}
void setDataContextProvider(@NotNull Getter<? extends DataContext> dataContextProvider) {
myDataContextProvider = dataContextProvider;
}
@Override
public void setTargetComponent(@NotNull JComponent component) {
myDataContextProvider = () -> DataManager.getInstance().getDataContext(component);
myIsToolWindowContextMenu = ComponentUtil
.getParentOfType((Class<? extends InternalDecorator>)InternalDecorator.class, (Component)component) != null;
}
boolean isToolWindowContextMenu() {
return myIsToolWindowContextMenu;
}
private class MyMenu extends JBPopupMenu {
@NotNull
private final String myPlace;
@NotNull
private final ActionGroup myGroup;
private DataContext myContext;
private final PresentationFactory myPresentationFactory;
MyMenu(@NotNull String place, @NotNull ActionGroup group, @Nullable PresentationFactory factory) {
myPlace = place;
myGroup = group;
myPresentationFactory = factory != null ? factory : new MenuItemPresentationFactory();
addPopupMenuListener(new MyPopupMenuListener());
}
@Override
public void show(final Component component, int x, int y) {
if (!component.isShowing()) {
throw new IllegalArgumentException("component must be shown on the screen (" + component + ")");
}
removeAll();
// Fill the menu; only after filling does it have a non-zero size.
int x2 = Math.max(0, Math.min(x, component.getWidth() - 1)); // fit x into [0, width-1]
int y2 = Math.max(0, Math.min(y, component.getHeight() - 1)); // fit y into [0, height-1]
myContext = myDataContextProvider != null ? myDataContextProvider.get() : DataManager.getInstance().getDataContext(component, x2, y2);
Utils.fillMenu(myGroup, this, true, myPresentationFactory, myContext, myPlace, false, LaterInvocator.isInModalContext(), false);
if (getComponentCount() == 0) {
return;
}
if (myApp != null) {
if (myApp.isActive()) {
Component frame = UIUtil.findUltimateParent(component);
if (frame instanceof IdeFrame) {
myFrame = (IdeFrame)frame;
}
myConnection = myApp.getMessageBus().connect();
myConnection.subscribe(ApplicationActivationListener.TOPIC, ActionPopupMenuImpl.this);
}
}
assert component.isShowing() : "Component: " + component;
super.show(component, x, y);
}
@Override
public void setVisible(boolean b) {
super.setVisible(b);
if (!b) ReflectionUtil.resetField(this, "invoker");
}
private class MyPopupMenuListener implements PopupMenuListener {
@Override
public void popupMenuCanceled(PopupMenuEvent e) {
disposeMenu();
}
@Override
public void popupMenuWillBecomeInvisible(PopupMenuEvent e) {
disposeMenu();
}
private void disposeMenu() {
myManager.removeActionPopup(ActionPopupMenuImpl.this);
removeAll();
if (myConnection != null) {
myConnection.disconnect();
}
}
@Override
public void popupMenuWillBecomeVisible(PopupMenuEvent e) {
removeAll();
Utils.fillMenu(myGroup, MyMenu.this, !UISettings.getInstance().getDisableMnemonics(), myPresentationFactory, myContext, myPlace, false,
LaterInvocator.isInModalContext(), false);
myManager.addActionPopup(ActionPopupMenuImpl.this);
}
}
}
@Override
public void applicationDeactivated(@NotNull IdeFrame ideFrame) {
if (myFrame == ideFrame) {
myMenu.setVisible(false);
}
}
}
|
IDEA-223588: add logging to investigate the issue
GitOrigin-RevId: 657256986b121d5016fa1263fa9dc181559a9c44
|
platform/platform-impl/src/com/intellij/openapi/actionSystem/impl/ActionPopupMenuImpl.java
|
IDEA-223588: add logging to investigate the issue
|
|
Java
|
apache-2.0
|
63473e6f31f7cf42423ef844a518e91562010cb3
| 0
|
nezda/yawni,nezda/yawni,nezda/yawni,nezda/yawni,nezda/yawni
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yawni.wn;
import org.yawni.util.CharSequenceTokenizer;
/**
* A <code>Relation</code> encodes a lexical <em>or</em> semantic relationship between WordNet entities. A lexical
* relationship holds between {@link WordSense}s; a semantic relationship holds between {@link Synset}s.
* Relationships are <em>directional</em>: the two roles of a relationship are the <em>source</em> and <em>target</em>.
* Relationships are <em>typed</em>: the type of a relationship is a {@link RelationType}, and can
* be retrieved via {@link Relation#getType Relation.getType()}.
*
* <p> Note this class used to be called {@code Pointer} as it is often referred to in the official WordNet documentation.
*
* @see Synset
* @see WordSense
*/
public class Relation implements Comparable<Relation> {
/**
* These target* fields are used to avoid paging in the target before it is
* required, and to prevent keeping a large portion of the database resident
* once the target has been queried. The first time they are used, they act as
* an external key; subsequent uses, in conjunction with
* {@link FileBackedDictionary}'s caching mechanism, can be thought of as a
* {@link java.lang.ref.WeakReference}.
*/
private final int targetOffset;
private final int targetIndex;
private final byte targetPOSOrdinal;
//
// Instance variables
//
/**
* The index of this Relation within the array of Relations in the source Synset.
* Used in <code>equals</code>.
*/
private final int index;
private final RelationTarget source;
private final byte relationTypeOrdinal;
//
// Constructor
//
Relation(final int targetOffset, final int targetIndex, final byte targetPOSOrdinal,
final int index, final RelationTarget source, final byte relationTypeOrdinal) {
this.targetOffset = targetOffset;
this.targetIndex = targetIndex;
this.targetPOSOrdinal = targetPOSOrdinal;
this.index = index;
this.source = source;
this.relationTypeOrdinal = relationTypeOrdinal;
}
/** Factory method */
static Relation makeRelation(final Synset synset, final int index, final CharSequenceTokenizer tokenizer) {
final byte relationTypeOrdinal = (byte) RelationType.parseKey(tokenizer.nextToken()).ordinal();
final int targetOffset = tokenizer.nextInt();
final byte targetPOSOrdinal = (byte) POS.lookup(tokenizer.nextToken()).ordinal();
final int linkIndices = tokenizer.nextHexInt();
assert linkIndices >> 16 == 0;
final int sourceIndex = linkIndices >> 8; // select high byte
final int targetIndex = linkIndices & 0xFF; // select low byte
final RelationTarget source = Relation.resolveTarget(synset, sourceIndex);
if (source instanceof WordSense) {
return new LexicalRelation(targetOffset, targetIndex, targetPOSOrdinal, index, source, relationTypeOrdinal);
} else if (source instanceof Synset) {
return new SemanticRelation(targetOffset, targetIndex, targetPOSOrdinal, index, source, relationTypeOrdinal);
} else {
throw new IllegalStateException();
}
}
//
// Accessors
//
public RelationType getType() {
return RelationType.fromOrdinal(relationTypeOrdinal);
}
/** A lexical relationship holds between {@link WordSense}s */
public boolean isLexical() {
return source instanceof WordSense;
// else assert instanceof Synset;
}
/** A semantic relationship holds between {@link Synset}s */
public boolean isSemantic() {
return source instanceof Synset;
// else assert instanceof WordSense;
}
public RelationTarget getSource() {
return source;
}
public RelationTarget getTarget() {
return Relation.resolveTarget(
// using source.getSynset() to avoid requiring a local field
source.getSynset().fileBackedDictionary.getSynsetAt(
POS.fromOrdinal(targetPOSOrdinal),
targetOffset),
targetIndex);
}
private static RelationTarget resolveTarget(final Synset synset, final int index) {
if (index == 0) {
return synset;
} else {
return synset.getWordSense(index - 1);
}
}
//
// Object methods
//
@Override
public boolean equals(final Object that) {
return (that instanceof Relation)
&& ((Relation) that).source.equals(this.source)
&& ((Relation) that).index == this.index;
}
@Override
public int hashCode() {
return source.hashCode() + index;
}
@Override
public String toString() {
return new StringBuilder("[").
append(getClass().getSimpleName()).
//append("Relation").
append(' ').
append(getType().name()).
//append("#").
//append(index).
append(" from ").
//append(source).
append(source).
//append(" → ").
append(" to ").
append(getTarget()).
append(']').toString();
}
/** {@inheritDoc} */
public int compareTo(final Relation that) {
// order by src Synset
// then by 'index' field
int result;
result = this.getSource().getSynset().compareTo(that.getSource().getSynset());
if (result == 0) {
result = this.index - that.index;
}
return result;
}
}
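// Illustrative sketch (not part of the original file): once a Synset has been obtained from the
// dictionary, its relations can be walked roughly as below. The accessor name getRelations() is
// an assumption made for illustration; only getType(), isSemantic(), getSource() and getTarget()
// are defined in this file.
//
//   for (final Relation relation : synset.getRelations()) {
//     if (relation.isSemantic()) {
//       System.out.println(relation.getType() + ": " + relation.getSource() + " -> " + relation.getTarget());
//     }
//   }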
|
core/src/main/java/org/yawni/wn/Relation.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yawni.wn;
import org.yawni.util.CharSequenceTokenizer;
/**
* A <code>Relation</code> encodes a lexical <em>or</em> semantic relationship between WordNet entities. A lexical
* relationship holds between {@link WordSense}s; a semantic relationship holds between {@link Synset}s.
* Relationships are <em>directional</em>: the two roles of a relationship are the <em>source</em> and <em>target</em>.
* Relationships are <em>typed</em>: the type of a relationship is a {@link RelationType}, and can
* be retrieved via {@link Relation#getType Relation.getType()}.
*
* <p> Note this class used to be called {@code Pointer} as it is often referred to in the official WordNet documentation.
*
* @see Synset
* @see WordSense
*/
public class Relation implements Comparable<Relation> {
/**
* These target* fields are used to avoid paging in the target before it is
* required, and to prevent keeping a large portion of the database resident
* once the target has been queried. The first time they are used, they act as
* an external key; subsequent uses, in conjunction with
* {@link FileBackedDictionary}'s caching mechanism, can be thought of as a
* {@link java.lang.ref.WeakReference}.
*/
private final int targetOffset;
private final int targetIndex;
private final byte targetPOSOrdinal;
//
// Instance variables
//
/**
* The index of this Relation within the array of Relations in the source Synset.
* Used in <code>equals</code>.
*/
private final int index;
private final RelationTarget source;
private final byte relationTypeOrdinal;
//
// Constructor
//
Relation(final int targetOffset, final int targetIndex, final byte targetPOSOrdinal,
final int index, final RelationTarget source, final byte relationTypeOrdinal) {
this.targetOffset = targetOffset;
this.targetIndex = targetIndex;
this.targetPOSOrdinal = targetPOSOrdinal;
this.index = index;
this.source = source;
this.relationTypeOrdinal = relationTypeOrdinal;
}
/** Factory method */
static Relation makeRelation(final Synset synset, final int index, final CharSequenceTokenizer tokenizer) {
final byte relationTypeOrdinal = (byte) RelationType.parseKey(tokenizer.nextToken()).ordinal();
final int targetOffset = tokenizer.nextInt();
final byte targetPOSOrdinal = (byte) POS.lookup(tokenizer.nextToken()).ordinal();
final int linkIndices = tokenizer.nextHexInt();
assert linkIndices >> 16 == 0;
final int sourceIndex = linkIndices >> 8; // select high byte
final int targetIndex = linkIndices & 0xFF; // select low byte
final RelationTarget source = Relation.resolveTarget(synset, sourceIndex);
if (source instanceof WordSense) {
return new LexicalRelation(targetOffset, targetIndex, targetPOSOrdinal, index, source, relationTypeOrdinal);
} else if (source instanceof Synset) {
return new SemanticRelation(targetOffset, targetIndex, targetPOSOrdinal, index, source, relationTypeOrdinal);
} else {
throw new IllegalStateException();
}
}
//
// Accessors
//
public RelationType getType() {
return RelationType.fromOrdinal(relationTypeOrdinal);
}
/** A lexical relationship holds between {@link WordSense}s */
public boolean isLexical() {
return source instanceof WordSense;
// else assert instanceof Synset;
}
/** A semantic relationship holds between {@link Synset}s */
public boolean isSemantic() {
return source instanceof Synset;
// else assert instanceof WordSense;
}
public RelationTarget getSource() {
return source;
}
public RelationTarget getTarget() {
return Relation.resolveTarget(
// using source.getSynset() to avoid requiring a local field
source.getSynset().fileBackedDictionary.getSynsetAt(
POS.fromOrdinal(targetPOSOrdinal),
targetOffset),
targetIndex);
}
private static RelationTarget resolveTarget(final Synset synset, final int index) {
if (index == 0) {
return synset;
} else {
return synset.getWordSense(index - 1);
}
}
//
// Object methods
//
@Override
public boolean equals(final Object that) {
return (that instanceof Relation)
&& ((Relation) that).source.equals(this.source)
&& ((Relation) that).index == this.index;
}
@Override
public int hashCode() {
return source.hashCode() + index;
}
@Override
public String toString() {
return new StringBuilder("[").
append(getClass().getSimpleName()).
//append("Relation").
append(' ').
append(getType().name()).
//append("#").
//append(index).
append(" from ").
//append(source).
append(source).
//append(" → ").
append(" to ").
append(getTarget()).
append(']').toString();
}
/** {@inheritDoc} */
public int compareTo(final Relation that) {
// order by src Synset
// then by 'index' field
int result;
result = this.getSource().getSynset().compareTo(that.getSource().getSynset());
if (result == 0) {
result = this.index - that.index;
}
return result;
}
}
|
minor
|
core/src/main/java/org/yawni/wn/Relation.java
|
minor
|
|
Java
|
apache-2.0
|
81cf34b466cdd5c2a9a72189e6536a4e119e7996
| 0
|
wwjiang007/alluxio,solzy/tachyon,apc999/alluxio,jswudi/alluxio,aaudiber/alluxio,solzy/tachyon,madanadit/alluxio,apc999/alluxio,yuluo-ding/alluxio,mesosphere/tachyon,bf8086/alluxio,maboelhassan/alluxio,uronce-cc/alluxio,bf8086/alluxio,maobaolong/alluxio,apc999/alluxio,jswudi/alluxio,madanadit/alluxio,maboelhassan/alluxio,riversand963/alluxio,jswudi/alluxio,jsimsa/alluxio,ShailShah/alluxio,Reidddddd/mo-alluxio,yuluo-ding/alluxio,carsonwang/tachyon,wwjiang007/alluxio,calvinjia/tachyon,maobaolong/alluxio,EvilMcJerkface/alluxio,mesosphere/tachyon,Reidddddd/alluxio,mesosphere/tachyon,ShailShah/alluxio,Reidddddd/mo-alluxio,calvinjia/tachyon,Alluxio/alluxio,ShailShah/alluxio,Alluxio/alluxio,bf8086/alluxio,madanadit/alluxio,WilliamZapata/alluxio,jsimsa/alluxio,Alluxio/alluxio,Reidddddd/alluxio,yuluo-ding/alluxio,ooq/memory,Reidddddd/alluxio,carsonwang/tachyon,ooq/memory,aaudiber/alluxio,EvilMcJerkface/alluxio,apc999/alluxio,WilliamZapata/alluxio,apc999/alluxio,calvinjia/tachyon,madanadit/alluxio,carsonwang/tachyon,maboelhassan/alluxio,jsimsa/alluxio,maboelhassan/alluxio,jsimsa/alluxio,maboelhassan/alluxio,madanadit/alluxio,WilliamZapata/alluxio,jsimsa/alluxio,ChangerYoung/alluxio,Alluxio/alluxio,solzy/tachyon,ShailShah/alluxio,wwjiang007/alluxio,calvinjia/tachyon,ChangerYoung/alluxio,madanadit/alluxio,uronce-cc/alluxio,wwjiang007/alluxio,bf8086/alluxio,maobaolong/alluxio,ShailShah/alluxio,riversand963/alluxio,WilliamZapata/alluxio,PasaLab/tachyon,bf8086/alluxio,ShailShah/alluxio,maobaolong/alluxio,ooq/memory,Alluxio/alluxio,wwjiang007/alluxio,apc999/alluxio,uronce-cc/alluxio,calvinjia/tachyon,carsonwang/tachyon,Reidddddd/mo-alluxio,aaudiber/alluxio,aaudiber/alluxio,wwjiang007/alluxio,ChangerYoung/alluxio,riversand963/alluxio,ooq/memory,uronce-cc/alluxio,riversand963/alluxio,WilliamZapata/alluxio,maobaolong/alluxio,jswudi/alluxio,wwjiang007/alluxio,aaudiber/alluxio,jswudi/alluxio,maobaolong/alluxio,WilliamZapata/alluxio,maobaolong/alluxio,madanadit/alluxio,maobaolong/alluxio,Reidddddd/alluxio,yuluo-ding/alluxio,jsimsa/alluxio,riversand963/alluxio,bf8086/alluxio,EvilMcJerkface/alluxio,Reidddddd/mo-alluxio,maboelhassan/alluxio,Reidddddd/mo-alluxio,PasaLab/tachyon,PasaLab/tachyon,uronce-cc/alluxio,madanadit/alluxio,calvinjia/tachyon,ChangerYoung/alluxio,Alluxio/alluxio,Alluxio/alluxio,Reidddddd/alluxio,apc999/alluxio,calvinjia/tachyon,PasaLab/tachyon,wwjiang007/alluxio,wwjiang007/alluxio,maboelhassan/alluxio,solzy/tachyon,ChangerYoung/alluxio,Alluxio/alluxio,PasaLab/tachyon,EvilMcJerkface/alluxio,EvilMcJerkface/alluxio,EvilMcJerkface/alluxio,calvinjia/tachyon,maobaolong/alluxio,aaudiber/alluxio,jswudi/alluxio,yuluo-ding/alluxio,EvilMcJerkface/alluxio,yuluo-ding/alluxio,aaudiber/alluxio,maobaolong/alluxio,riversand963/alluxio,Alluxio/alluxio,Reidddddd/alluxio,Alluxio/alluxio,mesosphere/tachyon,wwjiang007/alluxio,Reidddddd/alluxio,Reidddddd/mo-alluxio,bf8086/alluxio,uronce-cc/alluxio,PasaLab/tachyon,bf8086/alluxio,PasaLab/tachyon,EvilMcJerkface/alluxio,ChangerYoung/alluxio
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tachyon;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Queue;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.log4j.Logger;
import tachyon.UnderFileSystem.SpaceType;
import tachyon.conf.CommonConf;
import tachyon.conf.MasterConf;
import tachyon.io.Utils;
import tachyon.thrift.BlockInfoException;
import tachyon.thrift.ClientBlockInfo;
import tachyon.thrift.ClientFileInfo;
import tachyon.thrift.ClientRawTableInfo;
import tachyon.thrift.ClientWorkerInfo;
import tachyon.thrift.Command;
import tachyon.thrift.CommandType;
import tachyon.thrift.FileAlreadyExistException;
import tachyon.thrift.FileDoesNotExistException;
import tachyon.thrift.InvalidPathException;
import tachyon.thrift.NetAddress;
import tachyon.thrift.SuspectedFileSizeException;
import tachyon.thrift.TableColumnException;
import tachyon.thrift.TableDoesNotExistException;
import tachyon.thrift.TachyonException;
import tachyon.util.CommonUtils;
/**
* A global view of the filesystem in the master.
*/
public class MasterInfo {
public static final String COL = "COL_";
private final Logger LOG = Logger.getLogger(Constants.LOGGER_TYPE);
private final InetSocketAddress MASTER_ADDRESS;
private final long START_TIME_NS_PREFIX;
private final long START_TIME_MS;
private final MasterConf MASTER_CONF;
private CheckpointInfo mCheckpointInfo = new CheckpointInfo(0, 0);
private AtomicInteger mInodeCounter = new AtomicInteger(0);
private AtomicInteger mUserCounter = new AtomicInteger(0);
private AtomicInteger mWorkerCounter = new AtomicInteger(0);
// Root Inode's id must be 1.
private InodeFolder mRoot;
private Map<Integer, Inode> mInodes = new HashMap<Integer, Inode>();
private Map<Long, MasterWorkerInfo> mWorkers = new HashMap<Long, MasterWorkerInfo>();
private Map<InetSocketAddress, Long> mWorkerAddressToId = new HashMap<InetSocketAddress, Long>();
private BlockingQueue<MasterWorkerInfo> mLostWorkers = new ArrayBlockingQueue<MasterWorkerInfo>(32);
// TODO Check the logic related to these two lists.
private PrefixList mWhiteList;
private PrefixList mPinList;
private Set<Integer> mFileIdPinList;
private Journal mJournal;
private HeartbeatThread mHeartbeatThread;
/**
* Master info periodical status check.
*/
public class MasterInfoHeartbeatExecutor implements HeartbeatExecutor {
@Override
public void heartbeat() {
LOG.debug("System status checking.");
Set<Long> lostWorkers = new HashSet<Long>();
synchronized (mWorkers) {
for (Entry<Long, MasterWorkerInfo> worker: mWorkers.entrySet()) {
if (CommonUtils.getCurrentMs() - worker.getValue().getLastUpdatedTimeMs()
> MASTER_CONF.WORKER_TIMEOUT_MS) {
LOG.error("The worker " + worker.getValue() + " got timed out!");
mLostWorkers.add(worker.getValue());
lostWorkers.add(worker.getKey());
}
}
for (long workerId: lostWorkers) {
MasterWorkerInfo workerInfo = mWorkers.get(workerId);
mWorkerAddressToId.remove(workerInfo.getAddress());
mWorkers.remove(workerId);
}
}
boolean hadFailedWorker = false;
while (mLostWorkers.size() != 0) {
hadFailedWorker = true;
MasterWorkerInfo worker = mLostWorkers.poll();
// TODO this locking is not efficient. Since node failure is rare, this is fine for now.
synchronized (mRoot) {
try {
for (long blockId: worker.getBlocks()) {
int fileId = BlockInfo.computeInodeId(blockId);
InodeFile tFile = (InodeFile) mInodes.get(fileId);
if (tFile != null) {
int blockIndex = BlockInfo.computeBlockIndex(blockId);
tFile.removeLocation(blockIndex, worker.getId());
if (!tFile.hasCheckpointed() && tFile.getBlockLocations(blockIndex).size() == 0) {
LOG.info("Block " + blockId + " got lost from worker " + worker.getId() + ".");
} else {
LOG.info("Block " + blockId + " only lost an in memory copy from worker " +
worker.getId());
}
}
}
} catch (BlockInfoException e) {
LOG.error(e);
}
}
}
if (hadFailedWorker) {
LOG.warn("Restarting failed workers.");
try {
java.lang.Runtime.getRuntime().exec(CommonConf.get().TACHYON_HOME +
"/bin/tachyon-start.sh restart_workers");
} catch (IOException e) {
LOG.error(e.getMessage());
}
}
}
}
public class RecomputeCmd implements Runnable {
private final String CMD;
private final String FILE_PATH;
public RecomputeCmd(String cmd, String filePath) {
CMD = cmd;
FILE_PATH = filePath;
}
@Override
public void run() {
try {
LOG.info("Exec " + CMD + " output to " + FILE_PATH);
Process p = java.lang.Runtime.getRuntime().exec(CMD);
String line;
BufferedReader bri = new BufferedReader(new InputStreamReader(p.getInputStream()));
BufferedReader bre = new BufferedReader(new InputStreamReader(p.getErrorStream()));
File file = new File(FILE_PATH);
FileWriter fw = new FileWriter(file.getAbsoluteFile());
BufferedWriter bw = new BufferedWriter(fw);
while ((line = bri.readLine()) != null) {
bw.write(line + "\n");
}
bri.close();
while ((line = bre.readLine()) != null) {
bw.write(line + "\n");
}
bre.close();
bw.flush();
bw.close();
p.waitFor();
LOG.info("Exec " + CMD + " output to " + FILE_PATH + " done.");
} catch (IOException e) {
LOG.error(e.getMessage());
} catch (InterruptedException e) {
LOG.error(e.getMessage());
}
}
}
public MasterInfo(InetSocketAddress address, Journal journal) throws IOException {
MASTER_CONF = MasterConf.get();
mRoot = new InodeFolder("", mInodeCounter.incrementAndGet(), -1, System.currentTimeMillis());
mInodes.put(mRoot.getId(), mRoot);
MASTER_ADDRESS = address;
START_TIME_MS = System.currentTimeMillis();
// TODO This name needs to be changed.
START_TIME_NS_PREFIX = START_TIME_MS - (START_TIME_MS % 1000000);
mJournal = journal;
mWhiteList = new PrefixList(MASTER_CONF.WHITELIST);
mPinList = new PrefixList(MASTER_CONF.PINLIST);
mFileIdPinList = Collections.synchronizedSet(new HashSet<Integer>());
mJournal.loadImage(this);
}
public void init() throws IOException {
mCheckpointInfo.updateEditTransactionCounter(mJournal.loadEditLog(this));
mJournal.createImage(this);
mJournal.createEditLog(mCheckpointInfo.getEditTransactionCounter());
mHeartbeatThread = new HeartbeatThread("Master Heartbeat",
new MasterInfoHeartbeatExecutor(), MASTER_CONF.HEARTBEAT_INTERVAL_MS);
mHeartbeatThread.start();
}
/**
* Add a checkpoint to a file.
* @param workerId The worker which submitted the request. -1 if the request is not from a worker.
* @param fileId The file to add the checkpoint.
* @param length The length of the checkpoint.
* @param checkpointPath The path of the checkpoint.
* @return true if the checkpoint is added successfully, false if not.
* @throws FileNotFoundException
* @throws SuspectedFileSizeException
* @throws BlockInfoException
*/
public boolean addCheckpoint(long workerId, int fileId, long length, String checkpointPath)
throws FileNotFoundException, SuspectedFileSizeException, BlockInfoException {
LOG.info(CommonUtils.parametersToString(workerId, fileId, length, checkpointPath));
if (workerId != -1) {
MasterWorkerInfo tWorkerInfo = getWorkerInfo(workerId);
tWorkerInfo.updateLastUpdatedTimeMs();
}
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileNotFoundException("File " + fileId + " does not exist.");
}
if (inode.isDirectory()) {
throw new FileNotFoundException("File " + fileId + " is a folder.");
}
InodeFile tFile = (InodeFile) inode;
boolean needLog = false;
if (tFile.isComplete()) {
if (tFile.getLength() != length) {
throw new SuspectedFileSizeException(fileId + ". Original Size: " +
tFile.getLength() + ". New Size: " + length);
}
} else {
tFile.setLength(length);
needLog = true;
}
if (!tFile.hasCheckpointed()) {
tFile.setCheckpointPath(checkpointPath);
needLog = true;
}
tFile.setComplete();
if (needLog) {
mJournal.getEditLog().addCheckpoint(fileId, length, checkpointPath);
mJournal.getEditLog().flush();
}
return true;
}
}
/**
* A worker caches a block in its memory.
*
* @param workerId
* @param workerUsedBytes
* @param blockId
* @param length
* @throws FileDoesNotExistException
* @throws SuspectedFileSizeException
* @throws BlockInfoException
*/
public void cacheBlock(long workerId, long workerUsedBytes, long blockId, long length)
throws FileDoesNotExistException, SuspectedFileSizeException, BlockInfoException {
LOG.debug(CommonUtils.parametersToString(workerId, workerUsedBytes, blockId, length));
MasterWorkerInfo tWorkerInfo = getWorkerInfo(workerId);
tWorkerInfo.updateBlock(true, blockId);
tWorkerInfo.updateUsedBytes(workerUsedBytes);
tWorkerInfo.updateLastUpdatedTimeMs();
int fileId = BlockInfo.computeInodeId(blockId);
int blockIndex = BlockInfo.computeBlockIndex(blockId);
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("File " + fileId + " does not exist.");
}
if (inode.isDirectory()) {
throw new FileDoesNotExistException("File " + fileId + " is a folder.");
}
InodeFile tFile = (InodeFile) inode;
if (tFile.getNumberOfBlocks() <= blockIndex) {
addBlock(tFile, new BlockInfo(tFile, blockIndex, length));
}
InetSocketAddress address = tWorkerInfo.ADDRESS;
tFile.addLocation(blockIndex, workerId,
new NetAddress(address.getHostName(), address.getPort()));
}
}
/**
* Called by edit log only.
* @param fileId
* @param blockIndex
* @param blockLength
* @throws FileDoesNotExistException
* @throws BlockInfoException
*/
void opAddBlock(int fileId, int blockIndex, long blockLength)
throws FileDoesNotExistException, BlockInfoException {
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("File " + fileId + " does not exist.");
}
if (inode.isDirectory()) {
throw new FileDoesNotExistException("File " + fileId + " is a folder.");
}
addBlock((InodeFile) inode, new BlockInfo((InodeFile) inode, blockIndex, blockLength));
}
}
private void addBlock(InodeFile tFile, BlockInfo blockInfo) throws BlockInfoException {
tFile.addBlock(blockInfo);
mJournal.getEditLog().addBlock(tFile.getId(), blockInfo.BLOCK_INDEX, blockInfo.LENGTH);
mJournal.getEditLog().flush();
}
public int createFile(String path, long blockSizeByte)
throws FileAlreadyExistException, InvalidPathException, BlockInfoException, TachyonException {
return createFile(true, path, false, -1, null, blockSizeByte);
}
// TODO Make this API better.
/**
* Internal API.
* @param recursive
* @param path
* @param directory
* @param columns
* @param metadata
* @param blockSizeByte
* @param creationTimeMs
* @return
* @throws FileAlreadyExistException
* @throws InvalidPathException
* @throws BlockInfoException
* @throws TachyonException
*/
int _createFile(boolean recursive, String path, boolean directory, int columns,
ByteBuffer metadata, long blockSizeByte, long creationTimeMs)
throws FileAlreadyExistException, InvalidPathException, BlockInfoException,
TachyonException {
if (!directory && blockSizeByte < 1) {
throw new BlockInfoException("Invalid block size " + blockSizeByte);
}
LOG.debug("createFile" + CommonUtils.parametersToString(path));
String[] pathNames = getPathNames(path);
synchronized (mRoot) {
Inode inode = getInode(pathNames);
if (inode != null) {
if (inode.isDirectory() && (directory && columns == -1)) {
return inode.getId();
}
LOG.info("FileAlreadyExistException: File " + path + " already exists.");
throw new FileAlreadyExistException("File " + path + " already exists.");
}
String name = pathNames[pathNames.length - 1];
String folderPath = null;
if (path.length() - name.length() == 1) {
folderPath = path.substring(0, path.length() - name.length());
} else {
folderPath = path.substring(0, path.length() - name.length() - 1);
}
inode = getInode(folderPath);
if (inode == null) {
int succeed = 0;
if (recursive) {
succeed = createFile(true, folderPath, true, -1, null, blockSizeByte);
}
if (!recursive || succeed <= 0) {
LOG.info("InvalidPathException: File " + path + " creation failed. Folder "
+ folderPath + " does not exist.");
throw new InvalidPathException("InvalidPathException: File " + path + " creation " +
"failed. Folder " + folderPath + " does not exist.");
} else {
inode = mInodes.get(succeed);
}
} else if (inode.isFile()) {
LOG.info("InvalidPathException: File " + path + " creation failed. "
+ folderPath + " is a file.");
throw new InvalidPathException("File " + path + " creation failed. "
+ folderPath + " is a file");
}
Inode ret = null;
if (directory) {
if (columns != -1) {
ret = new InodeRawTable(name, mInodeCounter.incrementAndGet(), inode.getId(),
columns, metadata, creationTimeMs);
} else {
ret = new InodeFolder(
name, mInodeCounter.incrementAndGet(), inode.getId(), creationTimeMs);
}
} else {
ret = new InodeFile(
name, mInodeCounter.incrementAndGet(), inode.getId(), blockSizeByte, creationTimeMs);
String curPath = getPath(ret);
if (mPinList.inList(curPath)) {
synchronized (mFileIdPinList) {
mFileIdPinList.add(ret.getId());
((InodeFile) ret).setPin(true);
}
}
if (mWhiteList.inList(curPath)) {
((InodeFile) ret).setCache(true);
}
}
mInodes.put(ret.getId(), ret);
((InodeFolder) inode).addChild(ret.getId());
LOG.debug("createFile: File Created: " + ret + " parent: " + inode);
return ret.getId();
}
}
public int createFile(boolean recursive, String path, boolean directory, int columns,
ByteBuffer metadata, long blockSizeByte)
throws FileAlreadyExistException, InvalidPathException, BlockInfoException,
TachyonException {
long creationTimeMs = System.currentTimeMillis();
synchronized (mRoot) {
int ret =
_createFile(recursive, path, directory, columns, metadata, blockSizeByte, creationTimeMs);
mJournal.getEditLog().createFile(
recursive, path, directory, columns, metadata, blockSizeByte, creationTimeMs);
mJournal.getEditLog().flush();
return ret;
}
}
public void createImage(DataOutputStream os) throws IOException {
Queue<Inode> nodesQueue = new LinkedList<Inode>();
synchronized (mRoot) {
createImageInodeWriter(mRoot, os);
nodesQueue.add(mRoot);
while (!nodesQueue.isEmpty()) {
InodeFolder tFolder = (InodeFolder) nodesQueue.poll();
List<Integer> childrenIds = tFolder.getChildrenIds();
for (int id : childrenIds) {
Inode tInode = mInodes.get(id);
createImageInodeWriter(tInode, os);
if (tInode.isDirectory()) {
nodesQueue.add(tInode);
} else if (((InodeFile) tInode).isPin()) {
synchronized (mFileIdPinList) {
mFileIdPinList.add(tInode.getId());
}
}
}
}
os.writeByte(Image.T_CHECKPOINT);
os.writeInt(mInodeCounter.get());
os.writeLong(mCheckpointInfo.getEditTransactionCounter());
}
}
/**
* Load the image from <code>is</code>. Assume this blocks the whole MasterInfo.
* @param is the input stream to load the image from.
* @throws IOException
*/
public void loadImage(DataInputStream is) throws IOException {
while (true) {
byte type = -1;
try {
type = is.readByte();
} catch (EOFException e) {
return;
}
if (type == Image.T_CHECKPOINT) {
mInodeCounter.set(is.readInt());
mCheckpointInfo.updateEditTransactionCounter(is.readLong());
} else {
if (type > Image.T_INODE_RAW_TABLE) {
throw new IOException("Corrupted image with unknown element type: " + type);
}
long creationTimeMs = is.readLong();
int fileId = is.readInt();
String fileName = Utils.readString(is);
int parentId = is.readInt();
Inode inode = null;
if (type == Image.T_INODE_FILE) {
long blockSizeByte = is.readLong();
long length = is.readLong();
boolean isComplete = is.readBoolean();
boolean isPin = is.readBoolean();
boolean isCache = is.readBoolean();
String checkpointPath = Utils.readString(is);
InodeFile tInode =
new InodeFile(fileName, fileId, parentId, blockSizeByte, creationTimeMs);
try {
tInode.setLength(length);
} catch (Exception e) {
throw new IOException(e);
}
tInode.setComplete(isComplete);
tInode.setPin(isPin);
tInode.setCache(isCache);
tInode.setCheckpointPath(checkpointPath);
inode = tInode;
} else {
int numberOfChildren = is.readInt();
int[] children = new int[numberOfChildren];
for (int k = 0; k < numberOfChildren; k ++) {
children[k] = is.readInt();
}
if (type == Image.T_INODE_FOLDER) {
InodeFolder folder = new InodeFolder(fileName, fileId, parentId, creationTimeMs);
folder.addChildren(children);
inode = folder;
} else {
int columns = is.readInt();
ByteBuffer metadata = Utils.readByteBuffer(is);
try {
InodeRawTable table = new InodeRawTable(
fileName, fileId, parentId, columns, metadata, creationTimeMs);
table.addChildren(children);
inode = table;
} catch (TachyonException e) {
throw new IOException(e);
}
}
}
LOG.info("Putting " + inode);
if (inode.getId() > mInodeCounter.get()) {
mInodeCounter.set(inode.getId());
}
if (inode.getId() == 1) {
mRoot = (InodeFolder) inode;
}
mInodes.put(inode.getId(), inode);
}
}
}
private void createImageInodeWriter(Inode inode, DataOutputStream os) throws IOException {
if (inode.isFile()) {
InodeFile file = (InodeFile) inode;
os.writeByte(Image.T_INODE_FILE);
os.writeLong(file.getCreationTimeMs());
os.writeInt(file.getId());
Utils.writeString(file.getName(), os);
os.writeInt(file.getParentId());
os.writeLong(file.getBlockSizeByte());
os.writeLong(file.getLength());
os.writeBoolean(file.isComplete());
os.writeBoolean(file.isPin());
os.writeBoolean(file.isCache());
Utils.writeString(file.getCheckpointPath(), os);
} else {
InodeFolder folder = (InodeFolder) inode;
if (folder.isRawTable()) {
os.writeByte(Image.T_INODE_RAW_TABLE);
} else {
os.writeByte(Image.T_INODE_FOLDER);
}
os.writeLong(folder.getCreationTimeMs());
os.writeInt(folder.getId());
Utils.writeString(folder.getName(), os);
os.writeInt(folder.getParentId());
List<Integer> children = folder.getChildrenIds();
os.writeInt(children.size());
for (int k = 0; k < children.size(); k ++) {
os.writeInt(children.get(k));
}
if (folder.isRawTable()) {
InodeRawTable table = (InodeRawTable) folder;
os.writeInt(table.getColumns());
Utils.writeByteBuffer(table.getMetadata(), os);
}
}
}
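// Layout note for the image written above and read back by loadImage(): every inode record
// starts with a type byte (Image.T_INODE_FILE, T_INODE_FOLDER or T_INODE_RAW_TABLE) followed by
// the creation time (long), id (int), name (String) and parent id (int). File records then carry
// block size, length, the complete/pin/cache flags and the checkpoint path; folder records carry
// the child id list; raw-table records additionally carry the column count and metadata buffer.
// The image is terminated by an Image.T_CHECKPOINT record holding the inode counter and the
// edit-log transaction counter.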
public long createNewBlock(int fileId) throws FileDoesNotExistException {
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("File " + fileId + " does not exist.");
}
if (!inode.isFile()) {
throw new FileDoesNotExistException("File " + fileId + " is not a file.");
}
return ((InodeFile) inode).getNewBlockId();
}
}
public void completeFile(int fileId) throws FileDoesNotExistException {
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("File " + fileId + " does not exist.");
}
if (!inode.isFile()) {
throw new FileDoesNotExistException("File " + fileId + " is not a file.");
}
((InodeFile) inode).setComplete();
mJournal.getEditLog().completeFile(fileId);
mJournal.getEditLog().flush();
}
}
public int createRawTable(String path, int columns, ByteBuffer metadata)
throws FileAlreadyExistException, InvalidPathException, TableColumnException,
TachyonException {
LOG.info("createRawTable" + CommonUtils.parametersToString(path, columns));
if (columns <= 0 || columns >= Constants.MAX_COLUMNS) {
throw new TableColumnException("Column " + columns + " should be between 0 and " +
Constants.MAX_COLUMNS);
}
int id;
try {
id = createFile(true, path, true, columns, metadata, 0);
} catch (BlockInfoException e) {
throw new FileAlreadyExistException(e.getMessage());
}
for (int k = 0; k < columns; k ++) {
mkdir(path + Constants.PATH_SEPARATOR + COL + k);
}
return id;
}
private boolean _delete(int fileId, boolean recursive) throws TachyonException {
LOG.info("delete(" + fileId + ")");
boolean succeed = true;
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
return true;
}
if (inode.isDirectory()) {
List<Integer> childrenIds = ((InodeFolder) inode).getChildrenIds();
if (!recursive && childrenIds.size() != 0) {
return false;
}
for (int childId : childrenIds) {
succeed = succeed && delete(childId, recursive);
}
}
InodeFolder parent = (InodeFolder) mInodes.get(inode.getParentId());
parent.removeChild(inode.getId());
mInodes.remove(inode.getId());
if (inode.isFile()) {
String checkpointPath = ((InodeFile) inode).getCheckpointPath();
if (!checkpointPath.equals("")) {
UnderFileSystem ufs = UnderFileSystem.get(checkpointPath);
try {
if (!ufs.delete(checkpointPath, true)) {
return false;
}
} catch (IOException e) {
throw new TachyonException(e.getMessage());
}
}
List<Pair<Long, Long>> blockIdWorkerIdList = ((InodeFile) inode).getBlockIdWorkerIdPairs();
synchronized (mWorkers) {
for (Pair<Long, Long> blockIdWorkerId: blockIdWorkerIdList) {
MasterWorkerInfo workerInfo = mWorkers.get(blockIdWorkerId.getSecond());
if (workerInfo != null) {
workerInfo.updateToRemovedBlock(true, blockIdWorkerId.getFirst());
}
}
}
if (((InodeFile) inode).isPin()) {
synchronized (mFileIdPinList) {
mFileIdPinList.remove(inode.getId());
}
}
}
inode.reverseId();
return succeed;
}
}
/**
* Delete a file.
* @param fileId The file to be deleted.
* @param recursive whether delete the file recursively or not.
* @return succeed or not
* @throws TachyonException
*/
public boolean delete(int fileId, boolean recursive) throws TachyonException {
synchronized (mRoot) {
boolean ret = _delete(fileId, recursive);
mJournal.getEditLog().delete(fileId, recursive);
mJournal.getEditLog().flush();
return ret;
}
}
public boolean delete(String path, boolean recursive) throws TachyonException {
LOG.info("delete(" + path + ")");
synchronized (mRoot) {
Inode inode = null;
try {
inode = getInode(path);
} catch (InvalidPathException e) {
return false;
}
if (inode == null) {
return true;
}
return delete(inode.getId(), recursive);
}
}
public long getBlockIdBasedOnOffset(int fileId, long offset) throws FileDoesNotExistException {
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("FileId " + fileId + " does not exist.");
}
if (!inode.isFile()) {
throw new FileDoesNotExistException(fileId + " is not a file.");
}
return ((InodeFile) inode).getBlockIdBasedOnOffset(offset);
}
}
public long getCapacityBytes() {
long ret = 0;
synchronized (mWorkers) {
for (MasterWorkerInfo worker : mWorkers.values()) {
ret += worker.getCapacityBytes();
}
}
return ret;
}
public ClientBlockInfo getClientBlockInfo(long blockId)
throws FileDoesNotExistException, IOException, BlockInfoException {
int fileId = BlockInfo.computeInodeId(blockId);
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null || inode.isDirectory()) {
throw new FileDoesNotExistException("FileId " + fileId + " does not exist.");
}
ClientBlockInfo ret =
((InodeFile) inode).getClientBlockInfo(BlockInfo.computeBlockIndex(blockId));
LOG.debug("getClientBlockInfo: " + blockId + ret);
return ret;
}
}
public ClientFileInfo getClientFileInfo(int fid) throws FileDoesNotExistException {
synchronized (mRoot) {
Inode inode = mInodes.get(fid);
if (inode == null) {
throw new FileDoesNotExistException("FileId " + fid + " does not exist.");
}
ClientFileInfo ret = inode.generateClientFileInfo(getPath(inode));
LOG.debug("getClientFileInfo(" + fid + "): " + ret);
return ret;
}
}
public ClientFileInfo getClientFileInfo(String path)
throws FileDoesNotExistException, InvalidPathException {
LOG.info("getClientFileInfo(" + path + ")");
synchronized (mRoot) {
Inode inode = getInode(path);
if (inode == null) {
throw new FileDoesNotExistException(path);
}
return getClientFileInfo(inode.getId());
}
}
public ClientRawTableInfo getClientRawTableInfo(int id) throws TableDoesNotExistException {
LOG.info("getClientRawTableInfo(" + id + ")");
synchronized (mRoot) {
Inode inode = mInodes.get(id);
if (inode == null || inode.isFile() || !((InodeFolder) inode).isRawTable()) {
throw new TableDoesNotExistException("Table " + id + " does not exist.");
}
ClientRawTableInfo ret = new ClientRawTableInfo();
ret.id = inode.getId();
ret.name = inode.getName();
ret.path = getPath(inode);
ret.columns = ((InodeRawTable) inode).getColumns();
ret.metadata = ((InodeRawTable) inode).getMetadata();
return ret;
}
}
public ClientRawTableInfo getClientRawTableInfo(String path)
throws TableDoesNotExistException, InvalidPathException {
LOG.info("getClientRawTableInfo(" + path + ")");
synchronized (mRoot) {
Inode inode = getInode(path);
if (inode == null) {
throw new TableDoesNotExistException(path);
}
return getClientRawTableInfo(inode.getId());
}
}
/**
* If the <code>path</code> is a directory, return all the direct entries in it. If the
* <code>path</code> is a file, return its ClientFileInfo.
* @param path the target directory/file path
* @return A list of ClientFileInfo
* @throws FileDoesNotExistException
* @throws InvalidPathException
*/
public List<ClientFileInfo> getFilesInfo(String path)
throws FileDoesNotExistException, InvalidPathException {
List<ClientFileInfo> ret = new ArrayList<ClientFileInfo>();
Inode inode = getInode(path);
if (inode == null) {
throw new FileDoesNotExistException(path);
}
if (inode.isDirectory()) {
List<Integer> childrenIds = ((InodeFolder) inode).getChildrenIds();
if (!path.endsWith("/")) {
path += "/";
}
synchronized (mRoot) {
for (int k : childrenIds) {
ret.add(getClientFileInfo(k));
}
}
} else {
ret.add(getClientFileInfo(inode.getId()));
}
return ret;
}
public String getFileNameById(int fileId) throws FileDoesNotExistException {
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("FileId " + fileId + " does not exist");
}
return getPath(inode);
}
}
public List<ClientBlockInfo> getFileLocations(int fileId)
throws FileDoesNotExistException, IOException {
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null || inode.isDirectory()) {
throw new FileDoesNotExistException("FileId " + fileId + " does not exist.");
}
List<ClientBlockInfo> ret = ((InodeFile) inode).getClientBlockInfos();
LOG.debug("getFileLocations: " + fileId + ret);
return ret;
}
}
public List<ClientBlockInfo> getFileLocations(String path)
throws FileDoesNotExistException, InvalidPathException, IOException {
LOG.info("getFileLocations: " + path);
synchronized (mRoot) {
Inode inode = getInode(path);
if (inode == null) {
throw new FileDoesNotExistException(path);
}
return getFileLocations(inode.getId());
}
}
/**
* Get the file id of the file.
* @param path The path of the file
* @return The file id of the file. -1 if the file does not exist.
* @throws InvalidPathException
*/
public int getFileId(String path) throws InvalidPathException {
LOG.debug("getFileId(" + path + ")");
Inode inode = getInode(path);
int ret = -1;
if (inode != null) {
ret = inode.getId();
}
LOG.debug("getFileId(" + path + "): " + ret);
return ret;
}
private Inode getInode(String path) throws InvalidPathException {
return getInode(getPathNames(path));
}
private Inode getInode(String[] pathNames) throws InvalidPathException {
if (pathNames == null || pathNames.length == 0) {
return null;
}
if (pathNames.length == 1) {
if (pathNames[0].equals("")) {
return mRoot;
} else {
LOG.info("InvalidPathException: File name starts with " + pathNames[0]);
throw new InvalidPathException("File name starts with " + pathNames[0]);
}
}
Inode cur = mRoot;
synchronized (mRoot) {
for (int k = 1; k < pathNames.length && cur != null; k ++) {
String name = pathNames[k];
if (cur.isFile()) {
return null;
}
cur = ((InodeFolder) cur).getChild(name, mInodes);
}
return cur;
}
}
/**
* Get absolute paths of all in memory files.
*
* @return absolute paths of all in memory files.
*/
public List<String> getInMemoryFiles() {
List<String> ret = new ArrayList<String>();
LOG.info("getInMemoryFiles()");
Queue<Pair<InodeFolder, String>> nodesQueue = new LinkedList<Pair<InodeFolder, String>>();
synchronized (mRoot) {
nodesQueue.add(new Pair<InodeFolder, String>(mRoot, ""));
while (!nodesQueue.isEmpty()) {
Pair<InodeFolder, String> tPair = nodesQueue.poll();
InodeFolder tFolder = tPair.getFirst();
String curPath = tPair.getSecond();
List<Integer> childrenIds = tFolder.getChildrenIds();
for (int id : childrenIds) {
Inode tInode = mInodes.get(id);
String newPath = curPath + Constants.PATH_SEPARATOR + tInode.getName();
if (tInode.isDirectory()) {
nodesQueue.add(new Pair<InodeFolder, String>((InodeFolder) tInode, newPath));
} else if (((InodeFile) tInode).isFullyInMemory()) {
ret.add(newPath);
}
}
}
}
return ret;
}
public InetSocketAddress getMasterAddress() {
return MASTER_ADDRESS;
}
private static String getName(String path) throws InvalidPathException {
String[] pathNames = getPathNames(path);
return pathNames[pathNames.length - 1];
}
public long getNewUserId() {
return mUserCounter.incrementAndGet();
}
public int getNumberOfFiles(String path) throws InvalidPathException, FileDoesNotExistException {
Inode inode = getInode(path);
if (inode == null) {
throw new FileDoesNotExistException(path);
}
if (inode.isFile()) {
return 1;
}
return ((InodeFolder) inode).getNumberOfChildren();
}
private String getPath(Inode inode) {
synchronized (mRoot) {
if (inode.getId() == 1) {
return "/";
}
if (inode.getParentId() == 1) {
return Constants.PATH_SEPARATOR + inode.getName();
}
return getPath(mInodes.get(inode.getParentId())) + Constants.PATH_SEPARATOR + inode.getName();
}
}
private static String[] getPathNames(String path) throws InvalidPathException {
CommonUtils.validatePath(path);
if (path.length() == 1 && path.equals(Constants.PATH_SEPARATOR)) {
String[] ret = new String[1];
ret[0] = "";
return ret;
}
return path.split(Constants.PATH_SEPARATOR);
}
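// Example of what getPathNames() returns: "/" yields [""], while "/tables/t1/COL_0" yields
// ["", "tables", "t1", "COL_0"], since String.split() keeps the leading empty component produced
// by the root separator (assuming PATH_SEPARATOR is "/").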
public List<String> getPinList() {
return mPinList.getList();
}
public List<Integer> getPinIdList() {
synchronized (mFileIdPinList) {
List<Integer> ret = new ArrayList<Integer>();
for (int id : mFileIdPinList) {
ret.add(id);
}
return ret;
}
}
public int getRawTableId(String path) throws InvalidPathException {
Inode inode = getInode(path);
if (inode == null || inode.isFile() || !((InodeFolder) inode).isRawTable()) {
return -1;
}
return inode.getId();
}
public long getStarttimeMs() {
return START_TIME_MS;
}
public long getUnderFsCapacityBytes() throws IOException {
UnderFileSystem ufs = UnderFileSystem.get(CommonConf.get().UNDERFS_DATA_FOLDER);
return ufs.getSpace(CommonConf.get().UNDERFS_DATA_FOLDER, SpaceType.SPACE_TOTAL);
}
public long getUnderFsUsedBytes() throws IOException {
UnderFileSystem ufs = UnderFileSystem.get(CommonConf.get().UNDERFS_DATA_FOLDER);
return ufs.getSpace(CommonConf.get().UNDERFS_DATA_FOLDER, SpaceType.SPACE_USED);
}
public long getUnderFsFreeBytes() throws IOException {
UnderFileSystem ufs = UnderFileSystem.get(CommonConf.get().UNDERFS_DATA_FOLDER);
return ufs.getSpace(CommonConf.get().UNDERFS_DATA_FOLDER, SpaceType.SPACE_FREE);
}
public long getUsedBytes() {
long ret = 0;
synchronized (mWorkers) {
for (MasterWorkerInfo worker : mWorkers.values()) {
ret += worker.getUsedBytes();
}
}
return ret;
}
public NetAddress getWorker(boolean random, String host) {
synchronized (mWorkers) {
if (mWorkerAddressToId.isEmpty()) {
return null;
}
if (random) {
int index = new Random().nextInt(mWorkerAddressToId.size());
for (InetSocketAddress address: mWorkerAddressToId.keySet()) {
if (index == 0) {
LOG.debug("getRandomWorker: " + address);
return new NetAddress(address.getHostName(), address.getPort());
}
index --;
}
for (InetSocketAddress address: mWorkerAddressToId.keySet()) {
LOG.debug("getRandomWorker: " + address);
return new NetAddress(address.getHostName(), address.getPort());
}
} else {
for (InetSocketAddress address: mWorkerAddressToId.keySet()) {
if (address.getHostName().equals(host)
|| address.getAddress().getHostAddress().equals(host)
|| address.getAddress().getCanonicalHostName().equals(host)) {
LOG.debug("getLocalWorker: " + address);
return new NetAddress(address.getHostName(), address.getPort());
}
}
}
}
LOG.info("getLocalWorker: no local worker on " + host);
return null;
}
public int getWorkerCount() {
synchronized (mWorkers) {
return mWorkers.size();
}
}
private MasterWorkerInfo getWorkerInfo(long workerId) {
MasterWorkerInfo ret = null;
synchronized (mWorkers) {
ret = mWorkers.get(workerId);
if (ret == null) {
LOG.error("No worker: " + workerId);
}
}
return ret;
}
public List<ClientWorkerInfo> getWorkersInfo() {
List<ClientWorkerInfo> ret = new ArrayList<ClientWorkerInfo>();
synchronized (mWorkers) {
for (MasterWorkerInfo worker : mWorkers.values()) {
ret.add(worker.generateClientWorkerInfo());
}
}
return ret;
}
public List<String> getWhiteList() {
return mWhiteList.getList();
}
public List<Integer> listFiles(String path, boolean recursive)
throws InvalidPathException, FileDoesNotExistException {
List<Integer> ret = new ArrayList<Integer>();
synchronized (mRoot) {
Inode inode = getInode(path);
if (inode == null) {
throw new FileDoesNotExistException(path);
}
if (inode.isFile()) {
ret.add(inode.getId());
} else if (recursive) {
Queue<Integer> queue = new LinkedList<Integer>();
queue.addAll(((InodeFolder) inode).getChildrenIds());
while (!queue.isEmpty()) {
int id = queue.poll();
inode = mInodes.get(id);
if (inode.isDirectory()) {
queue.addAll(((InodeFolder) inode).getChildrenIds());
} else {
ret.add(id);
}
}
}
}
return ret;
}
public List<String> ls(String path, boolean recursive)
throws InvalidPathException, FileDoesNotExistException {
List<String> ret = new ArrayList<String>();
Inode inode = getInode(path);
if (inode == null) {
throw new FileDoesNotExistException(path);
}
if (inode.isFile()) {
ret.add(path);
} else {
List<Integer> childrenIds = ((InodeFolder) inode).getChildrenIds();
if (!path.endsWith("/")) {
path += "/";
}
ret.add(path);
synchronized (mRoot) {
for (int k : childrenIds) {
inode = mInodes.get(k);
if (inode != null) {
if (recursive) {
ret.addAll(ls(path + inode.getName(), true));
} else {
ret.add(path + inode.getName());
}
}
}
}
}
return ret;
}
public boolean mkdir(String path)
throws FileAlreadyExistException, InvalidPathException, TachyonException {
try {
return createFile(true, path, true, -1, null, 0) > 0;
} catch (BlockInfoException e) {
throw new FileAlreadyExistException(e.getMessage());
}
}
public long registerWorker(NetAddress workerNetAddress, long totalBytes,
long usedBytes, List<Long> currentBlockIds) throws BlockInfoException {
long id = 0;
InetSocketAddress workerAddress =
new InetSocketAddress(workerNetAddress.mHost, workerNetAddress.mPort);
LOG.info("registerWorker(): WorkerNetAddress: " + workerAddress);
synchronized (mWorkers) {
if (mWorkerAddressToId.containsKey(workerAddress)) {
id = mWorkerAddressToId.get(workerAddress);
mWorkerAddressToId.remove(workerAddress);
LOG.warn("The worker " + workerAddress + " already exists as id " + id + ".");
}
if (id != 0 && mWorkers.containsKey(id)) {
MasterWorkerInfo tWorkerInfo = mWorkers.get(id);
mWorkers.remove(id);
mLostWorkers.add(tWorkerInfo);
LOG.warn("The worker with id " + id + " has been removed.");
}
id = START_TIME_NS_PREFIX + mWorkerCounter.incrementAndGet();
MasterWorkerInfo tWorkerInfo = new MasterWorkerInfo(id, workerAddress, totalBytes);
tWorkerInfo.updateUsedBytes(usedBytes);
tWorkerInfo.updateBlocks(true, currentBlockIds);
tWorkerInfo.updateLastUpdatedTimeMs();
mWorkers.put(id, tWorkerInfo);
mWorkerAddressToId.put(workerAddress, id);
LOG.info("registerWorker(): " + tWorkerInfo);
}
synchronized (mRoot) {
for (long blockId: currentBlockIds) {
int fileId = BlockInfo.computeInodeId(blockId);
int blockIndex = BlockInfo.computeBlockIndex(blockId);
Inode inode = mInodes.get(fileId);
if (inode != null && inode.isFile()) {
((InodeFile) inode).addLocation(blockIndex, id, workerNetAddress);
} else {
LOG.warn("registerWorker failed to add fileId " + fileId + " blockIndex " + blockIndex);
}
}
}
return id;
}
private void rename(Inode srcInode, String dstPath)
throws FileAlreadyExistException, InvalidPathException, FileDoesNotExistException {
if (getInode(dstPath) != null) {
throw new FileAlreadyExistException("Failed to rename: " + dstPath + " already exists");
}
String dstName = getName(dstPath);
String dstFolderPath = dstPath.substring(0, dstPath.length() - dstName.length() - 1);
// If we are renaming into the root folder
if (dstFolderPath.isEmpty()) {
dstFolderPath = "/";
}
Inode dstFolderInode = getInode(dstFolderPath);
if (dstFolderInode == null || dstFolderInode.isFile()) {
throw new FileDoesNotExistException("Failed to rename: " + dstFolderPath +
" does not exist.");
}
srcInode.setName(dstName);
InodeFolder parent = (InodeFolder) mInodes.get(srcInode.getParentId());
parent.removeChild(srcInode.getId());
srcInode.setParentId(dstFolderInode.getId());
((InodeFolder) dstFolderInode).addChild(srcInode.getId());
mJournal.getEditLog().rename(srcInode.getId(), dstPath);
mJournal.getEditLog().flush();
}
public void rename(int fileId, String dstPath)
throws FileDoesNotExistException, FileAlreadyExistException, InvalidPathException {
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("Failed to rename: " + fileId + " does not exist");
}
rename(inode, dstPath);
}
}
public void rename(String srcPath, String dstPath)
throws FileAlreadyExistException, FileDoesNotExistException, InvalidPathException {
synchronized (mRoot) {
Inode inode = getInode(srcPath);
if (inode == null) {
throw new FileDoesNotExistException("Failed to rename: " + srcPath + " does not exist");
}
rename(inode, dstPath);
}
}
public void unpinFile(int fileId) throws FileDoesNotExistException {
// TODO Change metadata only. Data will be evicted from workers based on the data replacement
// policy. TODO May change it to be active from V0.2.
LOG.info("unpinFile(" + fileId + ")");
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("Failed to unpin " + fileId);
}
((InodeFile) inode).setPin(false);
synchronized (mFileIdPinList) {
mFileIdPinList.remove(fileId);
}
mJournal.getEditLog().unpinFile(fileId);
mJournal.getEditLog().flush();
}
}
public void updateRawTableMetadata(int tableId, ByteBuffer metadata)
throws TableDoesNotExistException, TachyonException {
synchronized (mRoot) {
Inode inode = mInodes.get(tableId);
if (inode == null || inode.getInodeType() != InodeType.RawTable) {
throw new TableDoesNotExistException("Table " + tableId + " does not exist.");
}
((InodeRawTable) inode).updateMetadata(metadata);
mJournal.getEditLog().updateRawTableMetadata(tableId, metadata);
mJournal.getEditLog().flush();
}
}
public Command workerHeartbeat(long workerId, long usedBytes, List<Long> removedBlockIds)
throws BlockInfoException {
LOG.debug("WorkerId: " + workerId);
synchronized (mRoot) {
synchronized (mWorkers) {
MasterWorkerInfo tWorkerInfo = mWorkers.get(workerId);
if (tWorkerInfo == null) {
LOG.info("worker_heartbeat(): Does not contain worker with ID " + workerId +
". Sending a command to let it re-register.");
return new Command(CommandType.Register, new ArrayList<Long>());
}
tWorkerInfo.updateUsedBytes(usedBytes);
tWorkerInfo.updateBlocks(false, removedBlockIds);
tWorkerInfo.updateToRemovedBlocks(false, removedBlockIds);
tWorkerInfo.updateLastUpdatedTimeMs();
for (long blockId : removedBlockIds) {
int fileId = BlockInfo.computeInodeId(blockId);
int blockIndex = BlockInfo.computeBlockIndex(blockId);
Inode inode = mInodes.get(fileId);
if (inode == null) {
LOG.error("File " + fileId + " does not exist");
} else if (inode.isFile()) {
((InodeFile) inode).removeLocation(blockIndex, workerId);
LOG.debug("File " + fileId + " block " + blockIndex +
" was evicted from worker " + workerId);
}
}
List<Long> toRemovedBlocks = tWorkerInfo.getToRemovedBlocks();
if (toRemovedBlocks.size() != 0) {
return new Command(CommandType.Free, toRemovedBlocks);
}
}
}
return new Command(CommandType.Nothing, new ArrayList<Long>());
}
public void stop() {
mHeartbeatThread.shutdown();
}
}
|
src/main/java/tachyon/MasterInfo.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tachyon;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Queue;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.log4j.Logger;
import tachyon.UnderFileSystem.SpaceType;
import tachyon.conf.CommonConf;
import tachyon.conf.MasterConf;
import tachyon.io.Utils;
import tachyon.thrift.BlockInfoException;
import tachyon.thrift.ClientBlockInfo;
import tachyon.thrift.ClientFileInfo;
import tachyon.thrift.ClientRawTableInfo;
import tachyon.thrift.ClientWorkerInfo;
import tachyon.thrift.Command;
import tachyon.thrift.CommandType;
import tachyon.thrift.FileAlreadyExistException;
import tachyon.thrift.FileDoesNotExistException;
import tachyon.thrift.InvalidPathException;
import tachyon.thrift.NetAddress;
import tachyon.thrift.SuspectedFileSizeException;
import tachyon.thrift.TableColumnException;
import tachyon.thrift.TableDoesNotExistException;
import tachyon.thrift.TachyonException;
import tachyon.util.CommonUtils;
/**
* A global view of the filesystem on the master.
*/
public class MasterInfo {
public static final String COL = "COL_";
private final Logger LOG = Logger.getLogger(Constants.LOGGER_TYPE);
private final InetSocketAddress MASTER_ADDRESS;
private final long START_TIME_NS_PREFIX;
private final long START_TIME_MS;
private final MasterConf MASTER_CONF;
private CheckpointInfo mCheckpointInfo = new CheckpointInfo(0, 0);
private AtomicInteger mInodeCounter = new AtomicInteger(0);
private AtomicInteger mUserCounter = new AtomicInteger(0);
private AtomicInteger mWorkerCounter = new AtomicInteger(0);
// Root Inode's id must be 1.
private InodeFolder mRoot;
private Map<Integer, Inode> mInodes = new HashMap<Integer, Inode>();
private Map<Long, MasterWorkerInfo> mWorkers = new HashMap<Long, MasterWorkerInfo>();
private Map<InetSocketAddress, Long> mWorkerAddressToId = new HashMap<InetSocketAddress, Long>();
private BlockingQueue<MasterWorkerInfo> mLostWorkers = new ArrayBlockingQueue<MasterWorkerInfo>(32);
// TODO Check the logic related to these two lists.
private PrefixList mWhiteList;
private PrefixList mPinList;
private Set<Integer> mFileIdPinList;
private Journal mJournal;
private HeartbeatThread mHeartbeatThread;
/**
* Periodic status check of the master info.
*/
public class MasterInfoHeartbeatExecutor implements HeartbeatExecutor {
@Override
public void heartbeat() {
LOG.debug("System status checking.");
Set<Long> lostWorkers = new HashSet<Long>();
synchronized (mWorkers) {
for (Entry<Long, MasterWorkerInfo> worker: mWorkers.entrySet()) {
if (CommonUtils.getCurrentMs() - worker.getValue().getLastUpdatedTimeMs()
> MASTER_CONF.WORKER_TIMEOUT_MS) {
LOG.error("The worker " + worker.getValue() + " got timed out!");
mLostWorkers.add(worker.getValue());
lostWorkers.add(worker.getKey());
}
}
for (long workerId: lostWorkers) {
MasterWorkerInfo workerInfo = mWorkers.get(workerId);
mWorkerAddressToId.remove(workerInfo.getAddress());
mWorkers.remove(workerId);
}
}
boolean hadFailedWorker = false;
while (mLostWorkers.size() != 0) {
hadFailedWorker = true;
MasterWorkerInfo worker = mLostWorkers.poll();
// TODO This lock is not efficient. Since node failure is rare, this is fine for now.
synchronized (mRoot) {
try {
for (long blockId: worker.getBlocks()) {
int fileId = BlockInfo.computeInodeId(blockId);
InodeFile tFile = (InodeFile) mInodes.get(fileId);
if (tFile != null) {
int blockIndex = BlockInfo.computeBlockIndex(blockId);
tFile.removeLocation(blockIndex, worker.getId());
if (!tFile.hasCheckpointed() && tFile.getBlockLocations(blockIndex).size() == 0) {
LOG.info("Block " + blockId + " got lost from worker " + worker.getId() + " .");
} else {
LOG.info("Block " + blockId + " only lost an in memory copy from worker " +
worker.getId());
}
}
}
} catch (BlockInfoException e) {
LOG.error(e);
}
}
}
if (hadFailedWorker) {
LOG.warn("Restarting failed workers.");
try {
java.lang.Runtime.getRuntime().exec(CommonConf.get().TACHYON_HOME +
"/bin/tachyon-start.sh restart_workers");
} catch (IOException e) {
LOG.error(e.getMessage());
}
}
}
}
public class RecomputeCmd implements Runnable {
private final String CMD;
private final String FILE_PATH;
public RecomputeCmd(String cmd, String filePath) {
CMD = cmd;
FILE_PATH = filePath;
}
@Override
public void run() {
try {
LOG.info("Exec " + CMD + " output to " + FILE_PATH);
Process p = java.lang.Runtime.getRuntime().exec(CMD);
String line;
BufferedReader bri = new BufferedReader(new InputStreamReader(p.getInputStream()));
BufferedReader bre = new BufferedReader(new InputStreamReader(p.getErrorStream()));
File file = new File(FILE_PATH);
FileWriter fw = new FileWriter(file.getAbsoluteFile());
BufferedWriter bw = new BufferedWriter(fw);
while ((line = bri.readLine()) != null) {
bw.write(line + "\n");
}
bri.close();
while ((line = bre.readLine()) != null) {
bw.write(line + "\n");
}
bre.close();
bw.flush();
bw.close();
p.waitFor();
LOG.info("Exec " + CMD + " output to " + FILE_PATH + " done.");
} catch (IOException e) {
LOG.error(e.getMessage());
} catch (InterruptedException e) {
LOG.error(e.getMessage());
}
}
}
public MasterInfo(InetSocketAddress address, Journal journal) throws IOException {
MASTER_CONF = MasterConf.get();
mRoot = new InodeFolder("", mInodeCounter.incrementAndGet(), -1, System.currentTimeMillis());
mInodes.put(mRoot.getId(), mRoot);
MASTER_ADDRESS = address;
START_TIME_MS = System.currentTimeMillis();
// TODO This name needs to be changed.
START_TIME_NS_PREFIX = START_TIME_MS - (START_TIME_MS % 1000000);
mJournal = journal;
mWhiteList = new PrefixList(MASTER_CONF.WHITELIST);
mPinList = new PrefixList(MASTER_CONF.PINLIST);
mFileIdPinList = Collections.synchronizedSet(new HashSet<Integer>());
mJournal.loadImage(this);
}
public void init() throws IOException {
mCheckpointInfo.updateEditTransactionCounter(mJournal.loadEditLog(this));
mJournal.createImage(this);
mJournal.createEditLog(mCheckpointInfo.getEditTransactionCounter());
mHeartbeatThread = new HeartbeatThread("Master Heartbeat",
new MasterInfoHeartbeatExecutor(), MASTER_CONF.HEARTBEAT_INTERVAL_MS);
mHeartbeatThread.start();
}
/**
* Add a checkpoint to a file.
* @param workerId The worker which submitted the request. -1 if the request is not from a worker.
* @param fileId The file to add the checkpoint to.
* @param length The length of the checkpoint.
* @param checkpointPath The path of the checkpoint.
* @return true if the checkpoint is added successfully, false if not.
* @throws FileNotFoundException
* @throws SuspectedFileSizeException
* @throws BlockInfoException
*/
public boolean addCheckpoint(long workerId, int fileId, long length, String checkpointPath)
throws FileNotFoundException, SuspectedFileSizeException, BlockInfoException {
LOG.info(CommonUtils.parametersToString(workerId, fileId, length, checkpointPath));
if (workerId != -1) {
MasterWorkerInfo tWorkerInfo = getWorkerInfo(workerId);
tWorkerInfo.updateLastUpdatedTimeMs();
}
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileNotFoundException("File " + fileId + " does not exist.");
}
if (inode.isDirectory()) {
throw new FileNotFoundException("File " + fileId + " is a folder.");
}
InodeFile tFile = (InodeFile) inode;
boolean needLog = false;
if (tFile.isComplete()) {
if (tFile.getLength() != length) {
throw new SuspectedFileSizeException(fileId + ". Original Size: " +
tFile.getLength() + ". New Size: " + length);
}
} else {
tFile.setLength(length);
needLog = true;
}
if (!tFile.hasCheckpointed()) {
tFile.setCheckpointPath(checkpointPath);
needLog = true;
}
tFile.setComplete();
if (needLog) {
mJournal.getEditLog().addCheckpoint(fileId, length, checkpointPath);
mJournal.getEditLog().flush();
}
return true;
}
}
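// Illustrative usage sketch (added for clarity, not part of the original source; the ids and the
// under-filesystem path below are hypothetical):
//   MasterInfo info = ...;
//   // A non-worker caller (workerId == -1) registers a 1 KB checkpoint for file 5:
//   boolean added = info.addCheckpoint(-1, 5, 1024L, "hdfs://namenode/tachyon/ckpt/5");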
/**
* A worker caches a block in its memory.
*
* @param workerId
* @param workerUsedBytes
* @param blockId
* @param length
* @throws FileDoesNotExistException
* @throws SuspectedFileSizeException
* @throws BlockInfoException
*/
public void cacheBlock(long workerId, long workerUsedBytes, long blockId, long length)
throws FileDoesNotExistException, SuspectedFileSizeException, BlockInfoException {
LOG.debug(CommonUtils.parametersToString(workerId, workerUsedBytes, blockId, length));
MasterWorkerInfo tWorkerInfo = getWorkerInfo(workerId);
tWorkerInfo.updateBlock(true, blockId);
tWorkerInfo.updateUsedBytes(workerUsedBytes);
tWorkerInfo.updateLastUpdatedTimeMs();
int fileId = BlockInfo.computeInodeId(blockId);
int blockIndex = BlockInfo.computeBlockIndex(blockId);
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("File " + fileId + " does not exist.");
}
if (inode.isDirectory()) {
throw new FileDoesNotExistException("File " + fileId + " is a folder.");
}
InodeFile tFile = (InodeFile) inode;
if (tFile.getNumberOfBlocks() <= blockIndex) {
addBlock(tFile, new BlockInfo(tFile, blockIndex, length));
}
InetSocketAddress address = tWorkerInfo.ADDRESS;
tFile.addLocation(blockIndex, workerId,
new NetAddress(address.getHostName(), address.getPort()));
}
}
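// Note (added for clarity): a block id encodes both the owning file's inode id and the block
// index within that file; BlockInfo.computeInodeId and BlockInfo.computeBlockIndex recover the
// two parts, which is why cacheBlock can locate the InodeFile from the block id alone. The exact
// bit layout is defined in BlockInfo and is not assumed here.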
/**
* Called by edit log only.
* @param fileId
* @param blockIndex
* @param blockLength
* @throws FileDoesNotExistException
* @throws BlockInfoException
*/
void opAddBlock(int fileId, int blockIndex, long blockLength)
throws FileDoesNotExistException, BlockInfoException {
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("File " + fileId + " does not exist.");
}
if (inode.isDirectory()) {
throw new FileDoesNotExistException("File " + fileId + " is a folder.");
}
addBlock((InodeFile) inode, new BlockInfo((InodeFile) inode, blockIndex, blockLength));
}
}
private void addBlock(InodeFile tFile, BlockInfo blockInfo) throws BlockInfoException {
tFile.addBlock(blockInfo);
mJournal.getEditLog().addBlock(tFile.getId(), blockInfo.BLOCK_INDEX, blockInfo.LENGTH);
mJournal.getEditLog().flush();
}
public int createFile(String path, long blockSizeByte)
throws FileAlreadyExistException, InvalidPathException, BlockInfoException, TachyonException {
return createFile(true, path, false, -1, null, blockSizeByte);
}
// TODO Make this API better.
/**
* Internal API.
* @param recursive
* @param path
* @param directory
* @param columns
* @param metadata
* @param blockSizeByte
* @param creationTimeMs
* @return the inode id of the created file or folder
* @throws FileAlreadyExistException
* @throws InvalidPathException
* @throws BlockInfoException
* @throws TachyonException
*/
int _createFile(boolean recursive, String path, boolean directory, int columns,
ByteBuffer metadata, long blockSizeByte, long creationTimeMs)
throws FileAlreadyExistException, InvalidPathException, BlockInfoException,
TachyonException {
if (!directory && blockSizeByte < 1) {
throw new BlockInfoException("Invalid block size " + blockSizeByte);
}
LOG.debug("createFile" + CommonUtils.parametersToString(path));
String[] pathNames = getPathNames(path);
synchronized (mRoot) {
Inode inode = getInode(pathNames);
if (inode != null) {
if (inode.isDirectory() && (directory && columns == -1)) {
return inode.getId();
}
LOG.info("FileAlreadyExistException: File " + path + " already exist.");
throw new FileAlreadyExistException("File " + path + " already exist.");
}
String name = pathNames[pathNames.length - 1];
String folderPath = null;
if (path.length() - name.length() == 1) {
folderPath = path.substring(0, path.length() - name.length());
} else {
folderPath = path.substring(0, path.length() - name.length() - 1);
}
inode = getInode(folderPath);
if (inode == null) {
int succeed = 0;
if (recursive) {
succeed = createFile(true, folderPath, true, -1, null, blockSizeByte);
}
if (!recursive || succeed <= 0) {
LOG.info("InvalidPathException: File " + path + " creation failed. Folder "
+ folderPath + " does not exist.");
throw new InvalidPathException("InvalidPathException: File " + path + " creation " +
"failed. Folder " + folderPath + " does not exist.");
} else {
inode = mInodes.get(succeed);
}
} else if (inode.isFile()) {
LOG.info("InvalidPathException: File " + path + " creation failed. "
+ folderPath + " is a file.");
throw new InvalidPathException("File " + path + " creation failed. "
+ folderPath + " is a file");
}
Inode ret = null;
if (directory) {
if (columns != -1) {
ret = new InodeRawTable(name, mInodeCounter.incrementAndGet(), inode.getId(),
columns, metadata, creationTimeMs);
} else {
ret = new InodeFolder(
name, mInodeCounter.incrementAndGet(), inode.getId(), creationTimeMs);
}
} else {
ret = new InodeFile(
name, mInodeCounter.incrementAndGet(), inode.getId(), blockSizeByte, creationTimeMs);
String curPath = getPath(ret);
if (mPinList.inList(curPath)) {
synchronized (mFileIdPinList) {
mFileIdPinList.add(ret.getId());
((InodeFile) ret).setPin(true);
}
}
if (mWhiteList.inList(curPath)) {
((InodeFile) ret).setCache(true);
}
}
mInodes.put(ret.getId(), ret);
((InodeFolder) inode).addChild(ret.getId());
LOG.debug("createFile: File Created: " + ret + " parent: " + inode);
return ret.getId();
}
}
public int createFile(boolean recursive, String path, boolean directory, int columns,
ByteBuffer metadata, long blockSizeByte)
throws FileAlreadyExistException, InvalidPathException, BlockInfoException,
TachyonException {
long creationTimeMs = System.currentTimeMillis();
int ret =
_createFile(recursive, path, directory, columns, metadata, blockSizeByte, creationTimeMs);
mJournal.getEditLog().createFile(
recursive, path, directory, columns, metadata, blockSizeByte, creationTimeMs);
mJournal.getEditLog().flush();
return ret;
}
public void createImage(DataOutputStream os) throws IOException {
Queue<Inode> nodesQueue = new LinkedList<Inode>();
synchronized (mRoot) {
createImageInodeWriter(mRoot, os);
nodesQueue.add(mRoot);
while (!nodesQueue.isEmpty()) {
InodeFolder tFolder = (InodeFolder) nodesQueue.poll();
List<Integer> childrenIds = tFolder.getChildrenIds();
for (int id : childrenIds) {
Inode tInode = mInodes.get(id);
createImageInodeWriter(tInode, os);
if (tInode.isDirectory()) {
nodesQueue.add(tInode);
} else if (((InodeFile) tInode).isPin()) {
synchronized (mFileIdPinList) {
mFileIdPinList.add(tInode.getId());
}
}
}
}
os.writeByte(Image.T_CHECKPOINT);
os.writeInt(mInodeCounter.get());
os.writeLong(mCheckpointInfo.getEditTransactionCounter());
}
}
/**
* Load the image from <code>is</code>. Assume this blocks the whole MasterInfo.
* @param is the input stream to load the image from.
* @throws IOException
*/
public void loadImage(DataInputStream is) throws IOException {
while (true) {
byte type = -1;
try {
type = is.readByte();
} catch (EOFException e) {
return;
}
if (type == Image.T_CHECKPOINT) {
mInodeCounter.set(is.readInt());
mCheckpointInfo.updateEditTransactionCounter(is.readLong());
} else {
if (type > Image.T_INODE_RAW_TABLE) {
throw new IOException("Corrupted image with unknown element type: " + type);
}
long creationTimeMs = is.readLong();
int fileId = is.readInt();
String fileName = Utils.readString(is);
int parentId = is.readInt();
Inode inode = null;
if (type == Image.T_INODE_FILE) {
long blockSizeByte = is.readLong();
long length = is.readLong();
boolean isComplete = is.readBoolean();
boolean isPin = is.readBoolean();
boolean isCache = is.readBoolean();
String checkpointPath = Utils.readString(is);
InodeFile tInode =
new InodeFile(fileName, fileId, parentId, blockSizeByte, creationTimeMs);
try {
tInode.setLength(length);
} catch (Exception e) {
throw new IOException(e);
}
tInode.setComplete(isComplete);
tInode.setPin(isPin);
tInode.setCache(isCache);
tInode.setCheckpointPath(checkpointPath);
inode = tInode;
} else {
int numberOfChildren = is.readInt();
int[] children = new int[numberOfChildren];
for (int k = 0; k < numberOfChildren; k ++) {
children[k] = is.readInt();
}
if (type == Image.T_INODE_FOLDER) {
InodeFolder folder = new InodeFolder(fileName, fileId, parentId, creationTimeMs);
folder.addChildren(children);
inode = folder;
} else {
int columns = is.readInt();
ByteBuffer metadata = Utils.readByteBuffer(is);
try {
InodeRawTable table = new InodeRawTable(
fileName, fileId, parentId, columns, metadata, creationTimeMs);
table.addChildren(children);
inode = table;
} catch (TachyonException e) {
throw new IOException(e);
}
}
}
LOG.info("Putting " + inode);
if (inode.getId() > mInodeCounter.get()) {
mInodeCounter.set(inode.getId());
}
if (inode.getId() == 1) {
mRoot = (InodeFolder) inode;
}
mInodes.put(inode.getId(), inode);
}
}
}
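// Image record layout (summary added for clarity, derived from loadImage above and
// createImageInodeWriter below):
//   T_CHECKPOINT:      inodeCounter (int), editTransactionCounter (long)
//   T_INODE_FILE:      creationTimeMs, id, name, parentId, blockSizeByte, length,
//                      isComplete, isPin, isCache, checkpointPath
//   T_INODE_FOLDER:    creationTimeMs, id, name, parentId, number of children, children ids
//   T_INODE_RAW_TABLE: same as folder, plus columns (int) and metadata (ByteBuffer)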
private void createImageInodeWriter(Inode inode, DataOutputStream os) throws IOException {
if (inode.isFile()) {
InodeFile file = (InodeFile) inode;
os.writeByte(Image.T_INODE_FILE);
os.writeLong(file.getCreationTimeMs());
os.writeInt(file.getId());
Utils.writeString(file.getName(), os);
os.writeInt(file.getParentId());
os.writeLong(file.getBlockSizeByte());
os.writeLong(file.getLength());
os.writeBoolean(file.isComplete());
os.writeBoolean(file.isPin());
os.writeBoolean(file.isCache());
Utils.writeString(file.getCheckpointPath(), os);
} else {
InodeFolder folder = (InodeFolder) inode;
if (folder.isRawTable()) {
os.writeByte(Image.T_INODE_RAW_TABLE);
} else {
os.writeByte(Image.T_INODE_FOLDER);
}
os.writeLong(folder.getCreationTimeMs());
os.writeInt(folder.getId());
Utils.writeString(folder.getName(), os);
os.writeInt(folder.getParentId());
List<Integer> children = folder.getChildrenIds();
os.writeInt(children.size());
for (int k = 0; k < children.size(); k ++) {
os.writeInt(children.get(k));
}
if (folder.isRawTable()) {
InodeRawTable table = (InodeRawTable) folder;
os.writeInt(table.getColumns());
Utils.writeByteBuffer(table.getMetadata(), os);
}
}
}
public long createNewBlock(int fileId) throws FileDoesNotExistException {
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("File " + fileId + " does not exit.");
}
if (!inode.isFile()) {
throw new FileDoesNotExistException("File " + fileId + " is not a file.");
}
return ((InodeFile) inode).getNewBlockId();
}
}
public void completeFile(int fileId) throws FileDoesNotExistException {
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("File " + fileId + " does not exit.");
}
if (!inode.isFile()) {
throw new FileDoesNotExistException("File " + fileId + " is not a file.");
}
((InodeFile) inode).setComplete();
mJournal.getEditLog().completeFile(fileId);
mJournal.getEditLog().flush();
}
}
public int createRawTable(String path, int columns, ByteBuffer metadata)
throws FileAlreadyExistException, InvalidPathException, TableColumnException,
TachyonException {
LOG.info("createRawTable" + CommonUtils.parametersToString(path, columns));
if (columns <= 0 || columns >= Constants.MAX_COLUMNS) {
throw new TableColumnException("Column " + columns + " should between 0 to " +
Constants.MAX_COLUMNS);
}
int id;
try {
id = createFile(true, path, true, columns, metadata, 0);
} catch (BlockInfoException e) {
throw new FileAlreadyExistException(e.getMessage());
}
for (int k = 0; k < columns; k ++) {
mkdir(path + Constants.PATH_SEPARATOR + COL + k);
}
return id;
}
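// Layout note (added for clarity): a raw table at "path" is an InodeRawTable folder with one
// sub-folder per column, named path + "/COL_0" ... "/COL_(columns - 1)", created by the loop above.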
private boolean _delete(int fileId, boolean recursive) throws TachyonException {
LOG.info("delete(" + fileId + ")");
boolean succeed = true;
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
return true;
}
if (inode.isDirectory()) {
List<Integer> childrenIds = ((InodeFolder) inode).getChildrenIds();
if (!recursive && childrenIds.size() != 0) {
return false;
}
for (int childId : childrenIds) {
succeed = succeed && delete(childId, recursive);
}
}
InodeFolder parent = (InodeFolder) mInodes.get(inode.getParentId());
parent.removeChild(inode.getId());
mInodes.remove(inode.getId());
if (inode.isFile()) {
String checkpointPath = ((InodeFile) inode).getCheckpointPath();
if (!checkpointPath.equals("")) {
UnderFileSystem ufs = UnderFileSystem.get(checkpointPath);
try {
if (!ufs.delete(checkpointPath, true)) {
return false;
}
} catch (IOException e) {
throw new TachyonException(e.getMessage());
}
}
List<Pair<Long, Long>> blockIdWorkerIdList = ((InodeFile) inode).getBlockIdWorkerIdPairs();
synchronized (mWorkers) {
for (Pair<Long, Long> blockIdWorkerId: blockIdWorkerIdList) {
MasterWorkerInfo workerInfo = mWorkers.get(blockIdWorkerId.getSecond());
if (workerInfo != null) {
workerInfo.updateToRemovedBlock(true, blockIdWorkerId.getFirst());
}
}
}
if (((InodeFile) inode).isPin()) {
synchronized (mFileIdPinList) {
mFileIdPinList.remove(inode.getId());
}
}
}
inode.reverseId();
return succeed;
}
}
/**
* Delete a file.
* @param fileId The file to be deleted.
* @param recursive whether to delete the file recursively or not.
* @return true if the deletion succeeded, false otherwise
* @throws TachyonException
*/
public boolean delete(int fileId, boolean recursive) throws TachyonException {
boolean ret = _delete(fileId, recursive);
mJournal.getEditLog().delete(fileId, recursive);
mJournal.getEditLog().flush();
return ret;
}
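// Illustrative usage sketch (added for clarity; the id below is hypothetical):
//   // Recursively delete file or folder 7 and record the operation in the edit log:
//   boolean ok = masterInfo.delete(7, true);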
public boolean delete(String path, boolean recursive) throws TachyonException {
LOG.info("delete(" + path + ")");
synchronized (mRoot) {
Inode inode = null;
try {
inode = getInode(path);
} catch (InvalidPathException e) {
return false;
}
if (inode == null) {
return true;
}
return delete(inode.getId(), recursive);
}
}
public long getBlockIdBasedOnOffset(int fileId, long offset) throws FileDoesNotExistException {
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("FileId " + fileId + " does not exist.");
}
if (!inode.isFile()) {
throw new FileDoesNotExistException(fileId + " is not a file.");
}
return ((InodeFile) inode).getBlockIdBasedOnOffset(offset);
}
}
public long getCapacityBytes() {
long ret = 0;
synchronized (mWorkers) {
for (MasterWorkerInfo worker : mWorkers.values()) {
ret += worker.getCapacityBytes();
}
}
return ret;
}
public ClientBlockInfo getClientBlockInfo(long blockId)
throws FileDoesNotExistException, IOException, BlockInfoException {
int fileId = BlockInfo.computeInodeId(blockId);
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null || inode.isDirectory()) {
throw new FileDoesNotExistException("FileId " + fileId + " does not exist.");
}
ClientBlockInfo ret =
((InodeFile) inode).getClientBlockInfo(BlockInfo.computeBlockIndex(blockId));
LOG.debug("getClientBlockInfo: " + blockId + ret);
return ret;
}
}
public ClientFileInfo getClientFileInfo(int fid) throws FileDoesNotExistException {
synchronized (mRoot) {
Inode inode = mInodes.get(fid);
if (inode == null) {
throw new FileDoesNotExistException("FileId " + fid + " does not exist.");
}
ClientFileInfo ret = inode.generateClientFileInfo(getPath(inode));
LOG.debug("getClientFileInfo(" + fid + "): " + ret);
return ret;
}
}
public ClientFileInfo getClientFileInfo(String path)
throws FileDoesNotExistException, InvalidPathException {
LOG.info("getClientFileInfo(" + path + ")");
synchronized (mRoot) {
Inode inode = getInode(path);
if (inode == null) {
throw new FileDoesNotExistException(path);
}
return getClientFileInfo(inode.getId());
}
}
public ClientRawTableInfo getClientRawTableInfo(int id) throws TableDoesNotExistException {
LOG.info("getClientRawTableInfo(" + id + ")");
synchronized (mRoot) {
Inode inode = mInodes.get(id);
if (inode == null || inode.isFile() || !((InodeFolder) inode).isRawTable()) {
throw new TableDoesNotExistException("Table " + id + " does not exist.");
}
ClientRawTableInfo ret = new ClientRawTableInfo();
ret.id = inode.getId();
ret.name = inode.getName();
ret.path = getPath(inode);
ret.columns = ((InodeRawTable) inode).getColumns();
ret.metadata = ((InodeRawTable) inode).getMetadata();
return ret;
}
}
public ClientRawTableInfo getClientRawTableInfo(String path)
throws TableDoesNotExistException, InvalidPathException {
LOG.info("getClientRawTableInfo(" + path + ")");
synchronized (mRoot) {
Inode inode = getInode(path);
if (inode == null) {
throw new TableDoesNotExistException(path);
}
return getClientRawTableInfo(inode.getId());
}
}
/**
* If the <code>path</code> is a directory, return all the direct entries in it. If the
* <code>path</code> is a file, return its ClientFileInfo.
* @param path the target directory/file path
* @return A list of ClientFileInfo
* @throws FileDoesNotExistException
* @throws InvalidPathException
*/
public List<ClientFileInfo> getFilesInfo(String path)
throws FileDoesNotExistException, InvalidPathException {
List<ClientFileInfo> ret = new ArrayList<ClientFileInfo>();
Inode inode = getInode(path);
if (inode == null) {
throw new FileDoesNotExistException(path);
}
if (inode.isDirectory()) {
List<Integer> childrenIds = ((InodeFolder) inode).getChildrenIds();
if (!path.endsWith("/")) {
path += "/";
}
synchronized (mRoot) {
for (int k : childrenIds) {
ret.add(getClientFileInfo(k));
}
}
} else {
ret.add(getClientFileInfo(inode.getId()));
}
return ret;
}
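// Illustrative usage sketch (added for clarity; the path below is hypothetical):
//   // For a directory this returns one ClientFileInfo per direct child;
//   // for a file it returns a single-element list:
//   List<ClientFileInfo> infos = masterInfo.getFilesInfo("/data");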
public String getFileNameById(int fileId) throws FileDoesNotExistException {
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("FileId " + fileId + " does not exist");
}
return getPath(inode);
}
}
public List<ClientBlockInfo> getFileLocations(int fileId)
throws FileDoesNotExistException, IOException {
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null || inode.isDirectory()) {
throw new FileDoesNotExistException("FileId " + fileId + " does not exist.");
}
List<ClientBlockInfo> ret = ((InodeFile) inode).getClientBlockInfos();
LOG.debug("getFileLocations: " + fileId + ret);
return ret;
}
}
public List<ClientBlockInfo> getFileLocations(String path)
throws FileDoesNotExistException, InvalidPathException, IOException {
LOG.info("getFileLocations: " + path);
synchronized (mRoot) {
Inode inode = getInode(path);
if (inode == null) {
throw new FileDoesNotExistException(path);
}
return getFileLocations(inode.getId());
}
}
/**
* Get the file id of the file.
* @param path The path of the file
* @return The file id of the file. -1 if the file does not exist.
* @throws InvalidPathException
*/
public int getFileId(String path) throws InvalidPathException {
LOG.debug("getFileId(" + path + ")");
Inode inode = getInode(path);
int ret = -1;
if (inode != null) {
ret = inode.getId();
}
LOG.debug("getFileId(" + path + "): " + ret);
return ret;
}
private Inode getInode(String path) throws InvalidPathException {
return getInode(getPathNames(path));
}
private Inode getInode(String[] pathNames) throws InvalidPathException {
if (pathNames == null || pathNames.length == 0) {
return null;
}
if (pathNames.length == 1) {
if (pathNames[0].equals("")) {
return mRoot;
} else {
LOG.info("InvalidPathException: File name starts with " + pathNames[0]);
throw new InvalidPathException("File name starts with " + pathNames[0]);
}
}
Inode cur = mRoot;
synchronized (mRoot) {
for (int k = 1; k < pathNames.length && cur != null; k ++) {
String name = pathNames[k];
if (cur.isFile()) {
return null;
}
cur = ((InodeFolder) cur).getChild(name, mInodes);
}
return cur;
}
}
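// Path resolution note (added for clarity): getPathNames("/a/b/c") yields ["", "a", "b", "c"],
// so the walk above starts at index 1 under mRoot; the root path "/" maps to the single element
// [""] and resolves directly to mRoot.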
/**
* Get absolute paths of all in-memory files.
*
* @return absolute paths of all in-memory files.
*/
public List<String> getInMemoryFiles() {
List<String> ret = new ArrayList<String>();
LOG.info("getInMemoryFiles()");
Queue<Pair<InodeFolder, String>> nodesQueue = new LinkedList<Pair<InodeFolder, String>>();
synchronized (mRoot) {
nodesQueue.add(new Pair<InodeFolder, String>(mRoot, ""));
while (!nodesQueue.isEmpty()) {
Pair<InodeFolder, String> tPair = nodesQueue.poll();
InodeFolder tFolder = tPair.getFirst();
String curPath = tPair.getSecond();
List<Integer> childrenIds = tFolder.getChildrenIds();
for (int id : childrenIds) {
Inode tInode = mInodes.get(id);
String newPath = curPath + Constants.PATH_SEPARATOR + tInode.getName();
if (tInode.isDirectory()) {
nodesQueue.add(new Pair<InodeFolder, String>((InodeFolder) tInode, newPath));
} else if (((InodeFile) tInode).isFullyInMemory()) {
ret.add(newPath);
}
}
}
}
return ret;
}
public InetSocketAddress getMasterAddress() {
return MASTER_ADDRESS;
}
private static String getName(String path) throws InvalidPathException {
String[] pathNames = getPathNames(path);
return pathNames[pathNames.length - 1];
}
public long getNewUserId() {
return mUserCounter.incrementAndGet();
}
public int getNumberOfFiles(String path) throws InvalidPathException, FileDoesNotExistException {
Inode inode = getInode(path);
if (inode == null) {
throw new FileDoesNotExistException(path);
}
if (inode.isFile()) {
return 1;
}
return ((InodeFolder) inode).getNumberOfChildren();
}
private String getPath(Inode inode) {
synchronized (mRoot) {
if (inode.getId() == 1) {
return "/";
}
if (inode.getParentId() == 1) {
return Constants.PATH_SEPARATOR + inode.getName();
}
return getPath(mInodes.get(inode.getParentId())) + Constants.PATH_SEPARATOR + inode.getName();
}
}
private static String[] getPathNames(String path) throws InvalidPathException {
CommonUtils.validatePath(path);
if (path.length() == 1 && path.equals(Constants.PATH_SEPARATOR)) {
String[] ret = new String[1];
ret[0] = "";
return ret;
}
return path.split(Constants.PATH_SEPARATOR);
}
public List<String> getPinList() {
return mPinList.getList();
}
public List<Integer> getPinIdList() {
synchronized (mFileIdPinList) {
List<Integer> ret = new ArrayList<Integer>();
for (int id : mFileIdPinList) {
ret.add(id);
}
return ret;
}
}
public int getRawTableId(String path) throws InvalidPathException {
Inode inode = getInode(path);
if (inode == null || inode.isFile() || !((InodeFolder) inode).isRawTable()) {
return -1;
}
return inode.getId();
}
public long getStarttimeMs() {
return START_TIME_MS;
}
public long getUnderFsCapacityBytes() throws IOException {
UnderFileSystem ufs = UnderFileSystem.get(CommonConf.get().UNDERFS_DATA_FOLDER);
return ufs.getSpace(CommonConf.get().UNDERFS_DATA_FOLDER, SpaceType.SPACE_TOTAL);
}
public long getUnderFsUsedBytes() throws IOException {
UnderFileSystem ufs = UnderFileSystem.get(CommonConf.get().UNDERFS_DATA_FOLDER);
return ufs.getSpace(CommonConf.get().UNDERFS_DATA_FOLDER, SpaceType.SPACE_USED);
}
public long getUnderFsFreeBytes() throws IOException {
UnderFileSystem ufs = UnderFileSystem.get(CommonConf.get().UNDERFS_DATA_FOLDER);
return ufs.getSpace(CommonConf.get().UNDERFS_DATA_FOLDER, SpaceType.SPACE_FREE);
}
public long getUsedBytes() {
long ret = 0;
synchronized (mWorkers) {
for (MasterWorkerInfo worker : mWorkers.values()) {
ret += worker.getUsedBytes();
}
}
return ret;
}
public NetAddress getWorker(boolean random, String host) {
synchronized (mWorkers) {
if (mWorkerAddressToId.isEmpty()) {
return null;
}
if (random) {
int index = new Random(mWorkerAddressToId.size()).nextInt(mWorkerAddressToId.size());
for (InetSocketAddress address: mWorkerAddressToId.keySet()) {
if (index == 0) {
LOG.debug("getRandomWorker: " + address);
return new NetAddress(address.getHostName(), address.getPort());
}
index --;
}
for (InetSocketAddress address: mWorkerAddressToId.keySet()) {
LOG.debug("getRandomWorker: " + address);
return new NetAddress(address.getHostName(), address.getPort());
}
} else {
for (InetSocketAddress address: mWorkerAddressToId.keySet()) {
if (address.getHostName().equals(host)
|| address.getAddress().getHostAddress().equals(host)
|| address.getAddress().getCanonicalHostName().equals(host)) {
LOG.debug("getLocalWorker: " + address);
return new NetAddress(address.getHostName(), address.getPort());
}
}
}
}
LOG.info("getLocalWorker: no local worker on " + host);
return null;
}
public int getWorkerCount() {
synchronized (mWorkers) {
return mWorkers.size();
}
}
private MasterWorkerInfo getWorkerInfo(long workerId) {
MasterWorkerInfo ret = null;
synchronized (mWorkers) {
ret = mWorkers.get(workerId);
if (ret == null) {
LOG.error("No worker: " + workerId);
}
}
return ret;
}
public List<ClientWorkerInfo> getWorkersInfo() {
List<ClientWorkerInfo> ret = new ArrayList<ClientWorkerInfo>();
synchronized (mWorkers) {
for (MasterWorkerInfo worker : mWorkers.values()) {
ret.add(worker.generateClientWorkerInfo());
}
}
return ret;
}
public List<String> getWhiteList() {
return mWhiteList.getList();
}
public List<Integer> listFiles(String path, boolean recursive)
throws InvalidPathException, FileDoesNotExistException {
List<Integer> ret = new ArrayList<Integer>();
synchronized (mRoot) {
Inode inode = getInode(path);
if (inode == null) {
throw new FileDoesNotExistException(path);
}
if (inode.isFile()) {
ret.add(inode.getId());
} else if (recursive) {
Queue<Integer> queue = new LinkedList<Integer>();
queue.addAll(((InodeFolder) inode).getChildrenIds());
while (!queue.isEmpty()) {
int id = queue.poll();
inode = mInodes.get(id);
if (inode.isDirectory()) {
queue.addAll(((InodeFolder) inode).getChildrenIds());
} else {
ret.add(id);
}
}
}
}
return ret;
}
public List<String> ls(String path, boolean recursive)
throws InvalidPathException, FileDoesNotExistException {
List<String> ret = new ArrayList<String>();
Inode inode = getInode(path);
if (inode == null) {
throw new FileDoesNotExistException(path);
}
if (inode.isFile()) {
ret.add(path);
} else {
List<Integer> childrenIds = ((InodeFolder) inode).getChildrenIds();
if (!path.endsWith("/")) {
path += "/";
}
ret.add(path);
synchronized (mRoot) {
for (int k : childrenIds) {
inode = mInodes.get(k);
if (inode != null) {
if (recursive) {
ret.addAll(ls(path + inode.getName(), true));
} else {
ret.add(path + inode.getName());
}
}
}
}
}
return ret;
}
public boolean mkdir(String path)
throws FileAlreadyExistException, InvalidPathException, TachyonException {
try {
return createFile(true, path, true, -1, null, 0) > 0;
} catch (BlockInfoException e) {
throw new FileAlreadyExistException(e.getMessage());
}
}
public long registerWorker(NetAddress workerNetAddress, long totalBytes,
long usedBytes, List<Long> currentBlockIds) throws BlockInfoException {
long id = 0;
InetSocketAddress workerAddress =
new InetSocketAddress(workerNetAddress.mHost, workerNetAddress.mPort);
LOG.info("registerWorker(): WorkerNetAddress: " + workerAddress);
synchronized (mWorkers) {
if (mWorkerAddressToId.containsKey(workerAddress)) {
id = mWorkerAddressToId.get(workerAddress);
mWorkerAddressToId.remove(workerAddress);
LOG.warn("The worker " + workerAddress + " already exists as id " + id + ".");
}
if (id != 0 && mWorkers.containsKey(id)) {
MasterWorkerInfo tWorkerInfo = mWorkers.get(id);
mWorkers.remove(id);
mLostWorkers.add(tWorkerInfo);
LOG.warn("The worker with id " + id + " has been removed.");
}
id = START_TIME_NS_PREFIX + mWorkerCounter.incrementAndGet();
MasterWorkerInfo tWorkerInfo = new MasterWorkerInfo(id, workerAddress, totalBytes);
tWorkerInfo.updateUsedBytes(usedBytes);
tWorkerInfo.updateBlocks(true, currentBlockIds);
tWorkerInfo.updateLastUpdatedTimeMs();
mWorkers.put(id, tWorkerInfo);
mWorkerAddressToId.put(workerAddress, id);
LOG.info("registerWorker(): " + tWorkerInfo);
}
synchronized (mRoot) {
for (long blockId: currentBlockIds) {
int fileId = BlockInfo.computeInodeId(blockId);
int blockIndex = BlockInfo.computeBlockIndex(blockId);
Inode inode = mInodes.get(fileId);
if (inode != null && inode.isFile()) {
((InodeFile) inode).addLocation(blockIndex, id, workerNetAddress);
} else {
LOG.warn("registerWorker failed to add fileId " + fileId + " blockIndex " + blockIndex);
}
}
}
return id;
}
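// Worker id scheme (note added for clarity): ids are START_TIME_NS_PREFIX plus a per-master
// counter, presumably so that ids issued by masters with different start times are unlikely to
// collide; when a known address re-registers, its old entry is moved to mLostWorkers and a fresh
// id is issued, as done above.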
private void rename(Inode srcInode, String dstPath)
throws FileAlreadyExistException, InvalidPathException, FileDoesNotExistException {
if (getInode(dstPath) != null) {
throw new FileAlreadyExistException("Failed to rename: " + dstPath + " already exists");
}
String dstName = getName(dstPath);
String dstFolderPath = dstPath.substring(0, dstPath.length() - dstName.length() - 1);
// If we are renaming into the root folder
if (dstFolderPath.isEmpty()) {
dstFolderPath = "/";
}
Inode dstFolderInode = getInode(dstFolderPath);
if (dstFolderInode == null || dstFolderInode.isFile()) {
throw new FileDoesNotExistException("Failed to rename: " + dstFolderPath +
" does not exist.");
}
srcInode.setName(dstName);
InodeFolder parent = (InodeFolder) mInodes.get(srcInode.getParentId());
parent.removeChild(srcInode.getId());
srcInode.setParentId(dstFolderInode.getId());
((InodeFolder) dstFolderInode).addChild(srcInode.getId());
mJournal.getEditLog().rename(srcInode.getId(), dstPath);
mJournal.getEditLog().flush();
}
public void rename(int fileId, String dstPath)
throws FileDoesNotExistException, FileAlreadyExistException, InvalidPathException {
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("Failed to rename: " + fileId + " does not exist");
}
rename(inode, dstPath);
}
}
public void rename(String srcPath, String dstPath)
throws FileAlreadyExistException, FileDoesNotExistException, InvalidPathException {
synchronized (mRoot) {
Inode inode = getInode(srcPath);
if (inode == null) {
throw new FileDoesNotExistException("Failed to rename: " + srcPath + " does not exist");
}
rename(inode, dstPath);
}
}
public void unpinFile(int fileId) throws FileDoesNotExistException {
// TODO Change metadata only; data will be evicted from the worker based on the data replacement
// policy. TODO May change it to be active from V0.2.
LOG.info("unpinFile(" + fileId + ")");
synchronized (mRoot) {
Inode inode = mInodes.get(fileId);
if (inode == null) {
throw new FileDoesNotExistException("Failed to unpin " + fileId);
}
((InodeFile) inode).setPin(false);
synchronized (mFileIdPinList) {
mFileIdPinList.remove(fileId);
}
mJournal.getEditLog().unpinFile(fileId);
mJournal.getEditLog().flush();
}
}
public void updateRawTableMetadata(int tableId, ByteBuffer metadata)
throws TableDoesNotExistException, TachyonException {
synchronized (mRoot) {
Inode inode = mInodes.get(tableId);
if (inode == null || inode.getInodeType() != InodeType.RawTable) {
throw new TableDoesNotExistException("Table " + tableId + " does not exist.");
}
((InodeRawTable) inode).updateMetadata(metadata);
mJournal.getEditLog().updateRawTableMetadata(tableId, metadata);
mJournal.getEditLog().flush();
}
}
public Command workerHeartbeat(long workerId, long usedBytes, List<Long> removedBlockIds)
throws BlockInfoException {
LOG.debug("WorkerId: " + workerId);
synchronized (mRoot) {
synchronized (mWorkers) {
MasterWorkerInfo tWorkerInfo = mWorkers.get(workerId);
if (tWorkerInfo == null) {
LOG.info("worker_heartbeat(): Does not contain worker with ID " + workerId +
" . Send command to let it re-register.");
return new Command(CommandType.Register, new ArrayList<Long>());
}
tWorkerInfo.updateUsedBytes(usedBytes);
tWorkerInfo.updateBlocks(false, removedBlockIds);
tWorkerInfo.updateToRemovedBlocks(false, removedBlockIds);
tWorkerInfo.updateLastUpdatedTimeMs();
for (long blockId : removedBlockIds) {
int fileId = BlockInfo.computeInodeId(blockId);
int blockIndex = BlockInfo.computeBlockIndex(blockId);
Inode inode = mInodes.get(fileId);
if (inode == null) {
LOG.error("File " + fileId + " does not exist");
} else if (inode.isFile()) {
((InodeFile) inode).removeLocation(blockIndex, workerId);
LOG.debug("File " + fileId + " block " + blockIndex +
" was evicted from worker " + workerId);
}
}
List<Long> toRemovedBlocks = tWorkerInfo.getToRemovedBlocks();
if (toRemovedBlocks.size() != 0) {
return new Command(CommandType.Free, toRemovedBlocks);
}
}
}
return new Command(CommandType.Nothing, new ArrayList<Long>());
}
public void stop() {
mHeartbeatThread.shutdown();
}
}
|
make file creation and deletion synchronized
|
src/main/java/tachyon/MasterInfo.java
|
make file creation and deletion synchronized
|
|
Java
|
apache-2.0
|
3864e25fb9f2980aae5906e067dafc244ec38b32
| 0
|
ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,apixandru/intellij-community,apixandru/intellij-community,da1z/intellij-community,da1z/intellij-community,apixandru/intellij-community,xfournet/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,xfournet/intellij-community,apixandru/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,da1z/intellij-community,vvv1559/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,da1z/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,apixandru/intellij-community,allotria/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,allotria/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,da1z/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,xfournet/intellij-community,apixandru/intellij-community,vvv1559/intellij-community
|
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.dupLocator.index;
import com.intellij.codeInspection.*;
import com.intellij.dupLocator.DuplicatesProfile;
import com.intellij.dupLocator.DuplocateVisitor;
import com.intellij.dupLocator.DuplocatorState;
import com.intellij.dupLocator.LightDuplicateProfile;
import com.intellij.dupLocator.util.PsiFragment;
import com.intellij.lang.FileASTNode;
import com.intellij.lang.LighterAST;
import com.intellij.lang.LighterASTNode;
import com.intellij.lang.TreeBackedLighterAST;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.FileIndex;
import com.intellij.openapi.roots.GeneratedSourcesFilter;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.roots.TestSourcesFilter;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileWithId;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.tree.ILightStubFileElementType;
import com.intellij.util.SmartList;
import com.intellij.util.indexing.FileBasedIndex;
import gnu.trove.TIntArrayList;
import gnu.trove.TIntIntHashMap;
import gnu.trove.TIntLongHashMap;
import gnu.trove.TIntObjectHashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Iterator;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
public class DuplicatesInspectionBase extends LocalInspectionTool {
public boolean myFilterOutGeneratedCode;
private static final int MIN_FRAGMENT_SIZE = 3; // todo 3 statements constant
@Nullable
@Override
public ProblemDescriptor[] checkFile(@NotNull final PsiFile psiFile, @NotNull final InspectionManager manager, final boolean isOnTheFly) {
final VirtualFile virtualFile = psiFile.getVirtualFile();
if (!(virtualFile instanceof VirtualFileWithId) || /*!isOnTheFly || */!DuplicatesIndex.ourEnabled) return ProblemDescriptor.EMPTY_ARRAY;
final DuplicatesProfile profile = DuplicatesIndex.findDuplicatesProfile(psiFile.getFileType());
if (profile == null) return ProblemDescriptor.EMPTY_ARRAY;
final FileASTNode node = psiFile.getNode();
boolean usingLightProfile = profile instanceof LightDuplicateProfile &&
node.getElementType() instanceof ILightStubFileElementType &&
DuplicatesIndex.ourEnabledLightProfiles;
final Project project = psiFile.getProject();
DuplicatedCodeProcessor<?> processor;
if (usingLightProfile) {
processor = processLightDuplicates(node, virtualFile, (LightDuplicateProfile)profile, project);
}
else {
processor = processPsiDuplicates(psiFile, virtualFile, profile, project);
}
if (processor == null) return null;
final SmartList<ProblemDescriptor> descriptors = new SmartList<>();
final VirtualFile baseDir = project.getBaseDir();
for (Map.Entry<Integer, TextRange> entry : processor.reportedRanges.entrySet()) {
final Integer offset = entry.getKey();
if (!usingLightProfile && processor.fragmentSize.get(offset) < MIN_FRAGMENT_SIZE) continue;
final VirtualFile file = processor.reportedFiles.get(offset);
String path = null;
if (file.equals(virtualFile)) {
path = "this file";
}
else if (baseDir != null) {
path = VfsUtilCore.getRelativePath(file, baseDir);
}
if (path == null) {
path = file.getPath();
}
String message = "Found duplicated code in " + path;
PsiElement targetElement = processor.reportedPsi.get(offset);
TextRange rangeInElement = entry.getValue();
final int offsetInOtherFile = processor.reportedOffsetInOtherFiles.get(offset);
LocalQuickFix fix = createNavigateToDupeFix(file, offsetInOtherFile);
long hash = processor.fragmentHash.get(offset);
LocalQuickFix viewAllDupesFix = hash != 0 ? createShowOtherDupesFix(virtualFile, offset, (int)hash, (int)(hash >> 32)) : null;
ProblemDescriptor descriptor = manager
.createProblemDescriptor(targetElement, rangeInElement, message, ProblemHighlightType.GENERIC_ERROR_OR_WARNING, isOnTheFly, fix,
viewAllDupesFix);
descriptors.add(descriptor);
}
return descriptors.isEmpty() ? null : descriptors.toArray(ProblemDescriptor.EMPTY_ARRAY);
}
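// Flow summary (comment added for clarity): checkFile takes the light-AST path when the profile
// is a LightDuplicateProfile and the file has a light stub element type, otherwise the PSI path;
// either path fills a DuplicatedCodeProcessor whose reported ranges are then turned into one
// ProblemDescriptor per duplicated fragment, with navigate/show-all quick fixes when available.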
private DuplicatedCodeProcessor<?> processLightDuplicates(FileASTNode node,
VirtualFile virtualFile,
LightDuplicateProfile profile,
Project project) {
final Ref<DuplicatedCodeProcessor<LighterASTNode>> processorRef = new Ref<>();
LighterAST lighterAST = node.getLighterAST();
profile.process(lighterAST, (hash, hash2, ast, nodes) -> {
DuplicatedCodeProcessor<LighterASTNode> processor = processorRef.get();
if (processor == null) {
processorRef.set(processor = new LightDuplicatedCodeProcessor((TreeBackedLighterAST)ast, virtualFile, project));
}
processor.process(hash, hash2, nodes[0]);
});
return processorRef.get();
}
private DuplicatedCodeProcessor<?> processPsiDuplicates(PsiFile psiFile,
VirtualFile virtualFile,
DuplicatesProfile profile,
Project project) {
final DuplocatorState state = profile.getDuplocatorState(psiFile.getLanguage());
final Ref<DuplicatedCodeProcessor<PsiFragment>> processorRef = new Ref<>();
DuplocateVisitor visitor = profile.createVisitor((hash, cost, frag) -> {
if (!DuplicatesIndex.isIndexedFragment(frag, cost, profile, state)) {
return;
}
DuplicatedCodeProcessor<PsiFragment> processor = processorRef.get();
if (processor == null) {
processorRef.set(processor = new OldDuplicatedCodeProcessor(virtualFile, project));
}
processor.process(hash, 0, frag);
}, true);
visitor.visitNode(psiFile);
return processorRef.get();
}
protected LocalQuickFix createNavigateToDupeFix(@NotNull VirtualFile file, int offsetInOtherFile) {
return null;
}
protected LocalQuickFix createShowOtherDupesFix(VirtualFile file, int offset, int hash, int hash2) {
return null;
}
private class LightDuplicatedCodeProcessor extends DuplicatedCodeProcessor<LighterASTNode> {
private TreeBackedLighterAST myAst;
private LightDuplicatedCodeProcessor(@NotNull TreeBackedLighterAST ast, VirtualFile file, Project project) {
super(file, project, myFilterOutGeneratedCode);
myAst = ast;
}
@Override
protected TextRange getRangeInElement(LighterASTNode node) {
return null;
}
@Override
protected PsiElement getPsi(LighterASTNode node) {
return myAst.unwrap(node).getPsi();
}
@Override
protected int getStartOffset(LighterASTNode node) {
return node.getStartOffset();
}
@Override
protected int getEndOffset(LighterASTNode node) {
return node.getEndOffset();
}
@Override
protected boolean isLightProfile() {
return true;
}
}
class OldDuplicatedCodeProcessor extends DuplicatedCodeProcessor<PsiFragment> {
private OldDuplicatedCodeProcessor(VirtualFile file, Project project) {
super(file, project, myFilterOutGeneratedCode);
}
@Override
protected TextRange getRangeInElement(PsiFragment node) {
PsiElement[] elements = node.getElements();
TextRange rangeInElement = null;
if (elements.length > 1) {
PsiElement lastElement = elements[elements.length - 1];
rangeInElement = new TextRange(
elements[0].getStartOffsetInParent(),
lastElement.getStartOffsetInParent() + lastElement.getTextLength()
);
}
return rangeInElement;
}
@Override
protected PsiElement getPsi(PsiFragment node) {
PsiElement[] elements = node.getElements();
return elements.length > 1 ? elements[0].getParent() : elements[0];
}
@Override
protected int getStartOffset(PsiFragment node) {
return node.getStartOffset();
}
@Override
protected int getEndOffset(PsiFragment node) {
return node.getEndOffset();
}
@Override
protected boolean isLightProfile() {
return false;
}
}
abstract static class DuplicatedCodeProcessor<T> implements FileBasedIndex.ValueProcessor<TIntArrayList> {
final TreeMap<Integer, TextRange> reportedRanges = new TreeMap<>();
final TIntObjectHashMap<VirtualFile> reportedFiles = new TIntObjectHashMap<>();
final TIntObjectHashMap<PsiElement> reportedPsi = new TIntObjectHashMap<>();
final TIntIntHashMap reportedOffsetInOtherFiles = new TIntIntHashMap();
final TIntIntHashMap fragmentSize = new TIntIntHashMap();
final TIntLongHashMap fragmentHash = new TIntLongHashMap();
final VirtualFile virtualFile;
final Project project;
final FileIndex myFileIndex;
final boolean mySkipGeneratedCode;
final boolean myFileWithinGeneratedCode;
T myNode;
int myHash;
int myHash2;
DuplicatedCodeProcessor(VirtualFile file, Project project, boolean skipGeneratedCode) {
virtualFile = file;
this.project = project;
myFileIndex = ProjectRootManager.getInstance(project).getFileIndex();
mySkipGeneratedCode = skipGeneratedCode;
myFileWithinGeneratedCode = skipGeneratedCode && GeneratedSourcesFilter.isGeneratedSourceByAnyFilter(file, project);
}
void process(int hash, int hash2, T node) {
ProgressManager.checkCanceled();
myNode = node;
myHash = hash;
myHash2 = hash2;
FileBasedIndex.getInstance().processValues(DuplicatesIndex.NAME, hash, null, this, GlobalSearchScope.projectScope(project));
}
@Override
public boolean process(@NotNull VirtualFile file, TIntArrayList list) {
for(int i = 0, len = list.size(); i < len; i+=2) {
ProgressManager.checkCanceled();
if (list.getQuick(i + 1) != myHash2) continue;
int offset = list.getQuick(i);
if (myFileIndex.isInSourceContent(virtualFile)) {
if (!myFileIndex.isInSourceContent(file)) return true;
if (!TestSourcesFilter.isTestSources(virtualFile, project) && TestSourcesFilter.isTestSources(file, project)) return true;
if (mySkipGeneratedCode) {
if (!myFileWithinGeneratedCode && GeneratedSourcesFilter.isGeneratedSourceByAnyFilter(file, project)) return true;
}
} else if (myFileIndex.isInSourceContent(file)) {
return true;
}
final int startOffset = getStartOffset(myNode);
final int endOffset = getEndOffset(myNode);
if (file.equals(virtualFile) && offset >= startOffset && offset < endOffset) continue;
PsiElement target = getPsi(myNode);
TextRange rangeInElement = getRangeInElement(myNode);
Integer fragmentStartOffsetInteger = startOffset;
SortedMap<Integer,TextRange> map = reportedRanges.subMap(fragmentStartOffsetInteger, endOffset);
int newFragmentSize = !map.isEmpty() ? 0:1;
Iterator<Integer> iterator = map.keySet().iterator();
while(iterator.hasNext()) {
Integer next = iterator.next();
iterator.remove();
reportedFiles.remove(next);
reportedOffsetInOtherFiles.remove(next);
reportedPsi.remove(next);
newFragmentSize += fragmentSize.remove(next);
}
reportedRanges.put(fragmentStartOffsetInteger, rangeInElement);
reportedFiles.put(fragmentStartOffsetInteger, file);
reportedOffsetInOtherFiles.put(fragmentStartOffsetInteger, offset);
reportedPsi.put(fragmentStartOffsetInteger, target);
fragmentSize.put(fragmentStartOffsetInteger, newFragmentSize);
if (newFragmentSize >= MIN_FRAGMENT_SIZE || isLightProfile()) {
fragmentHash.put(fragmentStartOffsetInteger, (myHash & 0xFFFFFFFFL) | ((long)myHash2 << 32));
}
return false;
}
return true;
}
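// Note (added for clarity): entries in the index value list come in (offset, hash2) pairs, which
// is why the loop above steps by 2; the 64-bit fragmentHash stored for the quick fixes packs
// myHash into the low 32 bits and myHash2 into the high 32 bits.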
protected abstract TextRange getRangeInElement(T node);
protected abstract PsiElement getPsi(T node);
protected abstract int getStartOffset(T node);
protected abstract int getEndOffset(T node);
protected abstract boolean isLightProfile();
}
}
|
platform/duplicates-analysis/src/com/intellij/dupLocator/index/DuplicatesInspectionBase.java
|
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.dupLocator.index;
import com.intellij.codeInspection.*;
import com.intellij.dupLocator.DuplicatesProfile;
import com.intellij.dupLocator.DuplocatorState;
import com.intellij.dupLocator.LightDuplicateProfile;
import com.intellij.dupLocator.treeHash.FragmentsCollector;
import com.intellij.dupLocator.util.PsiFragment;
import com.intellij.lang.FileASTNode;
import com.intellij.lang.LighterAST;
import com.intellij.lang.LighterASTNode;
import com.intellij.lang.TreeBackedLighterAST;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.FileIndex;
import com.intellij.openapi.roots.GeneratedSourcesFilter;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.roots.TestSourcesFilter;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileWithId;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.tree.ILightStubFileElementType;
import com.intellij.util.SmartList;
import com.intellij.util.indexing.FileBasedIndex;
import gnu.trove.TIntArrayList;
import gnu.trove.TIntIntHashMap;
import gnu.trove.TIntLongHashMap;
import gnu.trove.TIntObjectHashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Iterator;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
public class DuplicatesInspectionBase extends LocalInspectionTool {
public boolean myFilterOutGeneratedCode;
private static final int MIN_FRAGMENT_SIZE = 3; // todo 3 statements constant
@Nullable
@Override
public ProblemDescriptor[] checkFile(@NotNull final PsiFile psiFile, @NotNull final InspectionManager manager, final boolean isOnTheFly) {
final VirtualFile virtualFile = psiFile.getVirtualFile();
if (!(virtualFile instanceof VirtualFileWithId) || /*!isOnTheFly || */!DuplicatesIndex.ourEnabled) return ProblemDescriptor.EMPTY_ARRAY;
final DuplicatesProfile profile = DuplicatesIndex.findDuplicatesProfile(psiFile.getFileType());
if (profile == null) return ProblemDescriptor.EMPTY_ARRAY;
final Ref<DuplicatedCodeProcessor> myProcessorRef = new Ref<>();
final FileASTNode node = psiFile.getNode();
boolean usingLightProfile = profile instanceof LightDuplicateProfile &&
node.getElementType() instanceof ILightStubFileElementType &&
DuplicatesIndex.ourEnabledLightProfiles;
if (usingLightProfile) {
LighterAST ast = node.getLighterAST();
((LightDuplicateProfile)profile).process(ast, new LightDuplicateProfile.Callback() {
DuplicatedCodeProcessor<LighterASTNode> myProcessor;
@Override
public void process(int hash, int hash2, @NotNull final LighterAST ast, @NotNull final LighterASTNode... nodes) {
class LightDuplicatedCodeProcessor extends DuplicatedCodeProcessor<LighterASTNode> {
private LightDuplicatedCodeProcessor(VirtualFile file, Project project) {
super(file, project, myFilterOutGeneratedCode);
}
@Override
protected TextRange getRangeInElement(LighterASTNode node) {
return null;
}
@Override
protected PsiElement getPsi(LighterASTNode node) {
return ((TreeBackedLighterAST)ast).unwrap(node).getPsi();
}
@Override
protected int getStartOffset(LighterASTNode node) {
return node.getStartOffset();
}
@Override
protected int getEndOffset(LighterASTNode node) {
return node.getEndOffset();
}
@Override
protected boolean isLightProfile() {
return true;
}
}
if (myProcessor == null) {
myProcessor = new LightDuplicatedCodeProcessor(virtualFile, psiFile.getProject());
myProcessorRef.set(myProcessor);
}
myProcessor.process(hash, hash2, nodes[0]);
}
});
} else {
final DuplocatorState state = profile.getDuplocatorState(psiFile.getLanguage());
profile.createVisitor(new FragmentsCollector() {
DuplicatedCodeProcessor<PsiFragment> myProcessor;
@Override
public void add(int hash, final int cost, @Nullable final PsiFragment frag) {
if (!DuplicatesIndex.isIndexedFragment(frag, cost, profile, state)) {
return;
}
class OldDuplicatedCodeProcessor extends DuplicatedCodeProcessor<PsiFragment> {
private OldDuplicatedCodeProcessor(VirtualFile file, Project project) {
super(file, project, myFilterOutGeneratedCode);
}
@Override
protected TextRange getRangeInElement(PsiFragment node) {
PsiElement[] elements = node.getElements();
TextRange rangeInElement = null;
if (elements.length > 1) {
PsiElement lastElement = elements[elements.length - 1];
rangeInElement = new TextRange(
elements[0].getStartOffsetInParent(),
lastElement.getStartOffsetInParent() + lastElement.getTextLength()
);
}
return rangeInElement;
}
@Override
protected PsiElement getPsi(PsiFragment node) {
PsiElement[] elements = node.getElements();
return elements.length > 1 ? elements[0].getParent() : elements[0];
}
@Override
protected int getStartOffset(PsiFragment node) {
return node.getStartOffset();
}
@Override
protected int getEndOffset(PsiFragment node) {
return node.getEndOffset();
}
@Override
protected boolean isLightProfile() {
return false;
}
}
if (myProcessor == null) {
myProcessor = new OldDuplicatedCodeProcessor(virtualFile, psiFile.getProject());
myProcessorRef.set(myProcessor);
}
myProcessor.process(hash, 0, frag);
}
}, true).visitNode(psiFile);
}
DuplicatedCodeProcessor<?> processor = myProcessorRef.get();
final SmartList<ProblemDescriptor> descriptors = new SmartList<>();
if (processor != null) {
final VirtualFile baseDir = psiFile.getProject().getBaseDir();
for(Map.Entry<Integer, TextRange> entry:processor.reportedRanges.entrySet()) {
final Integer offset = entry.getKey();
if (!usingLightProfile && processor.fragmentSize.get(offset) < MIN_FRAGMENT_SIZE) continue;
final VirtualFile file = processor.reportedFiles.get(offset);
String path = null;
if (file.equals(virtualFile)) path = "this file";
else if (baseDir != null) {
path = VfsUtilCore.getRelativePath(file, baseDir);
}
if (path == null) {
path = file.getPath();
}
String message = "Found duplicated code in " + path;
PsiElement targetElement = processor.reportedPsi.get(offset);
TextRange rangeInElement = entry.getValue();
final int offsetInOtherFile = processor.reportedOffsetInOtherFiles.get(offset);
LocalQuickFix fix = createNavigateToDupeFix(file, offsetInOtherFile);
long hash = processor.fragmentHash.get(offset);
LocalQuickFix viewAllDupesFix = hash != 0 ? createShowOtherDupesFix(virtualFile, offset, (int)hash, (int)(hash >> 32), psiFile.getProject()) : null;
ProblemDescriptor descriptor = manager
.createProblemDescriptor(targetElement, rangeInElement, message, ProblemHighlightType.GENERIC_ERROR_OR_WARNING, isOnTheFly, fix, viewAllDupesFix);
descriptors.add(descriptor);
}
}
return descriptors.isEmpty() ? null : descriptors.toArray(new ProblemDescriptor[descriptors.size()]);
}
protected LocalQuickFix createNavigateToDupeFix(@NotNull VirtualFile file, int offsetInOtherFile) {
return null;
}
protected LocalQuickFix createShowOtherDupesFix(VirtualFile file, int offset, int hash, int hash2, Project project) {
return null;
}
abstract static class DuplicatedCodeProcessor<T> implements FileBasedIndex.ValueProcessor<TIntArrayList> {
final TreeMap<Integer, TextRange> reportedRanges = new TreeMap<>();
final TIntObjectHashMap<VirtualFile> reportedFiles = new TIntObjectHashMap<>();
final TIntObjectHashMap<PsiElement> reportedPsi = new TIntObjectHashMap<>();
final TIntIntHashMap reportedOffsetInOtherFiles = new TIntIntHashMap();
final TIntIntHashMap fragmentSize = new TIntIntHashMap();
final TIntLongHashMap fragmentHash = new TIntLongHashMap();
final VirtualFile virtualFile;
final Project project;
final FileIndex myFileIndex;
final boolean mySkipGeneratedCode;
final boolean myFileWithinGeneratedCode;
T myNode;
int myHash;
int myHash2;
DuplicatedCodeProcessor(VirtualFile file, Project project, boolean skipGeneratedCode) {
virtualFile = file;
this.project = project;
myFileIndex = ProjectRootManager.getInstance(project).getFileIndex();
mySkipGeneratedCode = skipGeneratedCode;
myFileWithinGeneratedCode = skipGeneratedCode && GeneratedSourcesFilter.isGeneratedSourceByAnyFilter(file, project);
}
void process(int hash, int hash2, T node) {
ProgressManager.checkCanceled();
myNode = node;
myHash = hash;
myHash2 = hash2;
FileBasedIndex.getInstance().processValues(DuplicatesIndex.NAME, hash, null, this, GlobalSearchScope.projectScope(project));
}
@Override
public boolean process(@NotNull VirtualFile file, TIntArrayList list) {
for(int i = 0, len = list.size(); i < len; i+=2) {
ProgressManager.checkCanceled();
if (list.getQuick(i + 1) != myHash2) continue;
int offset = list.getQuick(i);
if (myFileIndex.isInSourceContent(virtualFile)) {
if (!myFileIndex.isInSourceContent(file)) return true;
if (!TestSourcesFilter.isTestSources(virtualFile, project) && TestSourcesFilter.isTestSources(file, project)) return true;
if (mySkipGeneratedCode) {
if (!myFileWithinGeneratedCode && GeneratedSourcesFilter.isGeneratedSourceByAnyFilter(file, project)) return true;
}
} else if (myFileIndex.isInSourceContent(file)) {
return true;
}
final int startOffset = getStartOffset(myNode);
final int endOffset = getEndOffset(myNode);
if (file.equals(virtualFile) && offset >= startOffset && offset < endOffset) continue;
PsiElement target = getPsi(myNode);
TextRange rangeInElement = getRangeInElement(myNode);
Integer fragmentStartOffsetInteger = startOffset;
SortedMap<Integer,TextRange> map = reportedRanges.subMap(fragmentStartOffsetInteger, endOffset);
int newFragmentSize = !map.isEmpty() ? 0:1;
Iterator<Integer> iterator = map.keySet().iterator();
while(iterator.hasNext()) {
Integer next = iterator.next();
iterator.remove();
reportedFiles.remove(next);
reportedOffsetInOtherFiles.remove(next);
reportedPsi.remove(next);
newFragmentSize += fragmentSize.remove(next);
}
reportedRanges.put(fragmentStartOffsetInteger, rangeInElement);
reportedFiles.put(fragmentStartOffsetInteger, file);
reportedOffsetInOtherFiles.put(fragmentStartOffsetInteger, offset);
reportedPsi.put(fragmentStartOffsetInteger, target);
fragmentSize.put(fragmentStartOffsetInteger, newFragmentSize);
if (newFragmentSize >= MIN_FRAGMENT_SIZE || isLightProfile()) {
fragmentHash.put(fragmentStartOffsetInteger, (myHash & 0xFFFFFFFFL) | ((long)myHash2 << 32));
}
return false;
}
return true;
}
protected abstract TextRange getRangeInElement(T node);
protected abstract PsiElement getPsi(T node);
protected abstract int getStartOffset(T node);
protected abstract int getEndOffset(T node);
protected abstract boolean isLightProfile();
}
}
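Both overridable factory methods above (createNavigateToDupeFix and createShowOtherDupesFix) return null by default, so this base inspection reports duplicates without quick fixes unless a subclass supplies them. The following is only an illustrative sketch, not part of the committed file, of what a concrete subclass might plug in for createNavigateToDupeFix; it assumes the usual IntelliJ platform classes LocalQuickFix and com.intellij.openapi.fileEditor.OpenFileDescriptor.

// Illustrative only: a quick fix that jumps to the duplicated fragment in the other file.
// Assumes com.intellij.openapi.fileEditor.OpenFileDescriptor is imported.
@Override
protected LocalQuickFix createNavigateToDupeFix(@NotNull VirtualFile file, int offsetInOtherFile) {
  return new LocalQuickFix() {
    @NotNull
    @Override
    public String getFamilyName() {
      return "Navigate to duplicate";
    }
    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      // Open the other file at the offset recorded for this duplicate.
      new OpenFileDescriptor(project, file, offsetInOtherFile).navigate(true);
    }
  };
}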
|
Java: Refactored DuplicatesInspection to increase readability - moved local classes to inner (IDEA-144957)
|
platform/duplicates-analysis/src/com/intellij/dupLocator/index/DuplicatesInspectionBase.java
|
Java: Refactored DuplicatesInspection to increase readability - moved local classes to inner (IDEA-144957)
|
|
Java
|
apache-2.0
|
8afc83e89a233a17275cf4d883efc4b2031569a7
| 0
|
OpenHFT/Chronicle-Wire,OpenHFT/Chronicle-Wire
|
/*
* Copyright (C) 2015 higherfrequencytrading.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package net.openhft.chronicle.wire;
import net.openhft.chronicle.bytes.Bytes;
import net.openhft.chronicle.bytes.ref.BinaryLongArrayReference;
import net.openhft.chronicle.bytes.ref.BinaryLongReference;
import net.openhft.chronicle.bytes.ref.TextLongArrayReference;
import net.openhft.chronicle.bytes.ref.TextLongReference;
import net.openhft.chronicle.core.io.IOTools;
import net.openhft.chronicle.core.values.LongArrayValues;
import net.openhft.chronicle.core.values.LongValue;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Supplier;
/**
* A selection of prebuilt wire types.
*/
public enum WireType implements Function<Bytes, Wire> {
TEXT {
@NotNull
@Override
public Wire apply(Bytes bytes) {
return new TextWire(bytes);
}
@Override
public Supplier<LongValue> newLongReference() {
return TextLongReference::new;
}
@Override
public Supplier<LongArrayValues> newLongArrayReference() {
return TextLongArrayReference::new;
}
}, BINARY {
@NotNull
@Override
public Wire apply(Bytes bytes) {
return new BinaryWire(bytes);
}
@Override
public String asString(WriteMarshallable marshallable) {
return asHexString(marshallable);
}
@Override
public <T> T fromString(CharSequence cs) {
return fromHexString(cs);
}
}, FIELDLESS_BINARY {
@NotNull
@Override
public Wire apply(Bytes bytes) {
return new BinaryWire(bytes, false, false, true, Integer.MAX_VALUE, "binary");
}
@Override
public String asString(WriteMarshallable marshallable) {
return asHexString(marshallable);
}
@Override
public <T> T fromString(CharSequence cs) {
return fromHexString(cs);
}
}, COMPRESSED_BINARY {
@NotNull
@Override
public Wire apply(Bytes bytes) {
return new BinaryWire(bytes, false, false, false, COMPRESSED_SIZE, "lzw");
}
@Override
public String asString(WriteMarshallable marshallable) {
return asHexString(marshallable);
}
@Override
public <T> T fromString(CharSequence cs) {
return fromHexString(cs);
}
}, JSON {
@NotNull
@Override
public Wire apply(Bytes bytes) {
return new JSONWire(bytes);
}
}, RAW {
@NotNull
@Override
public Wire apply(Bytes bytes) {
return new RawWire(bytes);
}
@Override
public String asString(WriteMarshallable marshallable) {
return asHexString(marshallable);
}
@Override
public <T> T fromString(CharSequence cs) {
return fromHexString(cs);
}
}, CSV {
@NotNull
@Override
public Wire apply(Bytes bytes) {
return new CSVWire(bytes);
}
},
READ_ANY {
@Override
public Wire apply(@NotNull Bytes bytes) {
int code = bytes.readByte(0);
if (code >= ' ' && code < 127)
return TEXT.apply(bytes);
if (BinaryWireCode.isFieldCode(code))
return FIELDLESS_BINARY.apply(bytes);
return BINARY.apply(bytes);
}
};
static final ThreadLocal<Bytes> bytesTL = ThreadLocal.withInitial(Bytes::allocateElasticDirect);
private static final int COMPRESSED_SIZE = Integer.getInteger("WireType.compressedSize", 128);
static Bytes getBytes() {
Bytes bytes = bytesTL.get();
bytes.clear();
return bytes;
}
public Supplier<LongValue> newLongReference() {
return BinaryLongReference::new;
}
public Supplier<LongArrayValues> newLongArrayReference() {
return BinaryLongArrayReference::new;
}
public String asString(WriteMarshallable marshallable) {
Bytes bytes = getBytes();
Wire wire = apply(bytes);
wire.getValueOut().typedMarshallable(marshallable);
return bytes.toString();
}
public <T> T fromString(CharSequence cs) {
Bytes bytes = getBytes();
bytes.appendUtf8(cs);
Wire wire = apply(bytes);
return wire.getValueIn().typedMarshallable();
}
public <T> T fromFile(String filename) throws IOException {
return (T) (apply(Bytes.wrapForRead(IOTools.readFile(filename))).getValueIn().typedMarshallable());
}
public <T> Map<String, T> fromFileAsMap(String filename, Class<T> tClass) throws IOException {
Map<String, T> map = new LinkedHashMap<>();
Wire wire = apply(Bytes.wrapForRead(IOTools.readFile(filename)));
StringBuilder sb = new StringBuilder();
while (wire.hasMore()) {
wire.readEventName(sb)
.object(tClass, map, (m, o) -> m.put(sb.toString(), o));
}
return map;
}
public <T extends Marshallable> void toFileAsMap(String filename, Map<String, T> map) throws IOException {
toFileAsMap(filename, map, false);
}
public <T extends Marshallable> void toFileAsMap(String filename, Map<String, T> map, boolean compact) throws IOException {
Bytes bytes = getBytes();
Wire wire = apply(bytes);
for (Map.Entry<String, T> entry : map.entrySet()) {
ValueOut valueOut = wire.writeEventName(entry::getKey);
if (compact)
valueOut.leaf();
valueOut.marshallable(entry.getValue());
}
String tempFilename = IOTools.tempName(filename);
IOTools.writeFile(tempFilename, bytes.toByteArray());
File file2 = new File(tempFilename);
if (!file2.renameTo(new File(filename))) {
file2.delete();
throw new IOException("Failed to rename " + tempFilename + " to " + filename);
}
}
public <T> void toFile(String filename, WriteMarshallable marshallable) throws IOException {
Bytes bytes = getBytes();
Wire wire = apply(bytes);
wire.getValueOut().typedMarshallable(marshallable);
String tempFilename = IOTools.tempName(filename);
IOTools.writeFile(tempFilename, bytes.toByteArray());
File file2 = new File(tempFilename);
if (!file2.renameTo(new File(filename))) {
file2.delete();
throw new IOException("Failed to rename " + tempFilename + " to " + filename);
}
}
String asHexString(WriteMarshallable marshallable) {
Bytes bytes = getBytes();
Wire wire = apply(bytes);
wire.getValueOut().typedMarshallable(marshallable);
return bytes.toHexString();
}
<T> T fromHexString(CharSequence s) {
Wire wire = apply(Bytes.fromHexString(s.toString()));
return wire.getValueIn().typedMarshallable();
}
}
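As a hedged usage sketch (not part of the committed file), the toFileAsMap/fromFileAsMap pair added here round-trips a map of Marshallable values through a single file. MyConfig below is a hypothetical class implementing Marshallable, and IOException handling is omitted.

// Illustrative fragment only (would live inside a method that handles IOException);
// MyConfig is a hypothetical Marshallable implementation.
Map<String, MyConfig> settings = new LinkedHashMap<>();
settings.put("primary", new MyConfig());
WireType.TEXT.toFileAsMap("settings.yaml", settings);                       // one event per map entry
Map<String, MyConfig> readBack = WireType.TEXT.fromFileAsMap("settings.yaml", MyConfig.class);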
|
src/main/java/net/openhft/chronicle/wire/WireType.java
|
/*
* Copyright (C) 2015 higherfrequencytrading.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package net.openhft.chronicle.wire;
import net.openhft.chronicle.bytes.Bytes;
import net.openhft.chronicle.bytes.ref.BinaryLongArrayReference;
import net.openhft.chronicle.bytes.ref.BinaryLongReference;
import net.openhft.chronicle.bytes.ref.TextLongArrayReference;
import net.openhft.chronicle.bytes.ref.TextLongReference;
import net.openhft.chronicle.core.io.IOTools;
import net.openhft.chronicle.core.values.LongArrayValues;
import net.openhft.chronicle.core.values.LongValue;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Supplier;
/**
* A selection of prebuilt wire types.
*/
public enum WireType implements Function<Bytes, Wire> {
TEXT {
@NotNull
@Override
public Wire apply(Bytes bytes) {
return new TextWire(bytes);
}
@Override
public Supplier<LongValue> newLongReference() {
return TextLongReference::new;
}
@Override
public Supplier<LongArrayValues> newLongArrayReference() {
return TextLongArrayReference::new;
}
}, BINARY {
@NotNull
@Override
public Wire apply(Bytes bytes) {
return new BinaryWire(bytes);
}
@Override
public String asString(WriteMarshallable marshallable) {
return asHexString(marshallable);
}
@Override
public <T> T fromString(CharSequence cs) {
return fromHexString(cs);
}
}, FIELDLESS_BINARY {
@NotNull
@Override
public Wire apply(Bytes bytes) {
return new BinaryWire(bytes, false, false, true, Integer.MAX_VALUE, "binary");
}
@Override
public String asString(WriteMarshallable marshallable) {
return asHexString(marshallable);
}
@Override
public <T> T fromString(CharSequence cs) {
return fromHexString(cs);
}
}, COMPRESSED_BINARY {
@NotNull
@Override
public Wire apply(Bytes bytes) {
return new BinaryWire(bytes, false, false, false, COMPRESSED_SIZE, "lzw");
}
@Override
public String asString(WriteMarshallable marshallable) {
return asHexString(marshallable);
}
@Override
public <T> T fromString(CharSequence cs) {
return fromHexString(cs);
}
}, JSON {
@NotNull
@Override
public Wire apply(Bytes bytes) {
return new JSONWire(bytes);
}
}, RAW {
@NotNull
@Override
public Wire apply(Bytes bytes) {
return new RawWire(bytes);
}
@Override
public String asString(WriteMarshallable marshallable) {
return asHexString(marshallable);
}
@Override
public <T> T fromString(CharSequence cs) {
return fromHexString(cs);
}
}, CSV {
@NotNull
@Override
public Wire apply(Bytes bytes) {
return new CSVWire(bytes);
}
},
READ_ANY {
@Override
public Wire apply(@NotNull Bytes bytes) {
int code = bytes.readByte(0);
if (code >= ' ' && code < 127)
return TEXT.apply(bytes);
if (BinaryWireCode.isFieldCode(code))
return FIELDLESS_BINARY.apply(bytes);
return BINARY.apply(bytes);
}
};
static final ThreadLocal<Bytes> bytesTL = ThreadLocal.withInitial(Bytes::allocateElasticDirect);
private static final int COMPRESSED_SIZE = Integer.getInteger("WireType.compressedSize", 128);
static Bytes getBytes() {
Bytes bytes = bytesTL.get();
bytes.clear();
return bytes;
}
public Supplier<LongValue> newLongReference() {
return BinaryLongReference::new;
}
public Supplier<LongArrayValues> newLongArrayReference() {
return BinaryLongArrayReference::new;
}
public String asString(WriteMarshallable marshallable) {
Bytes bytes = getBytes();
Wire wire = apply(bytes);
wire.getValueOut().typedMarshallable(marshallable);
return bytes.toString();
}
public <T> T fromString(CharSequence cs) {
Bytes bytes = getBytes();
bytes.appendUtf8(cs);
Wire wire = apply(bytes);
return wire.getValueIn().typedMarshallable();
}
public <T> T fromFile(String filename) throws IOException {
return (T) (apply(Bytes.wrapForRead(IOTools.readFile(filename))).getValueIn().typedMarshallable());
}
public <T> Map<String, T> fromFileAsMap(String filename, Class<T> tClass) throws IOException {
Map<String, T> map = new LinkedHashMap<>();
Wire wire = apply(Bytes.wrapForRead(IOTools.readFile(filename)));
StringBuilder sb = new StringBuilder();
while (wire.hasMore()) {
wire.readEventName(sb)
.object(tClass, map, (m, o) -> m.put(sb.toString(), o));
}
return map;
}
public <T extends Marshallable> void toFile(String filename, Map<String, T> map) throws IOException {
toFile(filename, map, false);
}
public <T extends Marshallable> void toFile(String filename, Map<String, T> map, boolean compact) throws IOException {
Bytes bytes = getBytes();
Wire wire = apply(bytes);
for (Map.Entry<String, T> entry : map.entrySet()) {
ValueOut valueOut = wire.writeEventName(entry::getKey);
if (compact)
valueOut.leaf();
valueOut.marshallable(entry.getValue());
}
String tempFilename = IOTools.tempName(filename);
IOTools.writeFile(tempFilename, bytes.toByteArray());
File file2 = new File(tempFilename);
if (!file2.renameTo(new File(filename))) {
file2.delete();
throw new IOException("Failed to rename " + tempFilename + " to " + filename);
}
}
public <T> void toFile(String filename, WriteMarshallable marshallable) throws IOException {
Bytes bytes = getBytes();
Wire wire = apply(bytes);
wire.getValueOut().typedMarshallable(marshallable);
String tempFilename = IOTools.tempName(filename);
IOTools.writeFile(tempFilename, bytes.toByteArray());
File file2 = new File(tempFilename);
if (!file2.renameTo(new File(filename))) {
file2.delete();
throw new IOException("Failed to rename " + tempFilename + " to " + filename);
}
}
String asHexString(WriteMarshallable marshallable) {
Bytes bytes = getBytes();
Wire wire = apply(bytes);
wire.getValueOut().typedMarshallable(marshallable);
return bytes.toHexString();
}
<T> T fromHexString(CharSequence s) {
Wire wire = apply(Bytes.fromHexString(s.toString()));
return wire.getValueIn().typedMarshallable();
}
}
|
add toFileAsMap
|
src/main/java/net/openhft/chronicle/wire/WireType.java
|
add toFileAsMap
|
|
Java
|
apache-2.0
|
5fcb80737d07bd9f0520abc27a34008e69ad3a9a
| 0
|
milaq/ServeStream,zyjiang08/servestream,zyjiang08/servestream,yomguy/servestream,yomguy/servestream,zyjiang08/servestream,milaq/ServeStream,milaq/ServeStream,yomguy/servestream,milaq/ServeStream,yomguy/servestream,milaq/ServeStream,zyjiang08/servestream,yomguy/servestream,zyjiang08/servestream
|
/*
* ServeStream: A HTTP stream browser/player for Android
* Copyright 2010 William Seemann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sourceforge.servestream.alarm;
import java.net.MalformedURLException;
import net.sourceforge.servestream.R;
import net.sourceforge.servestream.dbutils.Stream;
import net.sourceforge.servestream.dbutils.StreamDatabase;
import net.sourceforge.servestream.utils.MediaFile;
import net.sourceforge.servestream.utils.PlaylistParser;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.res.AssetFileDescriptor;
import android.content.res.Resources;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.media.RingtoneManager;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.Vibrator;
import android.telephony.PhoneStateListener;
import android.telephony.TelephonyManager;
import android.util.Log;
/**
* Manages alarms and vibe. Runs as a service so that it can continue to play
* if another activity overrides the AlarmAlert dialog.
*/
public class AlarmKlaxon extends Service {
private static final String TAG = AlarmKlaxon.class.getName();
/** Play alarm up to 20 minutes before silencing */
private static final int ALARM_TIMEOUT_SECONDS = 20 * 60;
private static final long[] sVibratePattern = new long[] { 500, 500 };
private boolean mPlaying = false;
private Vibrator mVibrator;
private MediaPlayer mMediaPlayer;
private Alarm mCurrentAlarm;
private long mStartTime;
private TelephonyManager mTelephonyManager;
private int mInitialCallState;
// Internal messages
private static final int KILLER = 1000;
private Handler mHandler = new Handler() {
public void handleMessage(Message msg) {
switch (msg.what) {
case KILLER:
Log.v(TAG, "*********** Alarm killer triggered ***********");
sendKillBroadcast((Alarm) msg.obj);
stopSelf();
break;
}
}
};
private PhoneStateListener mPhoneStateListener = new PhoneStateListener() {
@Override
public void onCallStateChanged(int state, String ignored) {
// The user might already be in a call when the alarm fires. When
// we register onCallStateChanged, we get the initial in-call state
// which kills the alarm. Check against the initial call state so
// we don't kill the alarm during a call.
if (state != TelephonyManager.CALL_STATE_IDLE
&& state != mInitialCallState) {
sendKillBroadcast(mCurrentAlarm);
stopSelf();
}
}
};
@Override
public void onCreate() {
mVibrator = (Vibrator) getSystemService(Context.VIBRATOR_SERVICE);
// Listen for incoming calls to kill the alarm.
mTelephonyManager =
(TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);
mTelephonyManager.listen(
mPhoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
AlarmAlertWakeLock.acquireCpuWakeLock(this);
}
@Override
public void onDestroy() {
stop();
// Stop listening for incoming calls.
mTelephonyManager.listen(mPhoneStateListener, 0);
AlarmAlertWakeLock.releaseCpuLock();
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
// No intent, tell the system not to restart us.
if (intent == null) {
stopSelf();
return START_NOT_STICKY;
}
final Alarm alarm = intent.getParcelableExtra(
Alarms.ALARM_INTENT_EXTRA);
if (alarm == null) {
Log.v(TAG, "AlarmKlaxon failed to parse the alarm from the intent");
stopSelf();
return START_NOT_STICKY;
}
if (mCurrentAlarm != null) {
sendKillBroadcast(mCurrentAlarm);
}
play(alarm);
mCurrentAlarm = alarm;
// Record the initial call state here so that the new alarm has the
// newest state.
mInitialCallState = mTelephonyManager.getCallState();
return START_STICKY;
}
private void sendKillBroadcast(Alarm alarm) {
long millis = System.currentTimeMillis() - mStartTime;
int minutes = (int) Math.round(millis / 60000.0);
Intent alarmKilled = new Intent(Alarms.ALARM_KILLED);
alarmKilled.putExtra(Alarms.ALARM_INTENT_EXTRA, alarm);
alarmKilled.putExtra(Alarms.ALARM_KILLED_TIMEOUT, minutes);
sendBroadcast(alarmKilled);
}
// Volume suggested by media team for in-call alarms.
private static final float IN_CALL_VOLUME = 0.125f;
private void play(Alarm alarm) {
// stop() checks to see if we are already playing.
stop();
Log.v(TAG, "AlarmKlaxon.play() " + alarm.id + " alert " + alarm.alert);
if (!alarm.silent) {
StreamDatabase streamdb = new StreamDatabase(this);
Stream stream = streamdb.findStream(alarm.alert);
streamdb.close();
Uri alert = null;
if (stream != null)
alert = stream.getUri();
// Fall back on the default alarm if the database does not have an
// alarm stored.
if (alert == null) {
alert = RingtoneManager.getDefaultUri(
RingtoneManager.TYPE_ALARM);
Log.v(TAG, "Using default alarm: " + alert.toString());
}
// TODO: Reuse mMediaPlayer instead of creating a new one and/or use
// RingtoneManager.
mMediaPlayer = new MediaPlayer();
mMediaPlayer.setOnErrorListener(new OnErrorListener() {
public boolean onError(MediaPlayer mp, int what, int extra) {
Log.e(TAG, "Error occurred while playing audio.");
mp.stop();
mp.release();
mMediaPlayer = null;
return true;
}
});
mMediaPlayer.setOnPreparedListener(new OnPreparedListener() {
public void onPrepared(MediaPlayer mp) {
mMediaPlayer.start();
}
});
try {
// Check if we are in a call. If we are, use the in-call alarm
// resource at a low volume to not disrupt the call.
if (mTelephonyManager.getCallState()
!= TelephonyManager.CALL_STATE_IDLE) {
Log.v(TAG, "Using the in-call alarm");
mMediaPlayer.setVolume(IN_CALL_VOLUME, IN_CALL_VOLUME);
setDataSourceFromResource(getResources(), mMediaPlayer,
R.raw.in_call_alarm);
startAlarm(mMediaPlayer);
} else {
new ParsePlaylistAsyncTask().execute(alert.toString());
}
} catch (Exception ex) {
Log.v(TAG, "Using the fallback ringtone");
// The alert may be on the sd card which could be busy right
// now. Use the fallback ringtone.
try {
// Must reset the media player to clear the error state.
mMediaPlayer.reset();
setDataSourceFromResource(getResources(), mMediaPlayer,
R.raw.fallbackring);
startAlarm(mMediaPlayer);
} catch (Exception ex2) {
// At this point we just don't play anything.
Log.e(TAG, "Failed to play fallback ringtone", ex2);
}
}
}
/* Start the vibrator after everything is ok with the media player */
if (alarm.vibrate) {
mVibrator.vibrate(sVibratePattern, 0);
} else {
mVibrator.cancel();
}
enableKiller(alarm);
mPlaying = true;
mStartTime = System.currentTimeMillis();
}
// Do the common stuff when starting the alarm.
private void startAlarm(MediaPlayer player)
throws java.io.IOException, IllegalArgumentException,
IllegalStateException {
final AudioManager audioManager = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
// do not play alarms if stream volume is 0
// (typically because ringer mode is silent).
if (audioManager.getStreamVolume(AudioManager.STREAM_ALARM) != 0) {
player.setAudioStreamType(AudioManager.STREAM_ALARM);
player.setLooping(true);
player.prepareAsync();
}
}
private void setDataSourceFromResource(Resources resources,
MediaPlayer player, int res) throws java.io.IOException {
AssetFileDescriptor afd = resources.openRawResourceFd(res);
if (afd != null) {
player.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(),
afd.getLength());
afd.close();
}
}
/**
* Stops alarm audio and disables the alarm if it is not snoozed and not
* repeating

*/
public void stop() {
Log.v(TAG, "AlarmKlaxon.stop()");
if (mPlaying) {
mPlaying = false;
Intent alarmDone = new Intent(Alarms.ALARM_DONE_ACTION);
sendBroadcast(alarmDone);
// Stop audio playing
if (mMediaPlayer != null) {
mMediaPlayer.stop();
mMediaPlayer.release();
mMediaPlayer = null;
}
// Stop vibrator
mVibrator.cancel();
}
disableKiller();
}
/**
* Kills alarm audio after ALARM_TIMEOUT_SECONDS, so the alarm
* won't run all day.
*
* This just cancels the audio, but leaves the notification
* popped, so the user will know that the alarm tripped.
*/
private void enableKiller(Alarm alarm) {
mHandler.sendMessageDelayed(mHandler.obtainMessage(KILLER, alarm),
1000 * ALARM_TIMEOUT_SECONDS);
}
private void disableKiller() {
mHandler.removeMessages(KILLER);
}
public class ParsePlaylistAsyncTask extends AsyncTask<String, Void, MediaFile> {
public ParsePlaylistAsyncTask() {
super();
}
@Override
protected void onPreExecute() {
}
@Override
protected MediaFile doInBackground(String... filename) {
MediaFile [] mPlayListFiles = null;
try {
Stream stream = new Stream(filename[0]);
PlaylistParser playlist = PlaylistParser.getPlaylistParser(stream.getURL());
if (playlist != null) {
playlist.retrieveAndParsePlaylist();
mPlayListFiles = playlist.getPlaylistFiles();
} else {
mPlayListFiles = new MediaFile[1];
MediaFile mediaFile = new MediaFile();
mediaFile.setURL(stream.getURL().toString());
mediaFile.setTrackNumber(1);
mPlayListFiles[0] = mediaFile;
}
} catch (MalformedURLException ex) {
ex.printStackTrace();
}
// Guard against the MalformedURLException path above leaving the array null.
return mPlayListFiles != null ? mPlayListFiles[0] : null;
}
@Override
protected void onPostExecute(MediaFile mediaFile) {
try {
mMediaPlayer.setDataSource(mediaFile.getURL().toString());
startAlarm(mMediaPlayer);
} catch (Exception ex) {
ex.printStackTrace();
}
}
}
}
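For context, an illustrative fragment (not part of the file) of how a caller such as an alarm broadcast receiver would typically hand a parcelable Alarm to this service; it relies only on the Alarms.ALARM_INTENT_EXTRA key used above.

// Illustrative fragment only: starting AlarmKlaxon with the alarm as an extra.
Intent klaxon = new Intent(context, AlarmKlaxon.class);
klaxon.putExtra(Alarms.ALARM_INTENT_EXTRA, alarm);   // parcelable Alarm instance
context.startService(klaxon);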
|
src/net/sourceforge/servestream/alarm/AlarmKlaxon.java
|
/*
* ServeStream: A HTTP stream browser/player for Android
* Copyright 2010 William Seemann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sourceforge.servestream.alarm;
import java.net.MalformedURLException;
import net.sourceforge.servestream.R;
import net.sourceforge.servestream.dbutils.Stream;
import net.sourceforge.servestream.dbutils.StreamDatabase;
import net.sourceforge.servestream.utils.MediaFile;
import net.sourceforge.servestream.utils.PlaylistParser;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.res.AssetFileDescriptor;
import android.content.res.Resources;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.media.RingtoneManager;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.Vibrator;
import android.telephony.PhoneStateListener;
import android.telephony.TelephonyManager;
import android.util.Log;
/**
* Manages alarms and vibe. Runs as a service so that it can continue to play
* if another activity overrides the AlarmAlert dialog.
*/
public class AlarmKlaxon extends Service {
private static final String TAG = AlarmKlaxon.class.getName();
/** Play alarm up to 10 minutes before silencing */
private static final int ALARM_TIMEOUT_SECONDS = 10 * 60;
private static final long[] sVibratePattern = new long[] { 500, 500 };
private boolean mPlaying = false;
private Vibrator mVibrator;
private MediaPlayer mMediaPlayer;
private Alarm mCurrentAlarm;
private long mStartTime;
private TelephonyManager mTelephonyManager;
private int mInitialCallState;
// Internal messages
private static final int KILLER = 1000;
private Handler mHandler = new Handler() {
public void handleMessage(Message msg) {
switch (msg.what) {
case KILLER:
Log.v(TAG, "*********** Alarm killer triggered ***********");
sendKillBroadcast((Alarm) msg.obj);
stopSelf();
break;
}
}
};
private PhoneStateListener mPhoneStateListener = new PhoneStateListener() {
@Override
public void onCallStateChanged(int state, String ignored) {
// The user might already be in a call when the alarm fires. When
// we register onCallStateChanged, we get the initial in-call state
// which kills the alarm. Check against the initial call state so
// we don't kill the alarm during a call.
if (state != TelephonyManager.CALL_STATE_IDLE
&& state != mInitialCallState) {
sendKillBroadcast(mCurrentAlarm);
stopSelf();
}
}
};
@Override
public void onCreate() {
mVibrator = (Vibrator) getSystemService(Context.VIBRATOR_SERVICE);
// Listen for incoming calls to kill the alarm.
mTelephonyManager =
(TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);
mTelephonyManager.listen(
mPhoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
AlarmAlertWakeLock.acquireCpuWakeLock(this);
}
@Override
public void onDestroy() {
stop();
// Stop listening for incoming calls.
mTelephonyManager.listen(mPhoneStateListener, 0);
AlarmAlertWakeLock.releaseCpuLock();
}
@Override
public IBinder onBind(Intent intent) {
return null;
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
// No intent, tell the system not to restart us.
if (intent == null) {
stopSelf();
return START_NOT_STICKY;
}
final Alarm alarm = intent.getParcelableExtra(
Alarms.ALARM_INTENT_EXTRA);
if (alarm == null) {
Log.v(TAG, "AlarmKlaxon failed to parse the alarm from the intent");
stopSelf();
return START_NOT_STICKY;
}
if (mCurrentAlarm != null) {
sendKillBroadcast(mCurrentAlarm);
}
play(alarm);
mCurrentAlarm = alarm;
// Record the initial call state here so that the new alarm has the
// newest state.
mInitialCallState = mTelephonyManager.getCallState();
return START_STICKY;
}
private void sendKillBroadcast(Alarm alarm) {
long millis = System.currentTimeMillis() - mStartTime;
int minutes = (int) Math.round(millis / 60000.0);
Intent alarmKilled = new Intent(Alarms.ALARM_KILLED);
alarmKilled.putExtra(Alarms.ALARM_INTENT_EXTRA, alarm);
alarmKilled.putExtra(Alarms.ALARM_KILLED_TIMEOUT, minutes);
sendBroadcast(alarmKilled);
}
// Volume suggested by media team for in-call alarms.
private static final float IN_CALL_VOLUME = 0.125f;
private void play(Alarm alarm) {
// stop() checks to see if we are already playing.
stop();
Log.v(TAG, "AlarmKlaxon.play() " + alarm.id + " alert " + alarm.alert);
if (!alarm.silent) {
StreamDatabase streamdb = new StreamDatabase(this);
Stream stream = streamdb.findStream(alarm.alert);
streamdb.close();
Uri alert = null;
if (stream != null)
alert = stream.getUri();
// Fall back on the default alarm if the database does not have an
// alarm stored.
if (alert == null) {
alert = RingtoneManager.getDefaultUri(
RingtoneManager.TYPE_ALARM);
Log.v(TAG, "Using default alarm: " + alert.toString());
}
// TODO: Reuse mMediaPlayer instead of creating a new one and/or use
// RingtoneManager.
mMediaPlayer = new MediaPlayer();
mMediaPlayer.setOnErrorListener(new OnErrorListener() {
public boolean onError(MediaPlayer mp, int what, int extra) {
Log.e(TAG, "Error occurred while playing audio.");
mp.stop();
mp.release();
mMediaPlayer = null;
return true;
}
});
mMediaPlayer.setOnPreparedListener(new OnPreparedListener() {
public void onPrepared(MediaPlayer mp) {
mMediaPlayer.start();
}
});
try {
// Check if we are in a call. If we are, use the in-call alarm
// resource at a low volume to not disrupt the call.
if (mTelephonyManager.getCallState()
!= TelephonyManager.CALL_STATE_IDLE) {
Log.v(TAG, "Using the in-call alarm");
mMediaPlayer.setVolume(IN_CALL_VOLUME, IN_CALL_VOLUME);
setDataSourceFromResource(getResources(), mMediaPlayer,
R.raw.in_call_alarm);
startAlarm(mMediaPlayer);
} else {
new ParsePlaylistAsyncTask().execute(alert.toString());
}
} catch (Exception ex) {
Log.v(TAG, "Using the fallback ringtone");
// The alert may be on the sd card which could be busy right
// now. Use the fallback ringtone.
try {
// Must reset the media player to clear the error state.
mMediaPlayer.reset();
setDataSourceFromResource(getResources(), mMediaPlayer,
R.raw.fallbackring);
startAlarm(mMediaPlayer);
} catch (Exception ex2) {
// At this point we just don't play anything.
Log.e(TAG, "Failed to play fallback ringtone", ex2);
}
}
}
/* Start the vibrator after everything is ok with the media player */
if (alarm.vibrate) {
mVibrator.vibrate(sVibratePattern, 0);
} else {
mVibrator.cancel();
}
enableKiller(alarm);
mPlaying = true;
mStartTime = System.currentTimeMillis();
}
// Do the common stuff when starting the alarm.
private void startAlarm(MediaPlayer player)
throws java.io.IOException, IllegalArgumentException,
IllegalStateException {
final AudioManager audioManager = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
// do not play alarms if stream volume is 0
// (typically because ringer mode is silent).
if (audioManager.getStreamVolume(AudioManager.STREAM_ALARM) != 0) {
player.setAudioStreamType(AudioManager.STREAM_ALARM);
player.setLooping(true);
player.prepareAsync();
}
}
private void setDataSourceFromResource(Resources resources,
MediaPlayer player, int res) throws java.io.IOException {
AssetFileDescriptor afd = resources.openRawResourceFd(res);
if (afd != null) {
player.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(),
afd.getLength());
afd.close();
}
}
/**
* Stops alarm audio and disables the alarm if it is not snoozed and not
* repeating
*/
public void stop() {
Log.v(TAG, "AlarmKlaxon.stop()");
if (mPlaying) {
mPlaying = false;
Intent alarmDone = new Intent(Alarms.ALARM_DONE_ACTION);
sendBroadcast(alarmDone);
// Stop audio playing
if (mMediaPlayer != null) {
mMediaPlayer.stop();
mMediaPlayer.release();
mMediaPlayer = null;
}
// Stop vibrator
mVibrator.cancel();
}
disableKiller();
}
/**
* Kills alarm audio after ALARM_TIMEOUT_SECONDS, so the alarm
* won't run all day.
*
* This just cancels the audio, but leaves the notification
* popped, so the user will know that the alarm tripped.
*/
private void enableKiller(Alarm alarm) {
mHandler.sendMessageDelayed(mHandler.obtainMessage(KILLER, alarm),
1000 * ALARM_TIMEOUT_SECONDS);
}
private void disableKiller() {
mHandler.removeMessages(KILLER);
}
public class ParsePlaylistAsyncTask extends AsyncTask<String, Void, MediaFile> {
public ParsePlaylistAsyncTask() {
super();
}
@Override
protected void onPreExecute() {
}
@Override
protected MediaFile doInBackground(String... filename) {
MediaFile [] mPlayListFiles = null;
try {
Stream stream = new Stream(filename[0]);
PlaylistParser playlist = PlaylistParser.getPlaylistParser(stream.getURL());
if (playlist != null) {
playlist.retrieveAndParsePlaylist();
mPlayListFiles = playlist.getPlaylistFiles();
} else {
mPlayListFiles = new MediaFile[1];
MediaFile mediaFile = new MediaFile();
mediaFile.setURL(stream.getURL().toString());
mediaFile.setTrackNumber(1);
mPlayListFiles[0] = mediaFile;
}
} catch (MalformedURLException ex) {
ex.printStackTrace();
}
// Guard against the MalformedURLException path above leaving the array null.
return mPlayListFiles != null ? mPlayListFiles[0] : null;
}
@Override
protected void onPostExecute(MediaFile mediaFile) {
try {
mMediaPlayer.setDataSource(mediaFile.getURL().toString());
startAlarm(mMediaPlayer);
} catch (Exception ex) {
ex.printStackTrace();
}
}
}
}
|
Changed alarm timeout to 20 minutes
git-svn-id: 0a6f2aeebe3a339de06b6162ef5a6b6b6e2abc00@739 b8d320ad-ce4d-463a-8333-51e6c77ad906
|
src/net/sourceforge/servestream/alarm/AlarmKlaxon.java
|
Changed alarm timeout to 20 minutes
|
|
Java
|
apache-2.0
|
f52bcf1e328fbbd3372c97ef5047f5a45e599c2f
| 0
|
nikhilvibhav/camel,pmoerenhout/camel,cunningt/camel,tadayosi/camel,mcollovati/camel,tadayosi/camel,tadayosi/camel,tadayosi/camel,gnodet/camel,christophd/camel,apache/camel,cunningt/camel,alvinkwekel/camel,nicolaferraro/camel,gnodet/camel,cunningt/camel,gnodet/camel,cunningt/camel,pax95/camel,tadayosi/camel,adessaigne/camel,pax95/camel,cunningt/camel,nikhilvibhav/camel,adessaigne/camel,tdiesler/camel,gnodet/camel,pax95/camel,mcollovati/camel,pmoerenhout/camel,pax95/camel,apache/camel,DariusX/camel,adessaigne/camel,adessaigne/camel,nicolaferraro/camel,nikhilvibhav/camel,gnodet/camel,mcollovati/camel,pax95/camel,adessaigne/camel,christophd/camel,alvinkwekel/camel,apache/camel,mcollovati/camel,apache/camel,nicolaferraro/camel,christophd/camel,tdiesler/camel,alvinkwekel/camel,tadayosi/camel,pmoerenhout/camel,alvinkwekel/camel,christophd/camel,apache/camel,tdiesler/camel,DariusX/camel,tdiesler/camel,nikhilvibhav/camel,christophd/camel,christophd/camel,pmoerenhout/camel,pmoerenhout/camel,nicolaferraro/camel,cunningt/camel,pax95/camel,tdiesler/camel,DariusX/camel,adessaigne/camel,DariusX/camel,pmoerenhout/camel,apache/camel,tdiesler/camel
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.aws2.msk;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.InvalidPayloadException;
import org.apache.camel.Message;
import org.apache.camel.support.DefaultProducer;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.URISupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.awscore.exception.AwsServiceException;
import software.amazon.awssdk.services.kafka.KafkaClient;
import software.amazon.awssdk.services.kafka.model.BrokerNodeGroupInfo;
import software.amazon.awssdk.services.kafka.model.CreateClusterRequest;
import software.amazon.awssdk.services.kafka.model.CreateClusterResponse;
import software.amazon.awssdk.services.kafka.model.DeleteClusterRequest;
import software.amazon.awssdk.services.kafka.model.DeleteClusterResponse;
import software.amazon.awssdk.services.kafka.model.DescribeClusterRequest;
import software.amazon.awssdk.services.kafka.model.DescribeClusterResponse;
import software.amazon.awssdk.services.kafka.model.ListClustersRequest;
import software.amazon.awssdk.services.kafka.model.ListClustersResponse;
/**
* A Producer which sends messages to the Amazon MSK Service
* <a href="http://aws.amazon.com/msk/">AWS MSK</a>
*/
public class MSK2Producer extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(MSK2Producer.class);
private transient String mskProducerToString;
public MSK2Producer(Endpoint endpoint) {
super(endpoint);
}
@Override
public void process(Exchange exchange) throws Exception {
switch (determineOperation(exchange)) {
case listClusters:
listClusters(getEndpoint().getMskClient(), exchange);
break;
case createCluster:
createCluster(getEndpoint().getMskClient(), exchange);
break;
case deleteCluster:
deleteCluster(getEndpoint().getMskClient(), exchange);
break;
case describeCluster:
describeCluster(getEndpoint().getMskClient(), exchange);
break;
default:
throw new IllegalArgumentException("Unsupported operation");
}
}
private MSK2Operations determineOperation(Exchange exchange) {
MSK2Operations operation = exchange.getIn().getHeader(MSK2Constants.OPERATION, MSK2Operations.class);
if (operation == null) {
operation = getConfiguration().getOperation();
}
return operation;
}
protected MSK2Configuration getConfiguration() {
return getEndpoint().getConfiguration();
}
@Override
public String toString() {
if (mskProducerToString == null) {
mskProducerToString = "MSKProducer[" + URISupport.sanitizeUri(getEndpoint().getEndpointUri()) + "]";
}
return mskProducerToString;
}
@Override
public MSK2Endpoint getEndpoint() {
return (MSK2Endpoint)super.getEndpoint();
}
private void listClusters(KafkaClient mskClient, Exchange exchange) throws InvalidPayloadException {
if (getConfiguration().isPojoRequest()) {
Object payload = exchange.getIn().getMandatoryBody();
if (payload instanceof ListClustersRequest) {
ListClustersResponse result;
try {
result = mskClient.listClusters((ListClustersRequest) payload);
} catch (AwsServiceException ase) {
LOG.trace("List Clusters command returned the error code {}", ase.awsErrorDetails().errorCode());
throw ase;
}
Message message = getMessageForResponse(exchange);
message.setBody(result);
}
} else {
ListClustersRequest.Builder builder = ListClustersRequest.builder();
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(MSK2Constants.CLUSTERS_FILTER))) {
String filter = exchange.getIn().getHeader(MSK2Constants.CLUSTERS_FILTER, String.class);
builder.clusterNameFilter(filter);
}
ListClustersResponse result;
try {
result = mskClient.listClusters(builder.build());
} catch (AwsServiceException ase) {
LOG.trace("List Clusters command returned the error code {}", ase.awsErrorDetails().errorCode());
throw ase;
}
Message message = getMessageForResponse(exchange);
message.setBody(result);
}
}
private void createCluster(KafkaClient mskClient, Exchange exchange) throws InvalidPayloadException {
if (getConfiguration().isPojoRequest()) {
Object payload = exchange.getIn().getMandatoryBody();
if (payload instanceof CreateClusterRequest) {
CreateClusterResponse response;
try {
response = mskClient.createCluster((CreateClusterRequest) payload);
} catch (AwsServiceException ase) {
LOG.trace("Create Cluster command returned the error code {}", ase.awsErrorDetails().errorCode());
throw ase;
}
Message message = getMessageForResponse(exchange);
message.setBody(response);
}
} else {
CreateClusterRequest.Builder builder = CreateClusterRequest.builder();
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(MSK2Constants.CLUSTER_NAME))) {
String name = exchange.getIn().getHeader(MSK2Constants.CLUSTER_NAME, String.class);
builder.clusterName(name);
} else {
throw new IllegalArgumentException("Cluster Name must be specified");
}
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(MSK2Constants.CLUSTER_KAFKA_VERSION))) {
String version = exchange.getIn().getHeader(MSK2Constants.CLUSTER_KAFKA_VERSION, String.class);
builder.kafkaVersion(version);
} else {
throw new IllegalArgumentException("Kafka Version must be specified");
}
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(MSK2Constants.BROKER_NODES_NUMBER))) {
Integer nodesNumber = exchange.getIn().getHeader(MSK2Constants.BROKER_NODES_NUMBER, Integer.class);
builder.numberOfBrokerNodes(nodesNumber);
} else {
throw new IllegalArgumentException("Kafka Version must be specified");
}
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(MSK2Constants.BROKER_NODES_GROUP_INFO))) {
BrokerNodeGroupInfo brokerNodesGroupInfo = exchange.getIn().getHeader(MSK2Constants.BROKER_NODES_GROUP_INFO, BrokerNodeGroupInfo.class);
builder.brokerNodeGroupInfo(brokerNodesGroupInfo);
} else {
throw new IllegalArgumentException("BrokerNodeGroupInfo must be specified");
}
CreateClusterResponse response;
try {
response = mskClient.createCluster(builder.build());
} catch (AwsServiceException ase) {
LOG.trace("Create Cluster command returned the error code {}", ase.awsErrorDetails().errorCode());
throw ase;
}
Message message = getMessageForResponse(exchange);
message.setBody(response);
}
}
private void deleteCluster(KafkaClient mskClient, Exchange exchange) {
DeleteClusterRequest.Builder builder = DeleteClusterRequest.builder();
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(MSK2Constants.CLUSTER_ARN))) {
String arn = exchange.getIn().getHeader(MSK2Constants.CLUSTER_ARN, String.class);
builder.clusterArn(arn);
} else {
throw new IllegalArgumentException("Cluster ARN must be specified");
}
DeleteClusterResponse result;
try {
result = mskClient.deleteCluster(builder.build());
} catch (AwsServiceException ase) {
LOG.trace("Delete Cluster command returned the error code {}", ase.awsErrorDetails().errorCode());
throw ase;
}
Message message = getMessageForResponse(exchange);
message.setBody(result);
}
private void describeCluster(KafkaClient mskClient, Exchange exchange) {
DescribeClusterRequest.Builder builder = DescribeClusterRequest.builder();
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(MSK2Constants.CLUSTER_ARN))) {
String arn = exchange.getIn().getHeader(MSK2Constants.CLUSTER_ARN, String.class);
builder.clusterArn(arn);
} else {
throw new IllegalArgumentException("Cluster ARN must be specified");
}
DescribeClusterResponse result;
try {
result = mskClient.describeCluster(builder.build());
} catch (AwsServiceException ase) {
LOG.trace("Delete Cluster command returned the error code {}", ase.awsErrorDetails().errorCode());
throw ase;
}
Message message = getMessageForResponse(exchange);
message.setBody(result);
}
public static Message getMessageForResponse(final Exchange exchange) {
return exchange.getMessage();
}
}
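A hedged Camel route sketch (not part of the committed file) showing how the operation handled by process() can be selected per exchange via the MSK2Constants.OPERATION header. The endpoint URI options shown (accessKey, secretKey, region, operation) are assumptions based on the usual aws2-* component conventions rather than anything defined in this file.

// Illustrative fragment only, inside a RouteBuilder.configure() method.
from("direct:listClusters")
    .setHeader(MSK2Constants.OPERATION, constant(MSK2Operations.listClusters))
    .to("aws2-msk://clusters?accessKey=RAW(xxx)&secretKey=RAW(yyy)&region=eu-west-1&operation=listClusters");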
|
components/camel-aws2-msk/src/main/java/org/apache/camel/component/aws2/msk/MSK2Producer.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.aws2.msk;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.InvalidPayloadException;
import org.apache.camel.Message;
import org.apache.camel.support.DefaultProducer;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.URISupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.awscore.exception.AwsServiceException;
import software.amazon.awssdk.services.kafka.KafkaClient;
import software.amazon.awssdk.services.kafka.model.BrokerNodeGroupInfo;
import software.amazon.awssdk.services.kafka.model.CreateClusterRequest;
import software.amazon.awssdk.services.kafka.model.CreateClusterResponse;
import software.amazon.awssdk.services.kafka.model.DeleteClusterRequest;
import software.amazon.awssdk.services.kafka.model.DeleteClusterResponse;
import software.amazon.awssdk.services.kafka.model.DescribeClusterRequest;
import software.amazon.awssdk.services.kafka.model.DescribeClusterResponse;
import software.amazon.awssdk.services.kafka.model.ListClustersRequest;
import software.amazon.awssdk.services.kafka.model.ListClustersResponse;
/**
* A Producer which sends messages to the Amazon MSK Service
* <a href="http://aws.amazon.com/msk/">AWS MSK</a>
*/
public class MSK2Producer extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(MSK2Producer.class);
private transient String mskProducerToString;
public MSK2Producer(Endpoint endpoint) {
super(endpoint);
}
@Override
public void process(Exchange exchange) throws Exception {
switch (determineOperation(exchange)) {
case listClusters:
listClusters(getEndpoint().getMskClient(), exchange);
break;
case createCluster:
createCluster(getEndpoint().getMskClient(), exchange);
break;
case deleteCluster:
deleteCluster(getEndpoint().getMskClient(), exchange);
break;
case describeCluster:
describeCluster(getEndpoint().getMskClient(), exchange);
break;
default:
throw new IllegalArgumentException("Unsupported operation");
}
}
private MSK2Operations determineOperation(Exchange exchange) {
MSK2Operations operation = exchange.getIn().getHeader(MSK2Constants.OPERATION, MSK2Operations.class);
if (operation == null) {
operation = getConfiguration().getOperation();
}
return operation;
}
protected MSK2Configuration getConfiguration() {
return getEndpoint().getConfiguration();
}
@Override
public String toString() {
if (mskProducerToString == null) {
mskProducerToString = "MSKProducer[" + URISupport.sanitizeUri(getEndpoint().getEndpointUri()) + "]";
}
return mskProducerToString;
}
@Override
public MSK2Endpoint getEndpoint() {
return (MSK2Endpoint)super.getEndpoint();
}
private void listClusters(KafkaClient mskClient, Exchange exchange) throws InvalidPayloadException {
if (getConfiguration().isPojoRequest()) {
Object payload = exchange.getIn().getMandatoryBody();
if (payload instanceof ListClustersRequest) {
ListClustersResponse result;
try {
result = mskClient.listClusters((ListClustersRequest) payload);
} catch (AwsServiceException ase) {
LOG.trace("List Clusters command returned the error code {}", ase.awsErrorDetails().errorCode());
throw ase;
}
Message message = getMessageForResponse(exchange);
message.setBody(result);
}
} else {
ListClustersRequest.Builder builder = ListClustersRequest.builder();
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(MSK2Constants.CLUSTERS_FILTER))) {
String filter = exchange.getIn().getHeader(MSK2Constants.CLUSTERS_FILTER, String.class);
builder.clusterNameFilter(filter);
}
ListClustersResponse result;
try {
result = mskClient.listClusters(builder.build());
} catch (AwsServiceException ase) {
LOG.trace("List Clusters command returned the error code {}", ase.awsErrorDetails().errorCode());
throw ase;
}
Message message = getMessageForResponse(exchange);
message.setBody(result);
}
}
private void createCluster(KafkaClient mskClient, Exchange exchange) {
CreateClusterRequest.Builder builder = CreateClusterRequest.builder();
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(MSK2Constants.CLUSTER_NAME))) {
String name = exchange.getIn().getHeader(MSK2Constants.CLUSTER_NAME, String.class);
builder.clusterName(name);
} else {
throw new IllegalArgumentException("Cluster Name must be specified");
}
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(MSK2Constants.CLUSTER_KAFKA_VERSION))) {
String version = exchange.getIn().getHeader(MSK2Constants.CLUSTER_KAFKA_VERSION, String.class);
builder.kafkaVersion(version);
} else {
throw new IllegalArgumentException("Kafka Version must be specified");
}
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(MSK2Constants.BROKER_NODES_NUMBER))) {
Integer nodesNumber = exchange.getIn().getHeader(MSK2Constants.BROKER_NODES_NUMBER, Integer.class);
builder.numberOfBrokerNodes(nodesNumber);
} else {
throw new IllegalArgumentException("Kafka Version must be specified");
}
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(MSK2Constants.BROKER_NODES_GROUP_INFO))) {
BrokerNodeGroupInfo brokerNodesGroupInfo = exchange.getIn().getHeader(MSK2Constants.BROKER_NODES_GROUP_INFO, BrokerNodeGroupInfo.class);
builder.brokerNodeGroupInfo(brokerNodesGroupInfo);
} else {
throw new IllegalArgumentException("BrokerNodeGroupInfo must be specified");
}
CreateClusterResponse response;
try {
response = mskClient.createCluster(builder.build());
} catch (AwsServiceException ase) {
LOG.trace("Create Cluster command returned the error code {}", ase.awsErrorDetails().errorCode());
throw ase;
}
Message message = getMessageForResponse(exchange);
message.setBody(response);
}
private void deleteCluster(KafkaClient mskClient, Exchange exchange) {
DeleteClusterRequest.Builder builder = DeleteClusterRequest.builder();
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(MSK2Constants.CLUSTER_ARN))) {
String arn = exchange.getIn().getHeader(MSK2Constants.CLUSTER_ARN, String.class);
builder.clusterArn(arn);
} else {
throw new IllegalArgumentException("Cluster ARN must be specified");
}
DeleteClusterResponse result;
try {
result = mskClient.deleteCluster(builder.build());
} catch (AwsServiceException ase) {
LOG.trace("Delete Cluster command returned the error code {}", ase.awsErrorDetails().errorCode());
throw ase;
}
Message message = getMessageForResponse(exchange);
message.setBody(result);
}
private void describeCluster(KafkaClient mskClient, Exchange exchange) {
DescribeClusterRequest.Builder builder = DescribeClusterRequest.builder();
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(MSK2Constants.CLUSTER_ARN))) {
String arn = exchange.getIn().getHeader(MSK2Constants.CLUSTER_ARN, String.class);
builder.clusterArn(arn);
} else {
throw new IllegalArgumentException("Cluster ARN must be specified");
}
DescribeClusterResponse result;
try {
result = mskClient.describeCluster(builder.build());
} catch (AwsServiceException ase) {
LOG.trace("Delete Cluster command returned the error code {}", ase.awsErrorDetails().errorCode());
throw ase;
}
Message message = getMessageForResponse(exchange);
message.setBody(result);
}
public static Message getMessageForResponse(final Exchange exchange) {
return exchange.getMessage();
}
}
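For illustration only (this route is not part of the component sources above), the POJO-request support referenced in the commit message below lets a caller build the SDK request itself and hand it to the endpoint unchanged. A minimal sketch, assuming credentials are already configured on the component and using an illustrative endpoint label "demo"; only the operation and pojoRequest URI options are actual aws2-msk options.
// Hypothetical usage sketch, separate from the producer file above.
import org.apache.camel.builder.RouteBuilder;
import software.amazon.awssdk.services.kafka.model.ListClustersRequest;

class MskPojoRouteSketch extends RouteBuilder {
    @Override
    public void configure() {
        from("direct:listClusters")
            // build the SDK request ourselves; with pojoRequest=true the producer passes it straight to the client
            .process(e -> e.getMessage().setBody(
                    ListClustersRequest.builder().clusterNameFilter("demo").build()))
            .to("aws2-msk://demo?operation=listClusters&pojoRequest=true");
    }
}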
|
CAMEL-14868 - Camel-AWS2-*: Where possible, give the possibility to the end user to pass an AWS Request pojo as body, aws2-msk create broker
|
components/camel-aws2-msk/src/main/java/org/apache/camel/component/aws2/msk/MSK2Producer.java
|
CAMEL-14868 - Camel-AWS2-*: Where possible, give the possibility to the end user to pass an AWS Request pojo as body, aws2-msk create broker
|
|
Java
|
apache-2.0
|
2402b316dfb0c02941492216a16676dbcaff50b2
| 0
|
ppavlidis/baseCode,ppavlidis/baseCode
|
/*
* The baseCode project
*
* Copyright (c) 2008 University of British Columbia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ubic.basecode.ontology.search;
import java.util.Collection;
import java.util.HashSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Selector;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.vocabulary.RDFS;
/**
* Used to limit which parts of ontologies get indexed for searching. This avoids indexing some parts of ontologies such
* as "examples" and "definitions" but this is set up in a partly ontology-specific way (that is, hard-coded).
*
* @author paul
* @version $Id$
*/
public class IndexerSelector implements Selector {
private static Logger log = LoggerFactory.getLogger( IndexerSelector.class );
private final Collection<String> unwantedForIndexing;
public IndexerSelector() {
// these are predicates that in general should not be useful for indexing
unwantedForIndexing = new HashSet<String>();
unwantedForIndexing.add( RDFS.comment.getURI() );
unwantedForIndexing.add( RDFS.seeAlso.getURI() );
unwantedForIndexing.add( RDFS.isDefinedBy.getURI() );
unwantedForIndexing.add( "http://purl.org/dc/elements/1.1/creator" );
unwantedForIndexing.add( "http://purl.org/dc/elements/1.1/contributor" );
unwantedForIndexing.add( "http://purl.org/dc/elements/1.1/source" );
unwantedForIndexing.add( "http://purl.org/dc/elements/1.1/title" );
unwantedForIndexing.add( "http://purl.org/dc/elements/1.1/description" );
unwantedForIndexing.add( "http://www.w3.org/2002/07/owl#inverseOf" );
unwantedForIndexing.add( "http://www.w3.org/2002/07/owl#disjointWith" );
unwantedForIndexing.add( "http://www.w3.org/2004/02/skos/core#example" );
unwantedForIndexing.add( "http://www.w3.org/2004/02/skos/core#editorialNote" );
unwantedForIndexing.add( "http://www.w3.org/2004/02/skos/core#historyNote" );
unwantedForIndexing.add( "http://www.w3.org/2004/02/skos/core#definition" );
unwantedForIndexing.add( "http://neurolex.org/wiki/Special:URIResolver/Property-3AExample" );
unwantedForIndexing.add( "http://www.ebi.ac.uk/efo/definition" );
unwantedForIndexing.add( "http://www.ebi.ac.uk/efo/bioportal_provenance" );
unwantedForIndexing.add( "http://www.ebi.ac.uk/efo/gwas_trait" );
unwantedForIndexing.add( "http://www.ebi.ac.uk/efo/definition_editor" );
unwantedForIndexing.add( "http://www.ebi.ac.uk/efo/example_of_usage" );
unwantedForIndexing.add( "http://www.geneontology.org/formats/oboInOwl#Definition" );
unwantedForIndexing.add( "http://purl.obolibrary.org/obo/IAO_0000115" ); // 'definition' - too often has extra
// junk.
unwantedForIndexing.add( "http://purl.obolibrary.org/obo/IAO_0000112" ); // 'example of usage
unwantedForIndexing.add( "http://purl.obolibrary.org/obo/IAO_0000116" ); // editor note.
unwantedForIndexing.add( "http://purl.obolibrary.org/obo/IAO_0000117" ); // term editor
unwantedForIndexing.add( "http://purl.obolibrary.org/obo/IAO_0000114" ); // curation status.
unwantedForIndexing.add( "http://purl.obolibrary.org/obo/IAO_0000232" ); // curator note.
unwantedForIndexing
.add( "http://ontology.neuinfo.org/NIF/Backend/OBO_annotation_properties.owl#externallySourcedDefinition" );
unwantedForIndexing
.add( "http://ontology.neuinfo.org/NIF/Backend/BIRNLex_annotation_properties.owl#birnlexDefinition" );
unwantedForIndexing
.add( "http://ontology.neuinfo.org/NIF/Backend/BIRNLex_annotation_properties.owl#hasBirnlexCurator" );
}
/*
* (non-Javadoc)
*
* @see com.hp.hpl.jena.rdf.model.Selector#getObject()
*/
@Override
public RDFNode getObject() {
return null;
}
/*
* (non-Javadoc)
*
* @see com.hp.hpl.jena.rdf.model.Selector#getPredicate()
*/
@Override
public Property getPredicate() {
return null;
}
/*
* (non-Javadoc)
*
* @see com.hp.hpl.jena.rdf.model.Selector#getSubject()
*/
@Override
public Resource getSubject() {
return null;
}
/*
* (non-Javadoc)
*
* @see com.hp.hpl.jena.rdf.model.Selector#isSimple()
*/
@Override
public boolean isSimple() {
return false;
}
/*
* (non-Javadoc)
*
* @see com.hp.hpl.jena.rdf.model.Selector#test(com.hp.hpl.jena.rdf.model.Statement)
*/
@Override
public boolean test( Statement s ) {
boolean retain = !unwantedForIndexing.contains( s.getPredicate().getURI() );
// bit of a special case ...
if ( s.getPredicate().getURI().equals( "http://www.w3.org/2002/07/owl#annotatedProperty" ) ) {
retain = !unwantedForIndexing.contains( s.getObject().toString() );
}
if ( !retain && log.isDebugEnabled() ) {
log.debug( "Removed: " + s );
}
return retain;
}
}
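As a rough illustration of how a Selector like this is typically consumed (an assumption, not code from the project), Jena's Model.listStatements(Selector) walks only the statements the selector retains, i.e. the ones worth indexing; the ontology location below is a placeholder.
// Hypothetical usage sketch; only listStatements(Selector) is standard Jena API.
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import ubic.basecode.ontology.search.IndexerSelector;

class IndexerSelectorUsageSketch {
    public static void main(String[] args) {
        Model model = ModelFactory.createDefaultModel();
        model.read("file:ontology.owl"); // placeholder ontology document
        StmtIterator it = model.listStatements(new IndexerSelector());
        while (it.hasNext()) {
            System.out.println(it.nextStatement()); // statements retained for indexing
        }
    }
}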
|
src/ubic/basecode/ontology/search/IndexerSelector.java
|
/*
* The Gemma project
*
* Copyright (c) 2008 University of British Columbia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ubic.basecode.ontology.search;
import java.util.Collection;
import java.util.HashSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Selector;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.vocabulary.RDFS;
/**
* Used to limit which parts of ontologies get indexed for searching. This avoids indexing some parts of ontologies such
* as "examples" and "definitions" but this is set up in a partly ontology-specific way (that is, hard-coded).
*
* @author paul
* @version $Id$
*/
public class IndexerSelector implements Selector {
private static Logger log = LoggerFactory.getLogger( IndexerSelector.class );
private final Collection<String> unwantedForIndexing;
public IndexerSelector() {
// these are predicates that in general should not be useful for indexing
unwantedForIndexing = new HashSet<String>();
unwantedForIndexing.add( RDFS.comment.getURI() );
unwantedForIndexing.add( RDFS.seeAlso.getURI() );
unwantedForIndexing.add( RDFS.isDefinedBy.getURI() );
unwantedForIndexing.add( "http://purl.org/dc/elements/1.1/creator" );
unwantedForIndexing.add( "http://purl.org/dc/elements/1.1/contributor" );
unwantedForIndexing.add( "http://purl.org/dc/elements/1.1/source" );
unwantedForIndexing.add( "http://purl.org/dc/elements/1.1/title" );
unwantedForIndexing.add( "http://purl.org/dc/elements/1.1/description" );
unwantedForIndexing.add( "http://www.w3.org/2002/07/owl#inverseOf" );
unwantedForIndexing.add( "http://www.w3.org/2002/07/owl#disjointWith" );
unwantedForIndexing.add( "http://www.w3.org/2004/02/skos/core#example" );
unwantedForIndexing.add( "http://www.w3.org/2004/02/skos/core#editorialNote" );
unwantedForIndexing.add( "http://www.w3.org/2004/02/skos/core#historyNote" );
unwantedForIndexing.add( "http://www.w3.org/2004/02/skos/core#definition" );
unwantedForIndexing.add( "http://neurolex.org/wiki/Special:URIResolver/Property-3AExample" );
unwantedForIndexing.add( "http://www.ebi.ac.uk/efo/definition" );
unwantedForIndexing.add( "http://www.ebi.ac.uk/efo/bioportal_provenance" );
unwantedForIndexing.add( "http://www.ebi.ac.uk/efo/gwas_trait" );
unwantedForIndexing.add( "http://www.ebi.ac.uk/efo/definition_editor" );
unwantedForIndexing.add( "http://www.ebi.ac.uk/efo/example_of_usage" );
unwantedForIndexing.add( "http://www.geneontology.org/formats/oboInOwl#Definition" );
unwantedForIndexing.add( "http://purl.obolibrary.org/obo/IAO_0000115" ); // 'definition' - too often has extra
// junk.
unwantedForIndexing.add( "http://purl.obolibrary.org/obo/IAO_0000112" ); // 'example of usage
unwantedForIndexing.add( "http://purl.obolibrary.org/obo/IAO_0000116" ); // editor note.
unwantedForIndexing.add( "http://purl.obolibrary.org/obo/IAO_0000117" ); // term editor
unwantedForIndexing.add( "http://purl.obolibrary.org/obo/IAO_0000114" ); // curation status.
unwantedForIndexing.add( "http://purl.obolibrary.org/obo/IAO_0000232" ); // curator note.
unwantedForIndexing
.add( "http://ontology.neuinfo.org/NIF/Backend/OBO_annotation_properties.owl#externallySourcedDefinition" );
unwantedForIndexing
.add( "http://ontology.neuinfo.org/NIF/Backend/BIRNLex_annotation_properties.owl#birnlexDefinition" );
unwantedForIndexing
.add( "http://ontology.neuinfo.org/NIF/Backend/BIRNLex_annotation_properties.owl#hasBirnlexCurator" );
}
/*
* (non-Javadoc)
*
* @see com.hp.hpl.jena.rdf.model.Selector#getObject()
*/
@Override
public RDFNode getObject() {
return null;
}
/*
* (non-Javadoc)
*
* @see com.hp.hpl.jena.rdf.model.Selector#getPredicate()
*/
@Override
public Property getPredicate() {
return null;
}
/*
* (non-Javadoc)
*
* @see com.hp.hpl.jena.rdf.model.Selector#getSubject()
*/
@Override
public Resource getSubject() {
return null;
}
/*
* (non-Javadoc)
*
* @see com.hp.hpl.jena.rdf.model.Selector#isSimple()
*/
@Override
public boolean isSimple() {
return false;
}
/*
* (non-Javadoc)
*
* @see com.hp.hpl.jena.rdf.model.Selector#test(com.hp.hpl.jena.rdf.model.Statement)
*/
@Override
public boolean test( Statement s ) {
boolean retain = !unwantedForIndexing.contains( s.getPredicate().getURI() );
// bit of a special case ...
if ( s.getPredicate().getURI().equals( "http://www.w3.org/2002/07/owl#annotatedProperty" ) ) {
retain = !unwantedForIndexing.contains( s.getObject().toString() );
}
if ( !retain && log.isDebugEnabled() ) {
log.debug( "Removed: " + s );
}
return retain;
}
}
|
trivial
|
src/ubic/basecode/ontology/search/IndexerSelector.java
|
trivial
|
|
Java
|
apache-2.0
|
03f018ebc79f444cdfa404e3deef3314a4f0e41e
| 0
|
jmptrader/Strata,OpenGamma/Strata
|
/*
* Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.basics.date;
import static com.opengamma.strata.collect.TestHelper.coverPrivateConstructor;
import static com.opengamma.strata.collect.TestHelper.date;
import static java.time.DayOfWeek.SATURDAY;
import static java.time.DayOfWeek.SUNDAY;
import static org.assertj.core.api.Assertions.assertThat;
import java.time.LocalDate;
import java.time.MonthDay;
import java.util.ArrayList;
import java.util.List;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import com.google.common.collect.ImmutableList;
/**
* Test {@code GlobalHolidayCalendars}.
*/
public class GlobalHolidayCalendarsTest {
public static Object[][] data_easter() {
return new Object[][] {
{15, 4, 1900},
{15, 4, 1900},
{7, 4, 1901},
{30, 3, 1902},
{12, 4, 1903},
{3, 4, 1904},
{23, 4, 1905},
{15, 4, 1906},
{31, 3, 1907},
{19, 4, 1908},
{11, 4, 1909},
{27, 3, 1910},
{16, 4, 1911},
{7, 4, 1912},
{23, 3, 1913},
{12, 4, 1914},
{4, 4, 1915},
{23, 4, 1916},
{8, 4, 1917},
{31, 3, 1918},
{20, 4, 1919},
{4, 4, 1920},
{27, 3, 1921},
{16, 4, 1922},
{1, 4, 1923},
{20, 4, 1924},
{12, 4, 1925},
{4, 4, 1926},
{17, 4, 1927},
{8, 4, 1928},
{31, 3, 1929},
{20, 4, 1930},
{5, 4, 1931},
{27, 3, 1932},
{16, 4, 1933},
{1, 4, 1934},
{21, 4, 1935},
{12, 4, 1936},
{28, 3, 1937},
{17, 4, 1938},
{9, 4, 1939},
{24, 3, 1940},
{13, 4, 1941},
{5, 4, 1942},
{25, 4, 1943},
{9, 4, 1944},
{1, 4, 1945},
{21, 4, 1946},
{6, 4, 1947},
{28, 3, 1948},
{17, 4, 1949},
{9, 4, 1950},
{25, 3, 1951},
{13, 4, 1952},
{5, 4, 1953},
{18, 4, 1954},
{10, 4, 1955},
{1, 4, 1956},
{21, 4, 1957},
{6, 4, 1958},
{29, 3, 1959},
{17, 4, 1960},
{2, 4, 1961},
{22, 4, 1962},
{14, 4, 1963},
{29, 3, 1964},
{18, 4, 1965},
{10, 4, 1966},
{26, 3, 1967},
{14, 4, 1968},
{6, 4, 1969},
{29, 3, 1970},
{11, 4, 1971},
{2, 4, 1972},
{22, 4, 1973},
{14, 4, 1974},
{30, 3, 1975},
{18, 4, 1976},
{10, 4, 1977},
{26, 3, 1978},
{15, 4, 1979},
{6, 4, 1980},
{19, 4, 1981},
{11, 4, 1982},
{3, 4, 1983},
{22, 4, 1984},
{7, 4, 1985},
{30, 3, 1986},
{19, 4, 1987},
{3, 4, 1988},
{26, 3, 1989},
{15, 4, 1990},
{31, 3, 1991},
{19, 4, 1992},
{11, 4, 1993},
{3, 4, 1994},
{16, 4, 1995},
{7, 4, 1996},
{30, 3, 1997},
{12, 4, 1998},
{4, 4, 1999},
{23, 4, 2000},
{15, 4, 2001},
{31, 3, 2002},
{20, 4, 2003},
{11, 4, 2004},
{27, 3, 2005},
{16, 4, 2006},
{8, 4, 2007},
{23, 3, 2008},
{12, 4, 2009},
{4, 4, 2010},
{24, 4, 2011},
{8, 4, 2012},
{31, 3, 2013},
{20, 4, 2014},
{5, 4, 2015},
{27, 3, 2016},
{16, 4, 2017},
{1, 4, 2018},
{21, 4, 2019},
{12, 4, 2020},
{4, 4, 2021},
{17, 4, 2022},
{9, 4, 2023},
{31, 3, 2024},
{20, 4, 2025},
{5, 4, 2026},
{28, 3, 2027},
{16, 4, 2028},
{1, 4, 2029},
{21, 4, 2030},
{13, 4, 2031},
{28, 3, 2032},
{17, 4, 2033},
{9, 4, 2034},
{25, 3, 2035},
{13, 4, 2036},
{5, 4, 2037},
{25, 4, 2038},
{10, 4, 2039},
{1, 4, 2040},
{21, 4, 2041},
{6, 4, 2042},
{29, 3, 2043},
{17, 4, 2044},
{9, 4, 2045},
{25, 3, 2046},
{14, 4, 2047},
{5, 4, 2048},
{18, 4, 2049},
{10, 4, 2050},
{2, 4, 2051},
{21, 4, 2052},
{6, 4, 2053},
{29, 3, 2054},
{18, 4, 2055},
{2, 4, 2056},
{22, 4, 2057},
{14, 4, 2058},
{30, 3, 2059},
{18, 4, 2060},
{10, 4, 2061},
{26, 3, 2062},
{15, 4, 2063},
{6, 4, 2064},
{29, 3, 2065},
{11, 4, 2066},
{3, 4, 2067},
{22, 4, 2068},
{14, 4, 2069},
{30, 3, 2070},
{19, 4, 2071},
{10, 4, 2072},
{26, 3, 2073},
{15, 4, 2074},
{7, 4, 2075},
{19, 4, 2076},
{11, 4, 2077},
{3, 4, 2078},
{23, 4, 2079},
{7, 4, 2080},
{30, 3, 2081},
{19, 4, 2082},
{4, 4, 2083},
{26, 3, 2084},
{15, 4, 2085},
{31, 3, 2086},
{20, 4, 2087},
{11, 4, 2088},
{3, 4, 2089},
{16, 4, 2090},
{8, 4, 2091},
{30, 3, 2092},
{12, 4, 2093},
{4, 4, 2094},
{24, 4, 2095},
{15, 4, 2096},
{31, 3, 2097},
{20, 4, 2098},
{12, 4, 2099},
};
}
@ParameterizedTest
@MethodSource("data_easter")
public void test_easter(int day, int month, int year) {
assertThat(GlobalHolidayCalendars.easter(year)).isEqualTo(LocalDate.of(year, month, day));
}
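// For reference: the expected dates above follow the anonymous Gregorian computus
// (Meeus/Jones/Butcher). The sketch below is the textbook formula, not necessarily the
// body of GlobalHolidayCalendars.easter(int), which is defined elsewhere in the library.
private static LocalDate easterSketch(int year) {
    int a = year % 19;
    int b = year / 100;
    int c = year % 100;
    int d = b / 4;
    int e = b % 4;
    int f = (b + 8) / 25;
    int g = (b - f + 1) / 3;
    int h = (19 * a + b - d - g + 15) % 30;
    int i = c / 4;
    int k = c % 4;
    int l = (32 + 2 * e + 2 * i - h - k) % 7;
    int m = (a + 11 * h + 22 * l) / 451;
    int month = (h + l - 7 * m + 114) / 31;
    int day = ((h + l - 7 * m + 114) % 31) + 1;
    return LocalDate.of(year, month, day);
}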
//-------------------------------------------------------------------------
private static final HolidayCalendar GBLO = GlobalHolidayCalendars.generateLondon();
public static Object[][] data_gblo() {
return new Object[][] {
// Whitsun, Last Mon Aug - http://hansard.millbanksystems.com/commons/1964/mar/04/staggered-holidays
{1965, mds(1965, md(4, 16), md(4, 19), md(6, 7), md(8, 30), md(12, 27), md(12, 28))},
// Whitsun May - http://hansard.millbanksystems.com/commons/1964/mar/04/staggered-holidays
// 29th Aug - http://hansard.millbanksystems.com/written_answers/1965/nov/25/august-bank-holiday
{1966, mds(1966, md(4, 8), md(4, 11), md(5, 30), md(8, 29), md(12, 26), md(12, 27))},
// 29th May, 28th Aug - http://hansard.millbanksystems.com/written_answers/1965/jun/03/bank-holidays-1967-and-1968
{1967, mds(1967, md(3, 24), md(3, 27), md(5, 29), md(8, 28), md(12, 25), md(12, 26))},
// 3rd Jun, 2nd Sep - http://hansard.millbanksystems.com/written_answers/1965/jun/03/bank-holidays-1967-and-1968
{1968, mds(1968, md(4, 12), md(4, 15), md(6, 3), md(9, 2), md(12, 25), md(12, 26))},
// 26th May, 1st Sep - http://hansard.millbanksystems.com/written_answers/1967/mar/21/bank-holidays-1969-dates
{1969, mds(1969, md(4, 4), md(4, 7), md(5, 26), md(9, 1), md(12, 25), md(12, 26))},
// 25th May, 31st Aug - http://hansard.millbanksystems.com/written_answers/1967/jul/28/bank-holidays
{1970, mds(1970, md(3, 27), md(3, 30), md(5, 25), md(8, 31), md(12, 25), md(12, 28))},
// applying rules
{1971, mds(1971, md(4, 9), md(4, 12), md(5, 31), md(8, 30), md(12, 27), md(12, 28))},
{2009, mds(2009, md(1, 1), md(4, 10), md(4, 13), md(5, 4), md(5, 25), md(8, 31), md(12, 25), md(12, 28))},
{2010, mds(2010, md(1, 1), md(4, 2), md(4, 5), md(5, 3), md(5, 31), md(8, 30), md(12, 27), md(12, 28))},
// https://www.gov.uk/bank-holidays
{2012, mds(2012, md(1, 2), md(4, 6), md(4, 9), md(5, 7), md(6, 4), md(6, 5), md(8, 27), md(12, 25), md(12, 26))},
{2013, mds(2013, md(1, 1), md(3, 29), md(4, 1), md(5, 6), md(5, 27), md(8, 26), md(12, 25), md(12, 26))},
{2014, mds(2014, md(1, 1), md(4, 18), md(4, 21), md(5, 5), md(5, 26), md(8, 25), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(4, 3), md(4, 6), md(5, 4), md(5, 25), md(8, 31), md(12, 25), md(12, 28))},
{2016, mds(2016, md(1, 1), md(3, 25), md(3, 28), md(5, 2), md(5, 30), md(8, 29), md(12, 26), md(12, 27))},
{2020, mds(2020, md(1, 1), md(4, 10), md(4, 13), md(5, 8), md(5, 25), md(8, 31), md(12, 25), md(12, 28))},
{2022, mds(2022, md(1, 3), md(4, 15), md(4, 18), md(5, 2), md(6, 2), md(6, 3), md(8, 29), md(12, 26), md(12, 27))},
};
}
@ParameterizedTest
@MethodSource("data_gblo")
public void test_gblo(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(GBLO.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
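// The md/mds helpers used throughout these fixtures are defined further down the test
// class, outside this excerpt. As a hedged reconstruction from the imports (MonthDay,
// ArrayList, List, LocalDate), equivalents plausibly look like the following; the names
// carry a "Sketch" suffix to make clear these are assumptions, not the original helpers.
private static List<LocalDate> mdsSketch(int year, MonthDay... monthDays) {
    List<LocalDate> dates = new ArrayList<>(); // one LocalDate per month-day pair
    for (MonthDay monthDay : monthDays) {
        dates.add(monthDay.atYear(year));
    }
    return dates;
}
private static MonthDay mdSketch(int month, int day) {
    return MonthDay.of(month, day);
}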
//-------------------------------------------------------------------------
private static final HolidayCalendar FRPA = GlobalHolidayCalendars.generateParis();
public static Object[][] data_frpa() {
return new Object[][] {
// dates not shifted if they fall on a weekend
{2003, mds(2003, md(1, 1), md(4, 18), md(4, 21), md(5, 1), md(5, 8), md(5, 29),
md(6, 9), md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2004, mds(2004, md(1, 1), md(4, 9), md(4, 12), md(5, 1), md(5, 8), md(5, 20), md(5, 31),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2005, mds(2005, md(1, 1), md(3, 25), md(3, 28), md(5, 1), md(5, 5), md(5, 8),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2006, mds(2006, md(1, 1), md(4, 14), md(4, 17), md(5, 1), md(5, 8), md(5, 25),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2007, mds(2007, md(1, 1), md(4, 6), md(4, 9), md(5, 1), md(5, 8), md(5, 17),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2008, mds(2008, md(1, 1), md(3, 21), md(3, 24), md(5, 1), md(5, 8), md(5, 12), md(5, 24),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2012, mds(2012, md(1, 1), md(4, 6), md(4, 9), md(5, 1), md(5, 8), md(5, 17),
md(5, 28), md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2013, mds(2013, md(1, 1), md(3, 29), md(4, 1), md(5, 1), md(5, 8), md(5, 9), md(5, 20),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2014, mds(2014, md(1, 1), md(4, 18), md(4, 21), md(5, 1), md(5, 8), md(5, 29),
md(6, 9), md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(4, 3), md(4, 6), md(5, 1), md(5, 8), md(5, 14), md(5, 25),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2016, mds(2016, md(1, 1), md(3, 25), md(3, 28), md(5, 1), md(5, 5), md(5, 8), md(5, 16),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_frpa")
public void test_frpa(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(FRPA.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar DEFR = GlobalHolidayCalendars.generateFrankfurt();
public static Object[][] data_defr() {
return new Object[][] {
// dates not shifted if they fall on a weekend
{2014, mds(2014, md(1, 1), md(4, 18), md(4, 21), md(5, 1), md(5, 29), md(6, 9), md(6, 19),
md(10, 3), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2015, mds(2015, md(1, 1), md(4, 3), md(4, 6), md(5, 1), md(5, 14), md(5, 25), md(6, 4),
md(10, 3), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2016, mds(2016, md(1, 1), md(3, 25), md(3, 28), md(5, 1), md(5, 5), md(5, 16), md(5, 26),
md(10, 3), md(12, 25), md(12, 26), md(12, 31))},
{2017, mds(2017, md(1, 1), md(4, 14), md(4, 17), md(5, 1), md(5, 25), md(6, 5), md(6, 15),
md(10, 3), md(10, 31), md(12, 25), md(12, 26), md(12, 31))},
};
}
@ParameterizedTest
@MethodSource("data_defr")
public void test_defr(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(DEFR.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar CHZU = GlobalHolidayCalendars.generateZurich();
public static Object[][] data_chzu() {
return new Object[][] {
// dates not shifted if they fall on a weekend
{2012, mds(2012, md(1, 1), md(1, 2), md(4, 6), md(4, 9), md(5, 1), md(5, 17), md(5, 28),
md(8, 1), md(12, 25), md(12, 26))},
{2013, mds(2013, md(1, 1), md(1, 2), md(3, 29), md(4, 1), md(5, 1), md(5, 9), md(5, 20),
md(8, 1), md(12, 25), md(12, 26))},
{2014, mds(2014, md(1, 1), md(1, 2), md(4, 18), md(4, 21), md(5, 1), md(5, 29), md(6, 9),
md(8, 1), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(1, 2), md(4, 3), md(4, 6), md(5, 1), md(5, 14), md(5, 25),
md(8, 1), md(12, 25), md(12, 26))},
{2016, mds(2016, md(1, 1), md(1, 2), md(3, 25), md(3, 28), md(5, 1), md(5, 5), md(5, 16),
md(8, 1), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_chzu")
public void test_chzu(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(CHZU.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar EUTA = GlobalHolidayCalendars.generateEuropeanTarget();
public static Object[][] data_euta() {
return new Object[][] {
// 1997 - 1998 (testing phase), Jan 1, christmas day
{1997, mds(1997, md(1, 1), md(12, 25))},
{1998, mds(1998, md(1, 1), md(12, 25))},
// in 1999, Jan 1, christmas day, Dec 26, Dec 31
{1999, mds(1999, md(1, 1), md(12, 25), md(12, 31))},
// in 2000, Jan 1, good friday, easter monday, May 1, christmas day, Dec 26
{2000, mds(2000, md(1, 1), md(4, 21), md(4, 24), md(5, 1), md(12, 25), md(12, 26))},
// in 2001, Jan 1, good friday, easter monday, May 1, christmas day, Dec 26, Dec 31
{2001, mds(2001, md(1, 1), md(4, 13), md(4, 16), md(5, 1), md(12, 25), md(12, 26), md(12, 31))},
// from 2002, Jan 1, good friday, easter monday, May 1, christmas day, Dec 26
{2002, mds(2002, md(1, 1), md(3, 29), md(4, 1), md(5, 1), md(12, 25), md(12, 26))},
{2003, mds(2003, md(1, 1), md(4, 18), md(4, 21), md(5, 1), md(12, 25), md(12, 26))},
// http://www.ecb.europa.eu/home/html/holidays.en.html
{2014, mds(2014, md(1, 1), md(4, 18), md(4, 21), md(5, 1), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(4, 3), md(4, 6), md(5, 1), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_euta")
public void test_euta(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(EUTA.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar USGS = GlobalHolidayCalendars.generateUsGovtSecurities();
public static Object[][] data_usgs() {
return new Object[][] {
// http://www.sifma.org/uploadedfiles/research/statistics/statisticsfiles/misc-us-historical-holiday-market-recommendations-sifma.pdf?n=53384
{1996, mds(1996, md(1, 1), md(1, 15), md(2, 19), md(4, 5), md(5, 27), md(7, 4),
md(9, 2), md(10, 14), md(11, 11), md(11, 28), md(12, 25))},
{1997, mds(1997, md(1, 1), md(1, 20), md(2, 17), md(3, 28), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{1998, mds(1998, md(1, 1), md(1, 19), md(2, 16), md(4, 10), md(5, 25), md(7, 3),
md(9, 7), md(10, 12), md(11, 11), md(11, 26), md(12, 25))},
{1999, mds(1999, md(1, 1), md(1, 18), md(2, 15), md(4, 2), md(5, 31), md(7, 5),
md(9, 6), md(10, 11), md(11, 11), md(11, 25), md(12, 24))},
{2000, mds(2000, md(1, 17), md(2, 21), md(4, 21), md(5, 29), md(7, 4),
md(9, 4), md(10, 9), md(11, 23), md(12, 25))},
{2001, mds(2001, md(1, 1), md(1, 15), md(2, 19), md(4, 13), md(5, 28), md(7, 4),
md(9, 3), md(10, 8), md(11, 12), md(11, 22), md(12, 25))},
{2002, mds(2002, md(1, 1), md(1, 21), md(2, 18), md(3, 29), md(5, 27), md(7, 4),
md(9, 2), md(10, 14), md(11, 11), md(11, 28), md(12, 25))},
{2003, mds(2003, md(1, 1), md(1, 20), md(2, 17), md(4, 18), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2004, mds(2004, md(1, 1), md(1, 19), md(2, 16), md(4, 9), md(5, 31), md(7, 5),
md(9, 6), md(10, 11), md(11, 11), md(11, 25), md(12, 24))},
{2005, mds(2005, md(1, 17), md(2, 21), md(3, 25), md(5, 30), md(7, 4),
md(9, 5), md(10, 10), md(11, 11), md(11, 24), md(12, 26))},
{2006, mds(2006, md(1, 2), md(1, 16), md(2, 20), md(4, 14), md(5, 29), md(7, 4),
md(9, 4), md(10, 9), md(11, 23), md(12, 25))},
{2007, mds(2007, md(1, 1), md(1, 15), md(2, 19), md(4, 6), md(5, 28), md(7, 4),
md(9, 3), md(10, 8), md(11, 12), md(11, 22), md(12, 25))},
{2008, mds(2008, md(1, 1), md(1, 21), md(2, 18), md(3, 21), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2009, mds(2009, md(1, 1), md(1, 19), md(2, 16), md(4, 10), md(5, 25), md(7, 3),
md(9, 7), md(10, 12), md(11, 11), md(11, 26), md(12, 25))},
{2010, mds(2010, md(1, 1), md(1, 18), md(2, 15), md(4, 2), md(5, 31), md(7, 5),
md(9, 6), md(10, 11), md(11, 11), md(11, 25), md(12, 24))},
{2011, mds(2011, md(1, 17), md(2, 21), md(4, 22), md(5, 30), md(7, 4),
md(9, 5), md(10, 10), md(11, 11), md(11, 24), md(12, 26))},
{2012, mds(2012, md(1, 2), md(1, 16), md(2, 20), md(4, 6), md(5, 28), md(7, 4),
md(9, 3), md(10, 8), md(10, 30), md(11, 12), md(11, 22), md(12, 25))},
{2013, mds(2013, md(1, 1), md(1, 21), md(2, 18), md(3, 29), md(5, 27), md(7, 4),
md(9, 2), md(10, 14), md(11, 11), md(11, 28), md(12, 25))},
{2014, mds(2014, md(1, 1), md(1, 20), md(2, 17), md(4, 18), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2015, mds(2015, md(1, 1), md(1, 19), md(2, 16), md(4, 3), md(5, 25), md(7, 3),
md(9, 7), md(10, 12), md(11, 11), md(11, 26), md(12, 25))},
};
}
@ParameterizedTest
@MethodSource("data_usgs")
public void test_usgs(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(USGS.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar USNY = GlobalHolidayCalendars.generateUsNewYork();
public static Object[][] data_usny() {
return new Object[][] {
// http://www.cs.ny.gov/attendance_leave/2012_legal_holidays.cfm
// change year for other pages
{2008, mds(2008, md(1, 1), md(1, 21), md(2, 18), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2009, mds(2009, md(1, 1), md(1, 19), md(2, 16), md(5, 25), md(7, 4),
md(9, 7), md(10, 12), md(11, 11), md(11, 26), md(12, 25))},
{2010, mds(2010, md(1, 1), md(1, 18), md(2, 15), md(5, 31), md(7, 5),
md(9, 6), md(10, 11), md(11, 11), md(11, 25), md(12, 25))},
{2011, mds(2011, md(1, 1), md(1, 17), md(2, 21), md(5, 30), md(7, 4),
md(9, 5), md(10, 10), md(11, 11), md(11, 24), md(12, 26))},
{2012, mds(2012, md(1, 2), md(1, 16), md(2, 20), md(5, 28), md(7, 4),
md(9, 3), md(10, 8), md(11, 12), md(11, 22), md(12, 25))},
{2013, mds(2013, md(1, 1), md(1, 21), md(2, 18), md(5, 27), md(7, 4),
md(9, 2), md(10, 14), md(11, 11), md(11, 28), md(12, 25))},
{2014, mds(2014, md(1, 1), md(1, 20), md(2, 17), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2015, mds(2015, md(1, 1), md(1, 19), md(2, 16), md(5, 25), md(7, 4),
md(9, 7), md(10, 12), md(11, 11), md(11, 26), md(12, 25))},
};
}
@ParameterizedTest
@MethodSource("data_usny")
public void test_usny(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(USNY.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar NYFD = GlobalHolidayCalendars.generateNewYorkFed();
public static Object[][] data_nyfd() {
return new Object[][] {
// http://www.ny.frb.org/aboutthefed/holiday_schedule.html
// http://web.archive.org/web/20080403230805/http://www.ny.frb.org/aboutthefed/holiday_schedule.html
// http://web.archive.org/web/20100827003740/http://www.ny.frb.org/aboutthefed/holiday_schedule.html
// http://web.archive.org/web/20031007222458/http://www.ny.frb.org/aboutthefed/holiday_schedule.html
// http://www.federalreserve.gov/aboutthefed/k8.htm
{2003, mds(2003, md(1, 1), md(1, 20), md(2, 17), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2004, mds(2004, md(1, 1), md(1, 19), md(2, 16), md(5, 31), md(7, 5),
md(9, 6), md(10, 11), md(11, 11), md(11, 25))},
{2005, mds(2005, md(1, 17), md(2, 21), md(5, 30), md(7, 4),
md(9, 5), md(10, 10), md(11, 11), md(11, 24), md(12, 26))},
{2006, mds(2006, md(1, 2), md(1, 16), md(2, 20), md(5, 29), md(7, 4),
md(9, 4), md(10, 9), md(11, 23), md(12, 25))},
{2007, mds(2007, md(1, 1), md(1, 15), md(2, 19), md(5, 28), md(7, 4),
md(9, 3), md(10, 8), md(11, 12), md(11, 22), md(12, 25))},
{2008, mds(2008, md(1, 1), md(1, 21), md(2, 18), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2009, mds(2009, md(1, 1), md(1, 19), md(2, 16), md(5, 25),
md(9, 7), md(10, 12), md(11, 11), md(11, 26), md(12, 25))},
{2010, mds(2010, md(1, 1), md(1, 18), md(2, 15), md(5, 31), md(7, 5),
md(9, 6), md(10, 11), md(11, 11), md(11, 25))},
{2011, mds(2011, md(1, 17), md(2, 21), md(5, 30), md(7, 4),
md(9, 5), md(10, 10), md(11, 11), md(11, 24), md(12, 26))},
{2012, mds(2012, md(1, 2), md(1, 16), md(2, 20), md(5, 28), md(7, 4),
md(9, 3), md(10, 8), md(11, 12), md(11, 22), md(12, 25))},
{2013, mds(2013, md(1, 1), md(1, 21), md(2, 18), md(5, 27), md(7, 4),
md(9, 2), md(10, 14), md(11, 11), md(11, 28), md(12, 25))},
{2014, mds(2014, md(1, 1), md(1, 20), md(2, 17), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2015, mds(2015, md(1, 1), md(1, 19), md(2, 16), md(5, 25),
md(9, 7), md(10, 12), md(11, 11), md(11, 26), md(12, 25))},
{2016, mds(2016, md(1, 1), md(1, 18), md(2, 15), md(5, 30), md(7, 4),
md(9, 5), md(10, 10), md(11, 11), md(11, 24), md(12, 26))},
{2017, mds(2017, md(1, 2), md(1, 16), md(2, 20), md(5, 29), md(7, 4),
md(9, 4), md(10, 9), md(11, 23), md(12, 25))},
{2018, mds(2018, md(1, 1), md(1, 15), md(2, 19), md(5, 28), md(7, 4),
md(9, 3), md(10, 8), md(11, 12), md(11, 22), md(12, 25))},
};
}
@ParameterizedTest
@MethodSource("data_nyfd")
public void test_nyfd(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(NYFD.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar NYSE = GlobalHolidayCalendars.generateNewYorkStockExchange();
public static Object[][] data_nyse() {
return new Object[][] {
// https://www.nyse.com/markets/hours-calendars
// http://web.archive.org/web/20110320011340/http://www.nyse.com/about/newsevents/1176373643795.html?sa_campaign=/internal_ads/homepage/08262008holidays
// http://web.archive.org/web/20080901164729/http://www.nyse.com/about/newsevents/1176373643795.html?sa_campaign=/internal_ads/homepage/08262008holidays
{2008, mds(2008, md(1, 1), md(1, 21), md(2, 18), md(3, 21), md(5, 26), md(7, 4),
md(9, 1), md(11, 27), md(12, 25))},
{2009, mds(2009, md(1, 1), md(1, 19), md(2, 16), md(4, 10), md(5, 25), md(7, 3),
md(9, 7), md(11, 26), md(12, 25))},
{2010, mds(2010, md(1, 1), md(1, 18), md(2, 15), md(4, 2), md(5, 31), md(7, 5),
md(9, 6), md(11, 25), md(12, 24))},
{2011, mds(2011, md(1, 1), md(1, 17), md(2, 21), md(4, 22), md(5, 30), md(7, 4),
md(9, 5), md(11, 24), md(12, 26))},
{2012, mds(2012, md(1, 2), md(1, 16), md(2, 20), md(4, 6), md(5, 28), md(7, 4),
md(9, 3), md(10, 30), md(11, 22), md(12, 25))},
{2013, mds(2013, md(1, 1), md(1, 21), md(2, 18), md(3, 29), md(5, 27), md(7, 4),
md(9, 2), md(11, 28), md(12, 25))},
{2014, mds(2014, md(1, 1), md(1, 20), md(2, 17), md(4, 18), md(5, 26), md(7, 4),
md(9, 1), md(11, 27), md(12, 25))},
{2015, mds(2015, md(1, 1), md(1, 19), md(2, 16), md(4, 3), md(5, 25), md(7, 3),
md(9, 7), md(11, 26), md(12, 25))},
};
}
@ParameterizedTest
@MethodSource("data_nyse")
public void test_nyse(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(NYSE.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar JPTO = GlobalHolidayCalendars.generateTokyo();
public static Object[][] data_jpto() {
return new Object[][] {
// https://www.boj.or.jp/en/about/outline/holi.htm/
// http://web.archive.org/web/20110513190217/http://www.boj.or.jp/en/about/outline/holi.htm/
// https://www.japanspecialist.co.uk/travel-tips/national-holidays-in-japan/
{1999, mds(1999, md(1, 1), md(1, 2), md(1, 3), md(1, 15), md(2, 11), md(3, 22), md(4, 29), md(5, 3), md(5, 4), md(5, 5),
md(7, 20), md(9, 15), md(9, 23), md(10, 11), md(11, 3), md(11, 23), md(12, 23), md(12, 31))},
{2000, mds(2000, md(1, 1), md(1, 2), md(1, 3), md(1, 10), md(2, 11), md(3, 20), md(4, 29), md(5, 3), md(5, 4), md(5, 5),
md(7, 20), md(9, 15), md(9, 23), md(10, 9), md(11, 3), md(11, 23), md(12, 23), md(12, 31))},
{2001, mds(2001, md(1, 1), md(1, 2), md(1, 3), md(1, 8), md(2, 12), md(3, 20), md(4, 30), md(5, 3), md(5, 4), md(5, 5),
md(7, 20), md(9, 15), md(9, 24), md(10, 8), md(11, 3), md(11, 23), md(12, 24), md(12, 31))},
{2002, mds(2002, md(1, 1), md(1, 2), md(1, 3), md(1, 14), md(2, 11), md(3, 21), md(4, 29), md(5, 3), md(5, 4), md(5, 6),
md(7, 20), md(9, 16), md(9, 23), md(10, 14), md(11, 4), md(11, 23), md(12, 23), md(12, 31))},
{2003, mds(2003, md(1, 1), md(1, 2), md(1, 3), md(1, 13), md(2, 11), md(3, 21), md(4, 29), md(5, 3), md(5, 4), md(5, 5),
md(7, 21), md(9, 15), md(9, 23), md(10, 13), md(11, 3), md(11, 24), md(12, 23), md(12, 31))},
{2004, mds(2004, md(1, 1), md(1, 2), md(1, 3), md(1, 12), md(2, 11), md(3, 20), md(4, 29), md(5, 3), md(5, 4), md(5, 5),
md(7, 19), md(9, 20), md(9, 23), md(10, 11), md(11, 3), md(11, 23), md(12, 23), md(12, 31))},
{2005, mds(2005, md(1, 1), md(1, 2), md(1, 3), md(1, 10), md(2, 11), md(3, 21), md(4, 29), md(5, 3), md(5, 4), md(5, 5),
md(7, 18), md(9, 19), md(9, 23), md(10, 10), md(11, 3), md(11, 23), md(12, 23), md(12, 31))},
{2006, mds(2006, md(1, 1), md(1, 2), md(1, 3), md(1, 9), md(2, 11), md(3, 21), md(4, 29), md(5, 3), md(5, 4), md(5, 5),
md(7, 17), md(9, 18), md(9, 23), md(10, 9), md(11, 3), md(11, 23), md(12, 23), md(12, 31))},
{2011, mds(2011, md(1, 1), md(1, 2), md(1, 3), md(1, 10), md(2, 11), md(3, 21), md(4, 29), md(5, 3), md(5, 4), md(5, 5),
md(7, 18), md(9, 19), md(9, 23), md(10, 10), md(11, 3), md(11, 23), md(12, 23), md(12, 31))},
{2012, mds(2012, md(1, 1), md(1, 2), md(1, 3), md(1, 9), md(2, 11), md(3, 20), md(4, 30), md(5, 3), md(5, 4), md(5, 5),
md(7, 16), md(9, 17), md(9, 22), md(10, 8), md(11, 3), md(11, 23), md(12, 24), md(12, 31))},
{2013, mds(2013, md(1, 1), md(1, 2), md(1, 3), md(1, 14), md(2, 11), md(3, 20), md(4, 29),
md(5, 3), md(5, 4), md(5, 5), md(5, 6),
md(7, 15), md(9, 16), md(9, 23), md(10, 14), md(11, 4), md(11, 23), md(12, 23), md(12, 31))},
{2014, mds(2014, md(1, 1), md(1, 2), md(1, 3), md(1, 13), md(2, 11), md(3, 21), md(4, 29),
md(5, 3), md(5, 4), md(5, 5), md(5, 6),
md(7, 21), md(9, 15), md(9, 23), md(10, 13), md(11, 3), md(11, 24), md(12, 23), md(12, 31))},
{2015, mds(2015, md(1, 1), md(1, 2), md(1, 3), md(1, 12), md(2, 11), md(3, 21), md(4, 29),
md(5, 3), md(5, 4), md(5, 5), md(5, 6),
md(7, 20), md(9, 21), md(9, 22), md(9, 23), md(10, 12), md(11, 3), md(11, 23), md(12, 23), md(12, 31))},
{2018, mds(2018, md(1, 1), md(1, 2), md(1, 3), md(1, 8), md(2, 12), md(3, 21), md(4, 30),
md(5, 3), md(5, 4), md(5, 5), md(7, 16), md(8, 11), md(9, 17), md(9, 24),
md(10, 8), md(11, 3), md(11, 23), md(12, 23), md(12, 24), md(12, 31))},
{2019, mds(2019, md(1, 1), md(1, 2), md(1, 3), md(1, 14), md(2, 11), md(3, 21), md(4, 29), md(4, 30),
md(5, 1), md(5, 2), md(5, 3), md(5, 4), md(5, 5), md(5, 6), md(7, 15), md(8, 12), md(9, 16), md(9, 23),
md(10, 14), md(10, 22), md(11, 4), md(11, 23), md(12, 31))},
{2020, mds(2020, md(1, 1), md(1, 2), md(1, 3), md(1, 13), md(2, 11), md(2, 24), md(3, 20), md(4, 29),
md(5, 3), md(5, 4), md(5, 5), md(5, 6), md(7, 23), md(7, 24), md(8, 10), md(9, 21), md(9, 22),
md(11, 3), md(11, 23), md(12, 31))},
{2021, mds(2021, md(1, 1), md(1, 11), md(2, 11), md(2, 23), md(3, 20), md(4, 29),
md(5, 3), md(5, 4), md(5, 5), md(7, 22), md(7, 23), md(8, 9), md(9, 20),
md(9, 23), md(11, 3), md(11, 23), md(12, 31))},
};
}
@ParameterizedTest
@MethodSource("data_jpto")
public void test_jpto(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(JPTO.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar AUSY = GlobalHolidayCalendars.generateSydney();
public static Object[][] data_ausy() {
return new Object[][] {
{2012, mds(2012, md(1, 1), md(1, 2), md(1, 26), md(4, 6), md(4, 7), md(4, 8), md(4, 9),
md(4, 25), md(6, 11), md(8, 6), md(10, 1), md(12, 25), md(12, 26))},
{2013, mds(2013, md(1, 1), md(1, 26), md(1, 28), md(3, 29), md(3, 30), md(3, 31), md(4, 1),
md(4, 25), md(6, 10), md(8, 5), md(10, 7), md(12, 25), md(12, 26))},
{2014, mds(2014, md(1, 1), md(1, 26), md(1, 27), md(4, 18), md(4, 19), md(4, 20), md(4, 21),
md(4, 25), md(6, 9), md(8, 4), md(10, 6), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(1, 26), md(4, 3), md(4, 4), md(4, 5), md(4, 6), md(4, 25),
md(6, 8), md(8, 3), md(10, 5), md(12, 25), md(12, 26), md(12, 27), md(12, 28))},
{2016, mds(2016, md(1, 1), md(1, 26), md(3, 25), md(3, 26), md(3, 27), md(3, 28),
md(4, 25), md(6, 13), md(8, 1), md(10, 3), md(12, 25), md(12, 26), md(12, 27))},
{2017, mds(2017, md(1, 1), md(1, 2), md(1, 26), md(4, 14), md(4, 15), md(4, 16), md(4, 17),
md(4, 25), md(6, 12), md(8, 7), md(10, 2), md(12, 25), md(12, 26))},
{2022, mds(2022, md(1, 3), md(1, 26), md(4, 15), md(4, 18),
md(4, 25), md(6, 13), md(8, 1), md(10, 3), md(12, 26), md(12, 27))},
};
}
@ParameterizedTest
@MethodSource("data_ausy")
public void test_ausy(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(AUSY.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar BRBD = GlobalHolidayCalendars.generateBrazil();
public static Object[][] data_brbd() {
// http://www.planalto.gov.br/ccivil_03/leis/2002/L10607.htm
// fixing data
return new Object[][] {
{2013, mds(2013, md(1, 1), md(2, 11), md(2, 12), md(3, 29), md(4, 21), md(5, 1),
md(5, 30), md(9, 7), md(10, 12), md(11, 2), md(11, 15), md(12, 25))},
{2014, mds(2014, md(1, 1), md(3, 3), md(3, 4), md(4, 18), md(4, 21), md(5, 1),
md(6, 19), md(9, 7), md(10, 12), md(11, 2), md(11, 15), md(12, 25))},
{2015, mds(2015, md(1, 1), md(2, 16), md(2, 17), md(4, 3), md(4, 21), md(5, 1),
md(6, 4), md(9, 7), md(10, 12), md(11, 2), md(11, 15), md(12, 25))},
{2016, mds(2016, md(1, 1), md(2, 8), md(2, 9), md(3, 25), md(4, 21), md(5, 1),
md(5, 26), md(9, 7), md(10, 12), md(11, 2), md(11, 15), md(12, 25))},
};
}
@ParameterizedTest
@MethodSource("data_brbd")
public void test_brbd(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(BRBD.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar CAMO = GlobalHolidayCalendars.generateMontreal();
public static Object[][] data_camo() {
// https://www.bankofcanada.ca/about/contact-information/bank-of-canada-holiday-schedule/
// also indicate day after new year and boxing day, but no other sources for this
return new Object[][] {
{2017, mds(2017, md(1, 2), md(4, 14),
md(5, 22), md(6, 26), md(7, 3), md(9, 4), md(10, 9), md(12, 25))},
{2018, mds(2018, md(1, 1), md(3, 30),
md(5, 21), md(6, 25), md(7, 2), md(9, 3), md(10, 8), md(12, 25))},
};
}
@ParameterizedTest
@MethodSource("data_camo")
public void test_camo(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(CAMO.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar CATO = GlobalHolidayCalendars.generateToronto();
public static Object[][] data_cato() {
return new Object[][] {
{2009, mds(2009, md(1, 1), md(2, 16), md(4, 10),
md(5, 18), md(7, 1), md(8, 3), md(9, 7), md(10, 12), md(11, 11), md(12, 25), md(12, 28))},
{2010, mds(2010, md(1, 1), md(2, 15), md(4, 2),
md(5, 24), md(7, 1), md(8, 2), md(9, 6), md(10, 11), md(11, 11), md(12, 27), md(12, 28))},
{2011, mds(2011, md(1, 3), md(2, 21), md(4, 22),
md(5, 23), md(7, 1), md(8, 1), md(9, 5), md(10, 10), md(11, 11), md(12, 26), md(12, 27))},
{2012, mds(2012, md(1, 2), md(2, 20), md(4, 6),
md(5, 21), md(7, 2), md(8, 6), md(9, 3), md(10, 8), md(11, 12), md(12, 25), md(12, 26))},
{2013, mds(2013, md(1, 1), md(2, 18), md(3, 29),
md(5, 20), md(7, 1), md(8, 5), md(9, 2), md(10, 14), md(11, 11), md(12, 25), md(12, 26))},
{2014, mds(2014, md(1, 1), md(2, 17), md(4, 18),
md(5, 19), md(7, 1), md(8, 4), md(9, 1), md(10, 13), md(11, 11), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(2, 16), md(4, 3),
md(5, 18), md(7, 1), md(8, 3), md(9, 7), md(10, 12), md(11, 11), md(12, 25), md(12, 28))},
{2016, mds(2016, md(1, 1), md(2, 15), md(3, 25),
md(5, 23), md(7, 1), md(8, 1), md(9, 5), md(10, 10), md(11, 11), md(12, 26), md(12, 27))},
};
}
@ParameterizedTest
@MethodSource("data_cato")
public void test_cato(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(CATO.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar CZPR = GlobalHolidayCalendars.generatePrague();
public static Object[][] data_czpr() {
// official data from Czech National Bank
// https://www.cnb.cz/en/public/media_service/schedules/media_svatky.html
return new Object[][] {
{2008, mds(2008, md(1, 1), md(3, 24), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2009, mds(2009, md(1, 1), md(4, 13), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2010, mds(2010, md(1, 1), md(4, 5), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2011, mds(2011, md(1, 1), md(4, 25), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2012, mds(2012, md(1, 1), md(4, 9), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2013, mds(2013, md(1, 1), md(4, 1), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2014, mds(2014, md(1, 1), md(4, 21), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(4, 6), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2016, mds(2016, md(1, 1), md(3, 25), md(3, 28), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2017, mds(2017, md(1, 1), md(4, 14), md(4, 17), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_czpr")
public void test_czpr(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(CZPR.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar DKCO = GlobalHolidayCalendars.generateCopenhagen();
public static Object[][] data_dkco() {
// official data from Danish Bankers association via web archive
return new Object[][] {
{2013, mds(2013, md(1, 1), md(3, 28), md(3, 29), md(4, 1),
md(4, 26), md(5, 9), md(5, 10), md(5, 20), md(6, 5), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2014, mds(2014, md(1, 1), md(4, 17), md(4, 18), md(4, 21),
md(5, 16), md(5, 29), md(5, 30), md(6, 5), md(6, 9), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2015, mds(2015, md(1, 1), md(4, 2), md(4, 3), md(4, 6),
md(5, 1), md(5, 14), md(5, 15), md(5, 25), md(6, 5), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2016, mds(2016, md(1, 1), md(3, 24), md(3, 25), md(3, 28),
md(4, 22), md(5, 5), md(5, 6), md(5, 16), md(6, 5), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
};
}
@ParameterizedTest
@MethodSource("data_dkco")
public void test_dkco(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(DKCO.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar HUBU = GlobalHolidayCalendars.generateBudapest();
public static Object[][] data_hubu() {
// http://www.mnb.hu/letoltes/bubor2.xls
// http://holidays.kayaposoft.com/public_holidays.php?year=2013&country=hun&region=#
return new Object[][] {
{2012, mds(2012, md(3, 15), md(3, 16), md(4, 9), md(4, 30), md(5, 1), md(5, 28),
md(8, 20), md(10, 22), md(10, 23), md(11, 1), md(11, 2), md(12, 24), md(12, 25), md(12, 26), md(12, 31)),
ImmutableList.of(date(2012, 3, 24), date(2012, 5, 5), date(2012, 10, 27),
date(2012, 11, 10), date(2012, 12, 15), date(2012, 12, 29))},
{2013, mds(2013, md(1, 1), md(3, 15), md(4, 1), md(5, 1), md(5, 20),
md(8, 19), md(8, 20), md(10, 23), md(11, 1), md(12, 24), md(12, 25), md(12, 26), md(12, 27)),
ImmutableList.of(date(2013, 8, 24), date(2013, 12, 7), date(2013, 12, 21))},
{2014, mds(2014, md(1, 1), md(3, 15), md(4, 21), md(5, 1), md(5, 2),
md(6, 9), md(8, 20), md(10, 23), md(10, 24), md(12, 24), md(12, 25), md(12, 26)),
ImmutableList.of(date(2014, 5, 10), date(2014, 10, 18))},
{2015, mds(2015, md(1, 1), md(1, 2), md(3, 15), md(4, 6), md(5, 1), md(5, 25),
md(8, 20), md(8, 21), md(10, 23), md(12, 24), md(12, 25), md(12, 26)),
ImmutableList.of(date(2015, 1, 10), date(2015, 8, 8), date(2015, 12, 12))},
{2016, mds(2016, md(1, 1), md(3, 14), md(3, 15), md(3, 28), md(5, 1), md(5, 16),
md(10, 31), md(11, 1), md(12, 24), md(12, 25), md(12, 26)),
ImmutableList.of(date(2016, 3, 5), date(2016, 10, 15))},
{2020, mds(2020, md(1, 1), md(3, 15), md(4, 10), md(4, 13), md(5, 1), md(6, 1),
md(8, 20), md(8, 21), md(10, 23), md(12, 24), md(12, 25), md(12, 26)),
ImmutableList.of(date(2020, 8, 29), date(2020, 12, 12))},
};
}
@ParameterizedTest
@MethodSource("data_hubu")
public void test_hubu(int year, List<LocalDate> holidays, List<LocalDate> workDays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = (holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY) &&
!workDays.contains(date);
assertThat(HUBU.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar MXMC = GlobalHolidayCalendars.generateMexicoCity();
public static Object[][] data_mxmc() {
// http://www.banxico.org.mx/SieInternet/consultarDirectorioInternetAction.do?accion=consultarCuadro&idCuadro=CF111&locale=en
return new Object[][] {
{2012, mds(2012, md(1, 1), md(2, 6), md(3, 19), md(4, 5), md(4, 6),
md(5, 1), md(9, 16), md(11, 2), md(11, 19), md(12, 12), md(12, 25))},
{2013, mds(2013, md(1, 1), md(2, 4), md(3, 18), md(3, 28), md(3, 29),
md(5, 1), md(9, 16), md(11, 2), md(11, 18), md(12, 12), md(12, 25))},
{2014, mds(2014, md(1, 1), md(2, 3), md(3, 17), md(4, 17), md(4, 18),
md(5, 1), md(9, 16), md(11, 2), md(11, 17), md(12, 12), md(12, 25))},
{2015, mds(2015, md(1, 1), md(2, 2), md(3, 16), md(4, 2), md(4, 3),
md(5, 1), md(9, 16), md(11, 2), md(11, 16), md(12, 12), md(12, 25))},
{2016, mds(2016, md(1, 1), md(2, 1), md(3, 21), md(3, 24), md(3, 25),
md(5, 1), md(9, 16), md(11, 2), md(11, 21), md(12, 12), md(12, 25))},
};
}
@ParameterizedTest
@MethodSource("data_mxmc")
public void test_mxmc(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(MXMC.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar NOOS = GlobalHolidayCalendars.generateOslo();
public static Object[][] data_noos() {
// official data from Oslo Bors via web archive
return new Object[][] {
{2009, mds(2009, md(1, 1), md(4, 9), md(4, 10), md(4, 13),
md(5, 1), md(5, 21), md(6, 1), md(12, 24), md(12, 25), md(12, 31))},
{2011, mds(2011, md(4, 21), md(4, 22), md(4, 25),
md(5, 17), md(6, 2), md(6, 13), md(12, 26))},
{2012, mds(2012, md(4, 5), md(4, 6), md(4, 9),
md(5, 1), md(5, 17), md(5, 28), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2013, mds(2013, md(1, 1), md(3, 28), md(3, 29), md(4, 1),
md(5, 1), md(5, 9), md(5, 17), md(5, 20), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2014, mds(2014, md(1, 1), md(4, 17), md(4, 18), md(4, 21),
md(5, 1), md(5, 17), md(5, 29), md(6, 9), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2015, mds(2015, md(1, 1), md(4, 2), md(4, 3), md(4, 6),
md(5, 1), md(5, 14), md(5, 25), md(12, 24), md(12, 25), md(12, 31))},
{2016, mds(2016, md(1, 1), md(3, 24), md(3, 25), md(3, 28),
md(5, 5), md(5, 16), md(5, 17), md(12, 26))},
{2017, mds(2017, md(4, 13), md(4, 14), md(4, 17),
md(5, 1), md(5, 17), md(5, 25), md(6, 5), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_noos")
public void test_noos(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(NOOS.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar NZAU = GlobalHolidayCalendars.generateAuckland();
public static Object[][] data_nzau() {
// https://www.govt.nz/browse/work/public-holidays-and-work/public-holidays-and-anniversary-dates/
// https://www.employment.govt.nz/leave-and-holidays/public-holidays/public-holidays-and-anniversary-dates/dates-for-previous-years/
return new Object[][] {
{2015, mds(2015, md(1, 1), md(1, 2), md(1, 26), md(2, 6), md(4, 3), md(4, 6),
md(4, 27), md(6, 1), md(10, 26), md(12, 25), md(12, 28))},
{2016, mds(2016, md(1, 1), md(1, 4), md(2, 1), md(2, 8), md(3, 25), md(3, 28),
md(4, 25), md(6, 6), md(10, 24), md(12, 26), md(12, 27))},
{2017, mds(2017, md(1, 2), md(1, 3), md(1, 30), md(2, 6), md(4, 14), md(4, 17),
md(4, 25), md(6, 5), md(10, 23), md(12, 25), md(12, 26))},
{2018, mds(2018, md(1, 1), md(1, 2), md(1, 29), md(2, 6), md(3, 30), md(4, 2),
md(4, 25), md(6, 4), md(10, 22), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_nzau")
public void test_nzau(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(NZAU.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar NZWE = GlobalHolidayCalendars.generateWellington();
public static Object[][] data_nzwe() {
// https://www.govt.nz/browse/work/public-holidays-and-work/public-holidays-and-anniversary-dates/
// https://www.employment.govt.nz/leave-and-holidays/public-holidays/public-holidays-and-anniversary-dates/dates-for-previous-years/
return new Object[][] {
{2015, mds(2015, md(1, 1), md(1, 2), md(1, 19), md(2, 6), md(4, 3), md(4, 6),
md(4, 27), md(6, 1), md(10, 26), md(12, 25), md(12, 28))},
{2016, mds(2016, md(1, 1), md(1, 4), md(1, 25), md(2, 8), md(3, 25), md(3, 28),
md(4, 25), md(6, 6), md(10, 24), md(12, 26), md(12, 27))},
{2017, mds(2017, md(1, 2), md(1, 3), md(1, 23), md(2, 6), md(4, 14), md(4, 17),
md(4, 25), md(6, 5), md(10, 23), md(12, 25), md(12, 26))},
{2018, mds(2018, md(1, 1), md(1, 2), md(1, 22), md(2, 6), md(3, 30), md(4, 2),
md(4, 25), md(6, 4), md(10, 22), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_nzwe")
public void test_nzwe(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(NZWE.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar NZBD = GlobalHolidayCalendars.generateNewZealand();
public static Object[][] data_nzbd() {
// https://www.govt.nz/browse/work/public-holidays-and-work/public-holidays-and-anniversary-dates/
// https://www.employment.govt.nz/leave-and-holidays/public-holidays/public-holidays-and-anniversary-dates/dates-for-previous-years/
return new Object[][] {
{2015, mds(2015, md(1, 1), md(1, 2), md(2, 6), md(4, 3), md(4, 6),
md(4, 27), md(6, 1), md(10, 26), md(12, 25), md(12, 28))},
{2016, mds(2016, md(1, 1), md(1, 4), md(2, 8), md(3, 25), md(3, 28),
md(4, 25), md(6, 6), md(10, 24), md(12, 26), md(12, 27))},
{2017, mds(2017, md(1, 2), md(1, 3), md(2, 6), md(4, 14), md(4, 17),
md(4, 25), md(6, 5), md(10, 23), md(12, 25), md(12, 26))},
{2018, mds(2018, md(1, 1), md(1, 2), md(2, 6), md(3, 30), md(4, 2),
md(4, 25), md(6, 4), md(10, 22), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_nzbd")
public void test_nzbd(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(NZBD.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar PLWA = GlobalHolidayCalendars.generateWarsaw();
public static Object[][] data_plwa() {
// based on government law data and stock exchange holidays
return new Object[][] {
{2013, mds(2013, md(1, 1), md(4, 1),
md(5, 1), md(5, 3), md(5, 30), md(8, 15), md(11, 1), md(11, 11), md(12, 24), md(12, 25), md(12, 26))},
{2014, mds(2014, md(1, 1), md(1, 6), md(4, 21),
md(5, 1), md(6, 19), md(8, 15), md(11, 11), md(12, 24), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(1, 6), md(4, 6),
md(5, 1), md(6, 4), md(11, 11), md(12, 24), md(12, 25), md(12, 31))},
{2016, mds(2016, md(1, 1), md(1, 6), md(3, 28),
md(5, 3), md(5, 26), md(8, 15), md(11, 1), md(11, 11), md(12, 26))},
{2017, mds(2017, md(1, 6), md(4, 17),
md(5, 1), md(5, 3), md(6, 15), md(8, 15), md(11, 1), md(12, 25), md(12, 26))},
{2018, mds(2018, md(1, 1), md(1, 6), md(4, 1), md(4, 2), md(5, 1), md(5, 3),
md(5, 20), md(5, 31), md(8, 15), md(11, 1), md(11, 11), md(11, 12), md(12, 24), md(12, 25), md(12, 26), md(12, 31))}
};
}
@ParameterizedTest
@MethodSource("data_plwa")
public void test_plwa(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(PLWA.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar SEST = GlobalHolidayCalendars.generateStockholm();
public static Object[][] data_sest() {
// official data from published fixing dates
return new Object[][] {
{2014, mds(2014, md(1, 1), md(1, 6), md(4, 18), md(4, 21),
md(5, 1), md(5, 29), md(6, 6), md(6, 20), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2015, mds(2015, md(1, 1), md(1, 6), md(4, 3), md(4, 6),
md(5, 1), md(5, 14), md(6, 19), md(12, 24), md(12, 25), md(12, 31))},
{2016, mds(2016, md(1, 1), md(1, 6), md(3, 25), md(3, 28),
md(5, 5), md(6, 6), md(6, 24), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_sest")
public void test_sest(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(SEST.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar ZAJO = GlobalHolidayCalendars.generateJohannesburg();
public static Object[][] data_zajo() {
// http://www.gov.za/about-sa/public-holidays
// https://web.archive.org/web/20151230214958/http://www.gov.za/about-sa/public-holidays
return new Object[][] {
{2015, mds(2015, md(1, 1), md(3, 21), md(4, 3), md(4, 6), md(4, 27), md(5, 1),
md(6, 16), md(8, 10), md(9, 24), md(12, 16), md(12, 25), md(12, 26))},
{2016, mds(2016, md(1, 1), md(3, 21), md(3, 25), md(3, 28), md(4, 27), md(5, 2),
md(6, 16), md(8, 3), md(8, 9), md(9, 24), md(12, 16), md(12, 26), md(12, 27))},
{2017, mds(2017, md(1, 1), md(1, 2), md(3, 21), md(4, 14), md(4, 17), md(4, 27), md(5, 1),
md(6, 16), md(8, 9), md(9, 25), md(12, 16), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_zajo")
public void test_zajo(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(ZAJO.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
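// the combined calendar must report a holiday whenever either source calendar does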
@Test
public void test_combinedWith() {
HolidayCalendar combined =
ImmutableHolidayCalendar.combined((ImmutableHolidayCalendar) JPTO, (ImmutableHolidayCalendar) USNY);
LocalDate date = LocalDate.of(1950, 1, 1);
while (date.getYear() < 2040) {
assertThat(combined.isHoliday(date)).as("Date: " + date).isEqualTo(JPTO.isHoliday(date) || USNY.isHoliday(date));
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
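// converts month-day pairs into the holiday dates for the given year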
private static List<LocalDate> mds(int year, MonthDay... monthDays) {
List<LocalDate> holidays = new ArrayList<>();
for (MonthDay md : monthDays) {
holidays.add(md.atYear(year));
}
return holidays;
}
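// shorthand for MonthDay.of(month, day)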
private static MonthDay md(int month, int day) {
return MonthDay.of(month, day);
}
//-------------------------------------------------------------------------
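// invokes the private constructor purely for code coverage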
public static void coverage() {
coverPrivateConstructor(GlobalHolidayCalendars.class);
}
}
|
modules/basics/src/test/java/com/opengamma/strata/basics/date/GlobalHolidayCalendarsTest.java
|
/*
* Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.basics.date;
import static com.opengamma.strata.collect.TestHelper.coverPrivateConstructor;
import static com.opengamma.strata.collect.TestHelper.date;
import static java.time.DayOfWeek.SATURDAY;
import static java.time.DayOfWeek.SUNDAY;
import static org.assertj.core.api.Assertions.assertThat;
import java.time.LocalDate;
import java.time.MonthDay;
import java.util.ArrayList;
import java.util.List;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import com.google.common.collect.ImmutableList;
/**
* Test {@code GlobalHolidayCalendars}.
*/
public class GlobalHolidayCalendarsTest {
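// Easter Sunday reference dates, expressed as {day, month, year}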
public static Object[][] data_easter() {
return new Object[][] {
{15, 4, 1900},
{7, 4, 1901},
{30, 3, 1902},
{12, 4, 1903},
{3, 4, 1904},
{23, 4, 1905},
{15, 4, 1906},
{31, 3, 1907},
{19, 4, 1908},
{11, 4, 1909},
{27, 3, 1910},
{16, 4, 1911},
{7, 4, 1912},
{23, 3, 1913},
{12, 4, 1914},
{4, 4, 1915},
{23, 4, 1916},
{8, 4, 1917},
{31, 3, 1918},
{20, 4, 1919},
{4, 4, 1920},
{27, 3, 1921},
{16, 4, 1922},
{1, 4, 1923},
{20, 4, 1924},
{12, 4, 1925},
{4, 4, 1926},
{17, 4, 1927},
{8, 4, 1928},
{31, 3, 1929},
{20, 4, 1930},
{5, 4, 1931},
{27, 3, 1932},
{16, 4, 1933},
{1, 4, 1934},
{21, 4, 1935},
{12, 4, 1936},
{28, 3, 1937},
{17, 4, 1938},
{9, 4, 1939},
{24, 3, 1940},
{13, 4, 1941},
{5, 4, 1942},
{25, 4, 1943},
{9, 4, 1944},
{1, 4, 1945},
{21, 4, 1946},
{6, 4, 1947},
{28, 3, 1948},
{17, 4, 1949},
{9, 4, 1950},
{25, 3, 1951},
{13, 4, 1952},
{5, 4, 1953},
{18, 4, 1954},
{10, 4, 1955},
{1, 4, 1956},
{21, 4, 1957},
{6, 4, 1958},
{29, 3, 1959},
{17, 4, 1960},
{2, 4, 1961},
{22, 4, 1962},
{14, 4, 1963},
{29, 3, 1964},
{18, 4, 1965},
{10, 4, 1966},
{26, 3, 1967},
{14, 4, 1968},
{6, 4, 1969},
{29, 3, 1970},
{11, 4, 1971},
{2, 4, 1972},
{22, 4, 1973},
{14, 4, 1974},
{30, 3, 1975},
{18, 4, 1976},
{10, 4, 1977},
{26, 3, 1978},
{15, 4, 1979},
{6, 4, 1980},
{19, 4, 1981},
{11, 4, 1982},
{3, 4, 1983},
{22, 4, 1984},
{7, 4, 1985},
{30, 3, 1986},
{19, 4, 1987},
{3, 4, 1988},
{26, 3, 1989},
{15, 4, 1990},
{31, 3, 1991},
{19, 4, 1992},
{11, 4, 1993},
{3, 4, 1994},
{16, 4, 1995},
{7, 4, 1996},
{30, 3, 1997},
{12, 4, 1998},
{4, 4, 1999},
{23, 4, 2000},
{15, 4, 2001},
{31, 3, 2002},
{20, 4, 2003},
{11, 4, 2004},
{27, 3, 2005},
{16, 4, 2006},
{8, 4, 2007},
{23, 3, 2008},
{12, 4, 2009},
{4, 4, 2010},
{24, 4, 2011},
{8, 4, 2012},
{31, 3, 2013},
{20, 4, 2014},
{5, 4, 2015},
{27, 3, 2016},
{16, 4, 2017},
{1, 4, 2018},
{21, 4, 2019},
{12, 4, 2020},
{4, 4, 2021},
{17, 4, 2022},
{9, 4, 2023},
{31, 3, 2024},
{20, 4, 2025},
{5, 4, 2026},
{28, 3, 2027},
{16, 4, 2028},
{1, 4, 2029},
{21, 4, 2030},
{13, 4, 2031},
{28, 3, 2032},
{17, 4, 2033},
{9, 4, 2034},
{25, 3, 2035},
{13, 4, 2036},
{5, 4, 2037},
{25, 4, 2038},
{10, 4, 2039},
{1, 4, 2040},
{21, 4, 2041},
{6, 4, 2042},
{29, 3, 2043},
{17, 4, 2044},
{9, 4, 2045},
{25, 3, 2046},
{14, 4, 2047},
{5, 4, 2048},
{18, 4, 2049},
{10, 4, 2050},
{2, 4, 2051},
{21, 4, 2052},
{6, 4, 2053},
{29, 3, 2054},
{18, 4, 2055},
{2, 4, 2056},
{22, 4, 2057},
{14, 4, 2058},
{30, 3, 2059},
{18, 4, 2060},
{10, 4, 2061},
{26, 3, 2062},
{15, 4, 2063},
{6, 4, 2064},
{29, 3, 2065},
{11, 4, 2066},
{3, 4, 2067},
{22, 4, 2068},
{14, 4, 2069},
{30, 3, 2070},
{19, 4, 2071},
{10, 4, 2072},
{26, 3, 2073},
{15, 4, 2074},
{7, 4, 2075},
{19, 4, 2076},
{11, 4, 2077},
{3, 4, 2078},
{23, 4, 2079},
{7, 4, 2080},
{30, 3, 2081},
{19, 4, 2082},
{4, 4, 2083},
{26, 3, 2084},
{15, 4, 2085},
{31, 3, 2086},
{20, 4, 2087},
{11, 4, 2088},
{3, 4, 2089},
{16, 4, 2090},
{8, 4, 2091},
{30, 3, 2092},
{12, 4, 2093},
{4, 4, 2094},
{24, 4, 2095},
{15, 4, 2096},
{31, 3, 2097},
{20, 4, 2098},
{12, 4, 2099},
};
}
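// checks the computed Easter Sunday against the reference dates above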
@ParameterizedTest
@MethodSource("data_easter")
public void test_easter(int day, int month, int year) {
assertThat(GlobalHolidayCalendars.easter(year)).isEqualTo(LocalDate.of(year, month, day));
}
//-------------------------------------------------------------------------
private static final HolidayCalendar GBLO = GlobalHolidayCalendars.generateLondon();
public static Object[][] data_gblo() {
return new Object[][] {
// Whitsun, Last Mon Aug - http://hansard.millbanksystems.com/commons/1964/mar/04/staggered-holidays
{1965, mds(1965, md(4, 16), md(4, 19), md(6, 7), md(8, 30), md(12, 27), md(12, 28))},
// Whitsun May - http://hansard.millbanksystems.com/commons/1964/mar/04/staggered-holidays
// 29th Aug - http://hansard.millbanksystems.com/written_answers/1965/nov/25/august-bank-holiday
{1966, mds(1966, md(4, 8), md(4, 11), md(5, 30), md(8, 29), md(12, 26), md(12, 27))},
// 29th May, 28th Aug - http://hansard.millbanksystems.com/written_answers/1965/jun/03/bank-holidays-1967-and-1968
{1967, mds(1967, md(3, 24), md(3, 27), md(5, 29), md(8, 28), md(12, 25), md(12, 26))},
// 3rd Jun, 2nd Sep - http://hansard.millbanksystems.com/written_answers/1965/jun/03/bank-holidays-1967-and-1968
{1968, mds(1968, md(4, 12), md(4, 15), md(6, 3), md(9, 2), md(12, 25), md(12, 26))},
// 26th May, 1st Sep - http://hansard.millbanksystems.com/written_answers/1967/mar/21/bank-holidays-1969-dates
{1969, mds(1969, md(4, 4), md(4, 7), md(5, 26), md(9, 1), md(12, 25), md(12, 26))},
// 25th May, 31st Aug - http://hansard.millbanksystems.com/written_answers/1967/jul/28/bank-holidays
{1970, mds(1970, md(3, 27), md(3, 30), md(5, 25), md(8, 31), md(12, 25), md(12, 28))},
// applying rules
{1971, mds(1971, md(4, 9), md(4, 12), md(5, 31), md(8, 30), md(12, 27), md(12, 28))},
{2009, mds(2009, md(1, 1), md(4, 10), md(4, 13), md(5, 4), md(5, 25), md(8, 31), md(12, 25), md(12, 28))},
{2010, mds(2010, md(1, 1), md(4, 2), md(4, 5), md(5, 3), md(5, 31), md(8, 30), md(12, 27), md(12, 28))},
// https://www.gov.uk/bank-holidays
{2012, mds(2012, md(1, 2), md(4, 6), md(4, 9), md(5, 7), md(6, 4), md(6, 5), md(8, 27), md(12, 25), md(12, 26))},
{2013, mds(2013, md(1, 1), md(3, 29), md(4, 1), md(5, 6), md(5, 27), md(8, 26), md(12, 25), md(12, 26))},
{2014, mds(2014, md(1, 1), md(4, 18), md(4, 21), md(5, 5), md(5, 26), md(8, 25), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(4, 3), md(4, 6), md(5, 4), md(5, 25), md(8, 31), md(12, 25), md(12, 28))},
{2016, mds(2016, md(1, 1), md(3, 25), md(3, 28), md(5, 2), md(5, 30), md(8, 29), md(12, 26), md(12, 27))},
{2020, mds(2020, md(1, 1), md(4, 10), md(4, 13), md(5, 8), md(5, 25), md(8, 31), md(12, 25), md(12, 28))},
{2022, mds(2022, md(1, 3), md(4, 15), md(4, 18), md(5, 2), md(6, 2), md(6, 3), md(8, 29), md(12, 26), md(12, 27))},
};
}
@ParameterizedTest
@MethodSource("data_gblo")
public void test_gblo(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(GBLO.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar FRPA = GlobalHolidayCalendars.generateParis();
public static Object[][] data_frpa() {
return new Object[][] {
// dates are not shifted if they fall on a weekend
{2003, mds(2003, md(1, 1), md(4, 18), md(4, 21), md(5, 1), md(5, 8), md(5, 29),
md(6, 9), md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2004, mds(2004, md(1, 1), md(4, 9), md(4, 12), md(5, 1), md(5, 8), md(5, 20), md(5, 31),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2005, mds(2005, md(1, 1), md(3, 25), md(3, 28), md(5, 1), md(5, 5), md(5, 8),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2006, mds(2006, md(1, 1), md(4, 14), md(4, 17), md(5, 1), md(5, 8), md(5, 25),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2007, mds(2007, md(1, 1), md(4, 6), md(4, 9), md(5, 1), md(5, 8), md(5, 17),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2008, mds(2008, md(1, 1), md(3, 21), md(3, 24), md(5, 1), md(5, 8), md(5, 12), md(5, 24),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2012, mds(2012, md(1, 1), md(4, 6), md(4, 9), md(5, 1), md(5, 8), md(5, 17),
md(5, 28), md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2013, mds(2013, md(1, 1), md(3, 29), md(4, 1), md(5, 1), md(5, 8), md(5, 9), md(5, 20),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2014, mds(2014, md(1, 1), md(4, 18), md(4, 21), md(5, 1), md(5, 8), md(5, 29),
md(6, 9), md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(4, 3), md(4, 6), md(5, 1), md(5, 8), md(5, 14), md(5, 25),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
{2016, mds(2016, md(1, 1), md(3, 25), md(3, 28), md(5, 1), md(5, 5), md(5, 8), md(5, 16),
md(7, 14), md(8, 15), md(11, 1), md(11, 11), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_frpa")
public void test_frpa(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(FRPA.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar DEFR = GlobalHolidayCalendars.generateFrankfurt();
public static Object[][] data_defr() {
return new Object[][] {
// dates are not shifted if they fall on a weekend
{2014, mds(2014, md(1, 1), md(4, 18), md(4, 21), md(5, 1), md(5, 29), md(6, 9), md(6, 19),
md(10, 3), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2015, mds(2015, md(1, 1), md(4, 3), md(4, 6), md(5, 1), md(5, 14), md(5, 25), md(6, 4),
md(10, 3), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2016, mds(2016, md(1, 1), md(3, 25), md(3, 28), md(5, 1), md(5, 5), md(5, 16), md(5, 26),
md(10, 3), md(12, 25), md(12, 26), md(12, 31))},
{2017, mds(2017, md(1, 1), md(4, 14), md(4, 17), md(5, 1), md(5, 25), md(6, 5), md(6, 15),
md(10, 3), md(10, 31), md(12, 25), md(12, 26), md(12, 31))},
};
}
@ParameterizedTest
@MethodSource("data_defr")
public void test_defr(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(DEFR.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar CHZU = GlobalHolidayCalendars.generateZurich();
public static Object[][] data_chzu() {
return new Object[][] {
// dates are not shifted if they fall on a weekend
{2012, mds(2012, md(1, 1), md(1, 2), md(4, 6), md(4, 9), md(5, 1), md(5, 17), md(5, 28),
md(8, 1), md(12, 25), md(12, 26))},
{2013, mds(2013, md(1, 1), md(1, 2), md(3, 29), md(4, 1), md(5, 1), md(5, 9), md(5, 20),
md(8, 1), md(12, 25), md(12, 26))},
{2014, mds(2014, md(1, 1), md(1, 2), md(4, 18), md(4, 21), md(5, 1), md(5, 29), md(6, 9),
md(8, 1), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(1, 2), md(4, 3), md(4, 6), md(5, 1), md(5, 14), md(5, 25),
md(8, 1), md(12, 25), md(12, 26))},
{2016, mds(2016, md(1, 1), md(1, 2), md(3, 25), md(3, 28), md(5, 1), md(5, 5), md(5, 16),
md(8, 1), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_chzu")
public void test_chzu(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(CHZU.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar EUTA = GlobalHolidayCalendars.generateEuropeanTarget();
public static Object[][] data_euta() {
return new Object[][] {
// 1997 - 1998 (testing phase), Jan 1, christmas day
{1997, mds(1997, md(1, 1), md(12, 25))},
{1998, mds(1998, md(1, 1), md(12, 25))},
// in 1999, Jan 1, christmas day, Dec 26, Dec 31
{1999, mds(1999, md(1, 1), md(12, 25), md(12, 31))},
// in 2000, Jan 1, good friday, easter monday, May 1, christmas day, Dec 26
{2000, mds(2000, md(1, 1), md(4, 21), md(4, 24), md(5, 1), md(12, 25), md(12, 26))},
// in 2001, Jan 1, good friday, easter monday, May 1, christmas day, Dec 26, Dec 31
{2001, mds(2001, md(1, 1), md(4, 13), md(4, 16), md(5, 1), md(12, 25), md(12, 26), md(12, 31))},
// from 2002, Jan 1, good friday, easter monday, May 1, christmas day, Dec 26
{2002, mds(2002, md(1, 1), md(3, 29), md(4, 1), md(5, 1), md(12, 25), md(12, 26))},
{2003, mds(2003, md(1, 1), md(4, 18), md(4, 21), md(5, 1), md(12, 25), md(12, 26))},
// http://www.ecb.europa.eu/home/html/holidays.en.html
{2014, mds(2014, md(1, 1), md(4, 18), md(4, 21), md(5, 1), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(4, 3), md(4, 6), md(5, 1), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_euta")
public void test_euta(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(EUTA.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar USGS = GlobalHolidayCalendars.generateUsGovtSecurities();
public static Object[][] data_usgs() {
return new Object[][] {
// http://www.sifma.org/uploadedfiles/research/statistics/statisticsfiles/misc-us-historical-holiday-market-recommendations-sifma.pdf?n=53384
{1996, mds(1996, md(1, 1), md(1, 15), md(2, 19), md(4, 5), md(5, 27), md(7, 4),
md(9, 2), md(10, 14), md(11, 11), md(11, 28), md(12, 25))},
{1997, mds(1997, md(1, 1), md(1, 20), md(2, 17), md(3, 28), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{1998, mds(1998, md(1, 1), md(1, 19), md(2, 16), md(4, 10), md(5, 25), md(7, 3),
md(9, 7), md(10, 12), md(11, 11), md(11, 26), md(12, 25))},
{1999, mds(1999, md(1, 1), md(1, 18), md(2, 15), md(4, 2), md(5, 31), md(7, 5),
md(9, 6), md(10, 11), md(11, 11), md(11, 25), md(12, 24))},
{2000, mds(2000, md(1, 17), md(2, 21), md(4, 21), md(5, 29), md(7, 4),
md(9, 4), md(10, 9), md(11, 23), md(12, 25))},
{2001, mds(2001, md(1, 1), md(1, 15), md(2, 19), md(4, 13), md(5, 28), md(7, 4),
md(9, 3), md(10, 8), md(11, 12), md(11, 22), md(12, 25))},
{2002, mds(2002, md(1, 1), md(1, 21), md(2, 18), md(3, 29), md(5, 27), md(7, 4),
md(9, 2), md(10, 14), md(11, 11), md(11, 28), md(12, 25))},
{2003, mds(2003, md(1, 1), md(1, 20), md(2, 17), md(4, 18), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2004, mds(2004, md(1, 1), md(1, 19), md(2, 16), md(4, 9), md(5, 31), md(7, 5),
md(9, 6), md(10, 11), md(11, 11), md(11, 25), md(12, 24))},
{2005, mds(2005, md(1, 17), md(2, 21), md(3, 25), md(5, 30), md(7, 4),
md(9, 5), md(10, 10), md(11, 11), md(11, 24), md(12, 26))},
{2006, mds(2006, md(1, 2), md(1, 16), md(2, 20), md(4, 14), md(5, 29), md(7, 4),
md(9, 4), md(10, 9), md(11, 23), md(12, 25))},
{2007, mds(2007, md(1, 1), md(1, 15), md(2, 19), md(4, 6), md(5, 28), md(7, 4),
md(9, 3), md(10, 8), md(11, 12), md(11, 22), md(12, 25))},
{2008, mds(2008, md(1, 1), md(1, 21), md(2, 18), md(3, 21), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2009, mds(2009, md(1, 1), md(1, 19), md(2, 16), md(4, 10), md(5, 25), md(7, 3),
md(9, 7), md(10, 12), md(11, 11), md(11, 26), md(12, 25))},
{2010, mds(2010, md(1, 1), md(1, 18), md(2, 15), md(4, 2), md(5, 31), md(7, 5),
md(9, 6), md(10, 11), md(11, 11), md(11, 25), md(12, 24))},
{2011, mds(2011, md(1, 17), md(2, 21), md(4, 22), md(5, 30), md(7, 4),
md(9, 5), md(10, 10), md(11, 11), md(11, 24), md(12, 26))},
{2012, mds(2012, md(1, 2), md(1, 16), md(2, 20), md(4, 6), md(5, 28), md(7, 4),
md(9, 3), md(10, 8), md(10, 30), md(11, 12), md(11, 22), md(12, 25))},
{2013, mds(2013, md(1, 1), md(1, 21), md(2, 18), md(3, 29), md(5, 27), md(7, 4),
md(9, 2), md(10, 14), md(11, 11), md(11, 28), md(12, 25))},
{2014, mds(2014, md(1, 1), md(1, 20), md(2, 17), md(4, 18), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2015, mds(2015, md(1, 1), md(1, 19), md(2, 16), md(4, 3), md(5, 25), md(7, 3),
md(9, 7), md(10, 12), md(11, 11), md(11, 26), md(12, 25))},
};
}
@ParameterizedTest
@MethodSource("data_usgs")
public void test_usgs(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(USGS.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar USNY = GlobalHolidayCalendars.generateUsNewYork();
public static Object[][] data_usny() {
return new Object[][] {
// http://www.cs.ny.gov/attendance_leave/2012_legal_holidays.cfm
// change year for other pages
{2008, mds(2008, md(1, 1), md(1, 21), md(2, 18), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2009, mds(2009, md(1, 1), md(1, 19), md(2, 16), md(5, 25), md(7, 4),
md(9, 7), md(10, 12), md(11, 11), md(11, 26), md(12, 25))},
{2010, mds(2010, md(1, 1), md(1, 18), md(2, 15), md(5, 31), md(7, 5),
md(9, 6), md(10, 11), md(11, 11), md(11, 25), md(12, 25))},
{2011, mds(2011, md(1, 1), md(1, 17), md(2, 21), md(5, 30), md(7, 4),
md(9, 5), md(10, 10), md(11, 11), md(11, 24), md(12, 26))},
{2012, mds(2012, md(1, 2), md(1, 16), md(2, 20), md(5, 28), md(7, 4),
md(9, 3), md(10, 8), md(11, 12), md(11, 22), md(12, 25))},
{2013, mds(2013, md(1, 1), md(1, 21), md(2, 18), md(5, 27), md(7, 4),
md(9, 2), md(10, 14), md(11, 11), md(11, 28), md(12, 25))},
{2014, mds(2014, md(1, 1), md(1, 20), md(2, 17), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2015, mds(2015, md(1, 1), md(1, 19), md(2, 16), md(5, 25), md(7, 4),
md(9, 7), md(10, 12), md(11, 11), md(11, 26), md(12, 25))},
};
}
@ParameterizedTest
@MethodSource("data_usny")
public void test_usny(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(USNY.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar NYFD = GlobalHolidayCalendars.generateNewYorkFed();
public static Object[][] data_nyfd() {
return new Object[][] {
// http://www.ny.frb.org/aboutthefed/holiday_schedule.html
// http://web.archive.org/web/20080403230805/http://www.ny.frb.org/aboutthefed/holiday_schedule.html
// http://web.archive.org/web/20100827003740/http://www.ny.frb.org/aboutthefed/holiday_schedule.html
// http://web.archive.org/web/20031007222458/http://www.ny.frb.org/aboutthefed/holiday_schedule.html
// http://www.federalreserve.gov/aboutthefed/k8.htm
{2003, mds(2003, md(1, 1), md(1, 20), md(2, 17), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2004, mds(2004, md(1, 1), md(1, 19), md(2, 16), md(5, 31), md(7, 5),
md(9, 6), md(10, 11), md(11, 11), md(11, 25))},
{2005, mds(2005, md(1, 17), md(2, 21), md(5, 30), md(7, 4),
md(9, 5), md(10, 10), md(11, 11), md(11, 24), md(12, 26))},
{2006, mds(2006, md(1, 2), md(1, 16), md(2, 20), md(5, 29), md(7, 4),
md(9, 4), md(10, 9), md(11, 23), md(12, 25))},
{2007, mds(2007, md(1, 1), md(1, 15), md(2, 19), md(5, 28), md(7, 4),
md(9, 3), md(10, 8), md(11, 12), md(11, 22), md(12, 25))},
{2008, mds(2008, md(1, 1), md(1, 21), md(2, 18), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2009, mds(2009, md(1, 1), md(1, 19), md(2, 16), md(5, 25),
md(9, 7), md(10, 12), md(11, 11), md(11, 26), md(12, 25))},
{2010, mds(2010, md(1, 1), md(1, 18), md(2, 15), md(5, 31), md(7, 5),
md(9, 6), md(10, 11), md(11, 11), md(11, 25))},
{2011, mds(2011, md(1, 17), md(2, 21), md(5, 30), md(7, 4),
md(9, 5), md(10, 10), md(11, 11), md(11, 24), md(12, 26))},
{2012, mds(2012, md(1, 2), md(1, 16), md(2, 20), md(5, 28), md(7, 4),
md(9, 3), md(10, 8), md(11, 12), md(11, 22), md(12, 25))},
{2013, mds(2013, md(1, 1), md(1, 21), md(2, 18), md(5, 27), md(7, 4),
md(9, 2), md(10, 14), md(11, 11), md(11, 28), md(12, 25))},
{2014, mds(2014, md(1, 1), md(1, 20), md(2, 17), md(5, 26), md(7, 4),
md(9, 1), md(10, 13), md(11, 11), md(11, 27), md(12, 25))},
{2015, mds(2015, md(1, 1), md(1, 19), md(2, 16), md(5, 25),
md(9, 7), md(10, 12), md(11, 11), md(11, 26), md(12, 25))},
{2016, mds(2016, md(1, 1), md(1, 18), md(2, 15), md(5, 30), md(7, 4),
md(9, 5), md(10, 10), md(11, 11), md(11, 24), md(12, 26))},
{2017, mds(2017, md(1, 2), md(1, 16), md(2, 20), md(5, 29), md(7, 4),
md(9, 4), md(10, 9), md(11, 23), md(12, 25))},
{2018, mds(2018, md(1, 1), md(1, 15), md(2, 19), md(5, 28), md(7, 4),
md(9, 3), md(10, 8), md(11, 12), md(11, 22), md(12, 25))},
};
}
@ParameterizedTest
@MethodSource("data_nyfd")
public void test_nyfd(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(NYFD.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar NYSE = GlobalHolidayCalendars.generateNewYorkStockExchange();
public static Object[][] data_nyse() {
return new Object[][] {
// https://www.nyse.com/markets/hours-calendars
// http://web.archive.org/web/20110320011340/http://www.nyse.com/about/newsevents/1176373643795.html?sa_campaign=/internal_ads/homepage/08262008holidays
// http://web.archive.org/web/20080901164729/http://www.nyse.com/about/newsevents/1176373643795.html?sa_campaign=/internal_ads/homepage/08262008holidays
{2008, mds(2008, md(1, 1), md(1, 21), md(2, 18), md(3, 21), md(5, 26), md(7, 4),
md(9, 1), md(11, 27), md(12, 25))},
{2009, mds(2009, md(1, 1), md(1, 19), md(2, 16), md(4, 10), md(5, 25), md(7, 3),
md(9, 7), md(11, 26), md(12, 25))},
{2010, mds(2010, md(1, 1), md(1, 18), md(2, 15), md(4, 2), md(5, 31), md(7, 5),
md(9, 6), md(11, 25), md(12, 24))},
{2011, mds(2011, md(1, 1), md(1, 17), md(2, 21), md(4, 22), md(5, 30), md(7, 4),
md(9, 5), md(11, 24), md(12, 26))},
{2012, mds(2012, md(1, 2), md(1, 16), md(2, 20), md(4, 6), md(5, 28), md(7, 4),
md(9, 3), md(10, 30), md(11, 22), md(12, 25))},
{2013, mds(2013, md(1, 1), md(1, 21), md(2, 18), md(3, 29), md(5, 27), md(7, 4),
md(9, 2), md(11, 28), md(12, 25))},
{2014, mds(2014, md(1, 1), md(1, 20), md(2, 17), md(4, 18), md(5, 26), md(7, 4),
md(9, 1), md(11, 27), md(12, 25))},
{2015, mds(2015, md(1, 1), md(1, 19), md(2, 16), md(4, 3), md(5, 25), md(7, 3),
md(9, 7), md(11, 26), md(12, 25))},
};
}
@ParameterizedTest
@MethodSource("data_nyse")
public void test_nyse(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(NYSE.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar JPTO = GlobalHolidayCalendars.generateTokyo();
public static Object[][] data_jpto() {
return new Object[][] {
// https://www.boj.or.jp/en/about/outline/holi.htm/
// http://web.archive.org/web/20110513190217/http://www.boj.or.jp/en/about/outline/holi.htm/
// https://www.japanspecialist.co.uk/travel-tips/national-holidays-in-japan/
{1999, mds(1999, md(1, 1), md(1, 2), md(1, 3), md(1, 15), md(2, 11), md(3, 22), md(4, 29), md(5, 3), md(5, 4), md(5, 5),
md(7, 20), md(9, 15), md(9, 23), md(10, 11), md(11, 3), md(11, 23), md(12, 23), md(12, 31))},
{2000, mds(2000, md(1, 1), md(1, 2), md(1, 3), md(1, 10), md(2, 11), md(3, 20), md(4, 29), md(5, 3), md(5, 4), md(5, 5),
md(7, 20), md(9, 15), md(9, 23), md(10, 9), md(11, 3), md(11, 23), md(12, 23), md(12, 31))},
{2001, mds(2001, md(1, 1), md(1, 2), md(1, 3), md(1, 8), md(2, 12), md(3, 20), md(4, 30), md(5, 3), md(5, 4), md(5, 5),
md(7, 20), md(9, 15), md(9, 24), md(10, 8), md(11, 3), md(11, 23), md(12, 24), md(12, 31))},
{2002, mds(2002, md(1, 1), md(1, 2), md(1, 3), md(1, 14), md(2, 11), md(3, 21), md(4, 29), md(5, 3), md(5, 4), md(5, 6),
md(7, 20), md(9, 16), md(9, 23), md(10, 14), md(11, 4), md(11, 23), md(12, 23), md(12, 31))},
{2003, mds(2003, md(1, 1), md(1, 2), md(1, 3), md(1, 13), md(2, 11), md(3, 21), md(4, 29), md(5, 3), md(5, 4), md(5, 5),
md(7, 21), md(9, 15), md(9, 23), md(10, 13), md(11, 3), md(11, 24), md(12, 23), md(12, 31))},
{2004, mds(2004, md(1, 1), md(1, 2), md(1, 3), md(1, 12), md(2, 11), md(3, 20), md(4, 29), md(5, 3), md(5, 4), md(5, 5),
md(7, 19), md(9, 20), md(9, 23), md(10, 11), md(11, 3), md(11, 23), md(12, 23), md(12, 31))},
{2005, mds(2005, md(1, 1), md(1, 2), md(1, 3), md(1, 10), md(2, 11), md(3, 21), md(4, 29), md(5, 3), md(5, 4), md(5, 5),
md(7, 18), md(9, 19), md(9, 23), md(10, 10), md(11, 3), md(11, 23), md(12, 23), md(12, 31))},
{2006, mds(2006, md(1, 1), md(1, 2), md(1, 3), md(1, 9), md(2, 11), md(3, 21), md(4, 29), md(5, 3), md(5, 4), md(5, 5),
md(7, 17), md(9, 18), md(9, 23), md(10, 9), md(11, 3), md(11, 23), md(12, 23), md(12, 31))},
{2011, mds(2011, md(1, 1), md(1, 2), md(1, 3), md(1, 10), md(2, 11), md(3, 21), md(4, 29), md(5, 3), md(5, 4), md(5, 5),
md(7, 18), md(9, 19), md(9, 23), md(10, 10), md(11, 3), md(11, 23), md(12, 23), md(12, 31))},
{2012, mds(2012, md(1, 1), md(1, 2), md(1, 3), md(1, 9), md(2, 11), md(3, 20), md(4, 30), md(5, 3), md(5, 4), md(5, 5),
md(7, 16), md(9, 17), md(9, 22), md(10, 8), md(11, 3), md(11, 23), md(12, 24), md(12, 31))},
{2013, mds(2013, md(1, 1), md(1, 2), md(1, 3), md(1, 14), md(2, 11), md(3, 20), md(4, 29),
md(5, 3), md(5, 4), md(5, 5), md(5, 6),
md(7, 15), md(9, 16), md(9, 23), md(10, 14), md(11, 4), md(11, 23), md(12, 23), md(12, 31))},
{2014, mds(2014, md(1, 1), md(1, 2), md(1, 3), md(1, 13), md(2, 11), md(3, 21), md(4, 29),
md(5, 3), md(5, 4), md(5, 5), md(5, 6),
md(7, 21), md(9, 15), md(9, 23), md(10, 13), md(11, 3), md(11, 24), md(12, 23), md(12, 31))},
{2015, mds(2015, md(1, 1), md(1, 2), md(1, 3), md(1, 12), md(2, 11), md(3, 21), md(4, 29),
md(5, 3), md(5, 4), md(5, 5), md(5, 6),
md(7, 20), md(9, 21), md(9, 22), md(9, 23), md(10, 12), md(11, 3), md(11, 23), md(12, 23), md(12, 31))},
{2018, mds(2018, md(1, 1), md(1, 2), md(1, 3), md(1, 8), md(2, 12), md(3, 21), md(4, 30),
md(5, 3), md(5, 4), md(5, 5), md(7, 16), md(8, 11), md(9, 17), md(9, 24),
md(10, 8), md(11, 3), md(11, 23), md(12, 23), md(12, 24), md(12, 31))},
{2019, mds(2019, md(1, 1), md(1, 2), md(1, 3), md(1, 14), md(2, 11), md(3, 21), md(4, 29), md(4, 30),
md(5, 1), md(5, 2), md(5, 3), md(5, 4), md(5, 5), md(5, 6), md(7, 15), md(8, 12), md(9, 16), md(9, 23),
md(10, 14), md(10, 22), md(11, 4), md(11, 23), md(12, 31))},
{2020, mds(2020, md(1, 1), md(1, 2), md(1, 3), md(1, 13), md(2, 11), md(2, 24), md(3, 20), md(4, 29),
md(5, 3), md(5, 4), md(5, 5), md(5, 6), md(7, 23), md(7, 24), md(8, 10), md(9, 21), md(9, 22),
md(11, 3), md(11, 23), md(12, 31))},
{2021, mds(2021, md(1, 1), md(1, 11), md(2, 11), md(2, 23), md(3, 20), md(4, 29),
md(5, 3), md(5, 4), md(5, 5), md(7, 22), md(7, 23), md(8, 9), md(9, 20),
md(9, 23), md(11, 3), md(11, 23), md(12, 31))},
};
}
@ParameterizedTest
@MethodSource("data_jpto")
public void test_jpto(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(JPTO.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar AUSY = GlobalHolidayCalendars.generateSydney();
public static Object[][] data_ausy() {
return new Object[][] {
{2012, mds(2012, md(1, 1), md(1, 2), md(1, 26), md(4, 6), md(4, 7), md(4, 8), md(4, 9),
md(4, 25), md(6, 11), md(8, 6), md(10, 1), md(12, 25), md(12, 26))},
{2013, mds(2013, md(1, 1), md(1, 26), md(1, 28), md(3, 29), md(3, 30), md(3, 31), md(4, 1),
md(4, 25), md(6, 10), md(8, 5), md(10, 7), md(12, 25), md(12, 26))},
{2014, mds(2014, md(1, 1), md(1, 26), md(1, 27), md(4, 18), md(4, 19), md(4, 20), md(4, 21),
md(4, 25), md(6, 9), md(8, 4), md(10, 6), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(1, 26), md(4, 3), md(4, 4), md(4, 5), md(4, 6), md(4, 25),
md(6, 8), md(8, 3), md(10, 5), md(12, 25), md(12, 26), md(12, 27), md(12, 28))},
{2016, mds(2016, md(1, 1), md(1, 26), md(3, 25), md(3, 26), md(3, 27), md(3, 28),
md(4, 25), md(6, 13), md(8, 1), md(10, 3), md(12, 25), md(12, 26), md(12, 27))},
{2017, mds(2017, md(1, 1), md(1, 2), md(1, 26), md(4, 14), md(4, 15), md(4, 16), md(4, 17),
md(4, 25), md(6, 12), md(8, 7), md(10, 2), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_ausy")
public void test_ausy(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(AUSY.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar BRBD = GlobalHolidayCalendars.generateBrazil();
public static Object[][] data_brbd() {
// http://www.planalto.gov.br/ccivil_03/leis/2002/L10607.htm
// fixing data
return new Object[][] {
{2013, mds(2013, md(1, 1), md(2, 11), md(2, 12), md(3, 29), md(4, 21), md(5, 1),
md(5, 30), md(9, 7), md(10, 12), md(11, 2), md(11, 15), md(12, 25))},
{2014, mds(2014, md(1, 1), md(3, 3), md(3, 4), md(4, 18), md(4, 21), md(5, 1),
md(6, 19), md(9, 7), md(10, 12), md(11, 2), md(11, 15), md(12, 25))},
{2015, mds(2015, md(1, 1), md(2, 16), md(2, 17), md(4, 3), md(4, 21), md(5, 1),
md(6, 4), md(9, 7), md(10, 12), md(11, 2), md(11, 15), md(12, 25))},
{2016, mds(2016, md(1, 1), md(2, 8), md(2, 9), md(3, 25), md(4, 21), md(5, 1),
md(5, 26), md(9, 7), md(10, 12), md(11, 2), md(11, 15), md(12, 25))},
};
}
@ParameterizedTest
@MethodSource("data_brbd")
public void test_brbd(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(BRBD.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar CAMO = GlobalHolidayCalendars.generateMontreal();
public static Object[][] data_camo() {
// https://www.bankofcanada.ca/about/contact-information/bank-of-canada-holiday-schedule/
// the schedule also indicates the day after New Year's Day and Boxing Day, but no other sources confirm this
return new Object[][] {
{2017, mds(2017, md(1, 2), md(4, 14),
md(5, 22), md(6, 26), md(7, 3), md(9, 4), md(10, 9), md(12, 25))},
{2018, mds(2018, md(1, 1), md(3, 30),
md(5, 21), md(6, 25), md(7, 2), md(9, 3), md(10, 8), md(12, 25))},
};
}
@ParameterizedTest
@MethodSource("data_camo")
public void test_camo(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(CAMO.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar CATO = GlobalHolidayCalendars.generateToronto();
public static Object[][] data_cato() {
return new Object[][] {
{2009, mds(2009, md(1, 1), md(2, 16), md(4, 10),
md(5, 18), md(7, 1), md(8, 3), md(9, 7), md(10, 12), md(11, 11), md(12, 25), md(12, 28))},
{2010, mds(2010, md(1, 1), md(2, 15), md(4, 2),
md(5, 24), md(7, 1), md(8, 2), md(9, 6), md(10, 11), md(11, 11), md(12, 27), md(12, 28))},
{2011, mds(2011, md(1, 3), md(2, 21), md(4, 22),
md(5, 23), md(7, 1), md(8, 1), md(9, 5), md(10, 10), md(11, 11), md(12, 26), md(12, 27))},
{2012, mds(2012, md(1, 2), md(2, 20), md(4, 6),
md(5, 21), md(7, 2), md(8, 6), md(9, 3), md(10, 8), md(11, 12), md(12, 25), md(12, 26))},
{2013, mds(2013, md(1, 1), md(2, 18), md(3, 29),
md(5, 20), md(7, 1), md(8, 5), md(9, 2), md(10, 14), md(11, 11), md(12, 25), md(12, 26))},
{2014, mds(2014, md(1, 1), md(2, 17), md(4, 18),
md(5, 19), md(7, 1), md(8, 4), md(9, 1), md(10, 13), md(11, 11), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(2, 16), md(4, 3),
md(5, 18), md(7, 1), md(8, 3), md(9, 7), md(10, 12), md(11, 11), md(12, 25), md(12, 28))},
{2016, mds(2016, md(1, 1), md(2, 15), md(3, 25),
md(5, 23), md(7, 1), md(8, 1), md(9, 5), md(10, 10), md(11, 11), md(12, 26), md(12, 27))},
};
}
@ParameterizedTest
@MethodSource("data_cato")
public void test_cato(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(CATO.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar CZPR = GlobalHolidayCalendars.generatePrague();
public static Object[][] data_czpr() {
// official data from Czech National Bank
// https://www.cnb.cz/en/public/media_service/schedules/media_svatky.html
return new Object[][] {
{2008, mds(2008, md(1, 1), md(3, 24), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2009, mds(2009, md(1, 1), md(4, 13), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2010, mds(2010, md(1, 1), md(4, 5), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2011, mds(2011, md(1, 1), md(4, 25), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2012, mds(2012, md(1, 1), md(4, 9), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2013, mds(2013, md(1, 1), md(4, 1), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2014, mds(2014, md(1, 1), md(4, 21), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(4, 6), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2016, mds(2016, md(1, 1), md(3, 25), md(3, 28), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
{2017, mds(2017, md(1, 1), md(4, 14), md(4, 17), md(5, 1), md(5, 8),
md(7, 5), md(7, 6), md(9, 28), md(10, 28), md(11, 17), md(12, 24), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_czpr")
public void test_czpr(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(CZPR.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar DKCO = GlobalHolidayCalendars.generateCopenhagen();
public static Object[][] data_dkco() {
// official data from Danish Bankers association via web archive
return new Object[][] {
{2013, mds(2013, md(1, 1), md(3, 28), md(3, 29), md(4, 1),
md(4, 26), md(5, 9), md(5, 10), md(5, 20), md(6, 5), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2014, mds(2014, md(1, 1), md(4, 17), md(4, 18), md(4, 21),
md(5, 16), md(5, 29), md(5, 30), md(6, 5), md(6, 9), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2015, mds(2015, md(1, 1), md(4, 2), md(4, 3), md(4, 6),
md(5, 1), md(5, 14), md(5, 15), md(5, 25), md(6, 5), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2016, mds(2016, md(1, 1), md(3, 24), md(3, 25), md(3, 28),
md(4, 22), md(5, 5), md(5, 6), md(5, 16), md(6, 5), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
};
}
@ParameterizedTest
@MethodSource("data_dkco")
public void test_dkco(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(DKCO.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar HUBU = GlobalHolidayCalendars.generateBudapest();
public static Object[][] data_hubu() {
// http://www.mnb.hu/letoltes/bubor2.xls
// http://holidays.kayaposoft.com/public_holidays.php?year=2013&country=hun&region=#
return new Object[][] {
{2012, mds(2012, md(3, 15), md(3, 16), md(4, 9), md(4, 30), md(5, 1), md(5, 28),
md(8, 20), md(10, 22), md(10, 23), md(11, 1), md(11, 2), md(12, 24), md(12, 25), md(12, 26), md(12, 31)),
ImmutableList.of(date(2012, 3, 24), date(2012, 5, 5), date(2012, 10, 27),
date(2012, 11, 10), date(2012, 12, 15), date(2012, 12, 29))},
{2013, mds(2013, md(1, 1), md(3, 15), md(4, 1), md(5, 1), md(5, 20),
md(8, 19), md(8, 20), md(10, 23), md(11, 1), md(12, 24), md(12, 25), md(12, 26), md(12, 27)),
ImmutableList.of(date(2013, 8, 24), date(2013, 12, 7), date(2013, 12, 21))},
{2014, mds(2014, md(1, 1), md(3, 15), md(4, 21), md(5, 1), md(5, 2),
md(6, 9), md(8, 20), md(10, 23), md(10, 24), md(12, 24), md(12, 25), md(12, 26)),
ImmutableList.of(date(2014, 5, 10), date(2014, 10, 18))},
{2015, mds(2015, md(1, 1), md(1, 2), md(3, 15), md(4, 6), md(5, 1), md(5, 25),
md(8, 20), md(8, 21), md(10, 23), md(12, 24), md(12, 25), md(12, 26)),
ImmutableList.of(date(2015, 1, 10), date(2015, 8, 8), date(2015, 12, 12))},
{2016, mds(2016, md(1, 1), md(3, 14), md(3, 15), md(3, 28), md(5, 1), md(5, 16),
md(10, 31), md(11, 1), md(12, 24), md(12, 25), md(12, 26)),
ImmutableList.of(date(2016, 3, 5), date(2016, 10, 15))},
{2020, mds(2020, md(1, 1), md(3, 15), md(4, 10), md(4, 13), md(5, 1), md(6, 1),
md(8, 20), md(8, 21), md(10, 23), md(12, 24), md(12, 25), md(12, 26)),
ImmutableList.of(date(2020, 8, 29), date(2020, 12, 12))},
};
}
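// a date listed as a working day is a business day even if it falls on a Saturday,
// so it overrides the weekend rule applied below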
@ParameterizedTest
@MethodSource("data_hubu")
public void test_hubu(int year, List<LocalDate> holidays, List<LocalDate> workDays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = (holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY) &&
!workDays.contains(date);
assertThat(HUBU.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar MXMC = GlobalHolidayCalendars.generateMexicoCity();
public static Object[][] data_mxmc() {
// http://www.banxico.org.mx/SieInternet/consultarDirectorioInternetAction.do?accion=consultarCuadro&idCuadro=CF111&locale=en
return new Object[][] {
{2012, mds(2012, md(1, 1), md(2, 6), md(3, 19), md(4, 5), md(4, 6),
md(5, 1), md(9, 16), md(11, 2), md(11, 19), md(12, 12), md(12, 25))},
{2013, mds(2013, md(1, 1), md(2, 4), md(3, 18), md(3, 28), md(3, 29),
md(5, 1), md(9, 16), md(11, 2), md(11, 18), md(12, 12), md(12, 25))},
{2014, mds(2014, md(1, 1), md(2, 3), md(3, 17), md(4, 17), md(4, 18),
md(5, 1), md(9, 16), md(11, 2), md(11, 17), md(12, 12), md(12, 25))},
{2015, mds(2015, md(1, 1), md(2, 2), md(3, 16), md(4, 2), md(4, 3),
md(5, 1), md(9, 16), md(11, 2), md(11, 16), md(12, 12), md(12, 25))},
{2016, mds(2016, md(1, 1), md(2, 1), md(3, 21), md(3, 24), md(3, 25),
md(5, 1), md(9, 16), md(11, 2), md(11, 21), md(12, 12), md(12, 25))},
};
}
@ParameterizedTest
@MethodSource("data_mxmc")
public void test_mxmc(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(MXMC.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar NOOS = GlobalHolidayCalendars.generateOslo();
public static Object[][] data_noos() {
// official data from Oslo Bors via web archive
return new Object[][] {
{2009, mds(2009, md(1, 1), md(4, 9), md(4, 10), md(4, 13),
md(5, 1), md(5, 21), md(6, 1), md(12, 24), md(12, 25), md(12, 31))},
{2011, mds(2011, md(4, 21), md(4, 22), md(4, 25),
md(5, 17), md(6, 2), md(6, 13), md(12, 26))},
{2012, mds(2012, md(4, 5), md(4, 6), md(4, 9),
md(5, 1), md(5, 17), md(5, 28), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2013, mds(2013, md(1, 1), md(3, 28), md(3, 29), md(4, 1),
md(5, 1), md(5, 9), md(5, 17), md(5, 20), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2014, mds(2014, md(1, 1), md(4, 17), md(4, 18), md(4, 21),
md(5, 1), md(5, 17), md(5, 29), md(6, 9), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2015, mds(2015, md(1, 1), md(4, 2), md(4, 3), md(4, 6),
md(5, 1), md(5, 14), md(5, 25), md(12, 24), md(12, 25), md(12, 31))},
{2016, mds(2016, md(1, 1), md(3, 24), md(3, 25), md(3, 28),
md(5, 5), md(5, 16), md(5, 17), md(12, 26))},
{2017, mds(2017, md(4, 13), md(4, 14), md(4, 17),
md(5, 1), md(5, 17), md(5, 25), md(6, 5), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_noos")
public void test_noos(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(NOOS.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar NZAU = GlobalHolidayCalendars.generateAuckland();
public static Object[][] data_nzau() {
// https://www.govt.nz/browse/work/public-holidays-and-work/public-holidays-and-anniversary-dates/
// https://www.employment.govt.nz/leave-and-holidays/public-holidays/public-holidays-and-anniversary-dates/dates-for-previous-years/
return new Object[][] {
{2015, mds(2015, md(1, 1), md(1, 2), md(1, 26), md(2, 6), md(4, 3), md(4, 6),
md(4, 27), md(6, 1), md(10, 26), md(12, 25), md(12, 28))},
{2016, mds(2016, md(1, 1), md(1, 4), md(2, 1), md(2, 8), md(3, 25), md(3, 28),
md(4, 25), md(6, 6), md(10, 24), md(12, 26), md(12, 27))},
{2017, mds(2017, md(1, 2), md(1, 3), md(1, 30), md(2, 6), md(4, 14), md(4, 17),
md(4, 25), md(6, 5), md(10, 23), md(12, 25), md(12, 26))},
{2018, mds(2018, md(1, 1), md(1, 2), md(1, 29), md(2, 6), md(3, 30), md(4, 2),
md(4, 25), md(6, 4), md(10, 22), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_nzau")
public void test_nzau(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(NZAU.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar NZWE = GlobalHolidayCalendars.generateWellington();
public static Object[][] data_nzwe() {
// https://www.govt.nz/browse/work/public-holidays-and-work/public-holidays-and-anniversary-dates/
// https://www.employment.govt.nz/leave-and-holidays/public-holidays/public-holidays-and-anniversary-dates/dates-for-previous-years/
return new Object[][] {
{2015, mds(2015, md(1, 1), md(1, 2), md(1, 19), md(2, 6), md(4, 3), md(4, 6),
md(4, 27), md(6, 1), md(10, 26), md(12, 25), md(12, 28))},
{2016, mds(2016, md(1, 1), md(1, 4), md(1, 25), md(2, 8), md(3, 25), md(3, 28),
md(4, 25), md(6, 6), md(10, 24), md(12, 26), md(12, 27))},
{2017, mds(2017, md(1, 2), md(1, 3), md(1, 23), md(2, 6), md(4, 14), md(4, 17),
md(4, 25), md(6, 5), md(10, 23), md(12, 25), md(12, 26))},
{2018, mds(2018, md(1, 1), md(1, 2), md(1, 22), md(2, 6), md(3, 30), md(4, 2),
md(4, 25), md(6, 4), md(10, 22), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_nzwe")
public void test_nzwe(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(NZWE.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar NZBD = GlobalHolidayCalendars.generateNewZealand();
public static Object[][] data_nzbd() {
// https://www.govt.nz/browse/work/public-holidays-and-work/public-holidays-and-anniversary-dates/
// https://www.employment.govt.nz/leave-and-holidays/public-holidays/public-holidays-and-anniversary-dates/dates-for-previous-years/
return new Object[][] {
{2015, mds(2015, md(1, 1), md(1, 2), md(2, 6), md(4, 3), md(4, 6),
md(4, 27), md(6, 1), md(10, 26), md(12, 25), md(12, 28))},
{2016, mds(2016, md(1, 1), md(1, 4), md(2, 8), md(3, 25), md(3, 28),
md(4, 25), md(6, 6), md(10, 24), md(12, 26), md(12, 27))},
{2017, mds(2017, md(1, 2), md(1, 3), md(2, 6), md(4, 14), md(4, 17),
md(4, 25), md(6, 5), md(10, 23), md(12, 25), md(12, 26))},
{2018, mds(2018, md(1, 1), md(1, 2), md(2, 6), md(3, 30), md(4, 2),
md(4, 25), md(6, 4), md(10, 22), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_nzbd")
public void test_nzbd(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(NZBD.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar PLWA = GlobalHolidayCalendars.generateWarsaw();
public static Object[][] data_plwa() {
// based on government law data and stock exchange holidays
return new Object[][] {
{2013, mds(2013, md(1, 1), md(4, 1),
md(5, 1), md(5, 3), md(5, 30), md(8, 15), md(11, 1), md(11, 11), md(12, 24), md(12, 25), md(12, 26))},
{2014, mds(2014, md(1, 1), md(1, 6), md(4, 21),
md(5, 1), md(6, 19), md(8, 15), md(11, 11), md(12, 24), md(12, 25), md(12, 26))},
{2015, mds(2015, md(1, 1), md(1, 6), md(4, 6),
md(5, 1), md(6, 4), md(11, 11), md(12, 24), md(12, 25), md(12, 31))},
{2016, mds(2016, md(1, 1), md(1, 6), md(3, 28),
md(5, 3), md(5, 26), md(8, 15), md(11, 1), md(11, 11), md(12, 26))},
{2017, mds(2017, md(1, 6), md(4, 17),
md(5, 1), md(5, 3), md(6, 15), md(8, 15), md(11, 1), md(12, 25), md(12, 26))},
{2018, mds(2018, md(1, 1), md(1, 6), md(4, 1), md(4, 2), md(5, 1), md(5, 3),
md(5, 20), md(5, 31), md(8, 15), md(11, 1), md(11, 11), md(11, 12), md(12, 24), md(12, 25), md(12, 26), md(12, 31))}
};
}
@ParameterizedTest
@MethodSource("data_plwa")
public void test_plwa(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(PLWA.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar SEST = GlobalHolidayCalendars.generateStockholm();
public static Object[][] data_sest() {
// official data from published fixing dates
return new Object[][] {
{2014, mds(2014, md(1, 1), md(1, 6), md(4, 18), md(4, 21),
md(5, 1), md(5, 29), md(6, 6), md(6, 20), md(12, 24), md(12, 25), md(12, 26), md(12, 31))},
{2015, mds(2015, md(1, 1), md(1, 6), md(4, 3), md(4, 6),
md(5, 1), md(5, 14), md(6, 19), md(12, 24), md(12, 25), md(12, 31))},
{2016, mds(2016, md(1, 1), md(1, 6), md(3, 25), md(3, 28),
md(5, 5), md(6, 6), md(6, 24), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_sest")
public void test_sest(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(SEST.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static final HolidayCalendar ZAJO = GlobalHolidayCalendars.generateJohannesburg();
public static Object[][] data_zajo() {
// http://www.gov.za/about-sa/public-holidays
// https://web.archive.org/web/20151230214958/http://www.gov.za/about-sa/public-holidays
return new Object[][] {
{2015, mds(2015, md(1, 1), md(3, 21), md(4, 3), md(4, 6), md(4, 27), md(5, 1),
md(6, 16), md(8, 10), md(9, 24), md(12, 16), md(12, 25), md(12, 26))},
{2016, mds(2016, md(1, 1), md(3, 21), md(3, 25), md(3, 28), md(4, 27), md(5, 2),
md(6, 16), md(8, 3), md(8, 9), md(9, 24), md(12, 16), md(12, 26), md(12, 27))},
{2017, mds(2017, md(1, 1), md(1, 2), md(3, 21), md(4, 14), md(4, 17), md(4, 27), md(5, 1),
md(6, 16), md(8, 9), md(9, 25), md(12, 16), md(12, 25), md(12, 26))},
};
}
@ParameterizedTest
@MethodSource("data_zajo")
public void test_zajo(int year, List<LocalDate> holidays) {
LocalDate date = LocalDate.of(year, 1, 1);
int len = date.lengthOfYear();
for (int i = 0; i < len; i++) {
boolean isHoliday = holidays.contains(date) || date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
assertThat(ZAJO.isHoliday(date)).as(date.toString()).isEqualTo(isHoliday);
date = date.plusDays(1);
}
}
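    // Hedged refactoring sketch (not part of the original test): every per-calendar test above
    // repeats the same day-by-day walk over a year (listed holiday or weekend counts as a holiday),
    // so it could be factored into one helper; the name "assertCalendarMatches" is invented here
    // purely for illustration.
    private static void assertCalendarMatches(HolidayCalendar calendar, int year, List<LocalDate> holidays) {
        LocalDate date = LocalDate.of(year, 1, 1);
        int len = date.lengthOfYear();
        for (int i = 0; i < len; i++) {
            boolean expected = holidays.contains(date) ||
                date.getDayOfWeek() == SATURDAY || date.getDayOfWeek() == SUNDAY;
            assertThat(calendar.isHoliday(date)).as(date.toString()).isEqualTo(expected);
            date = date.plusDays(1);
        }
    }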
//-------------------------------------------------------------------------
@Test
public void test_combinedWith() {
HolidayCalendar combined =
ImmutableHolidayCalendar.combined((ImmutableHolidayCalendar) JPTO, (ImmutableHolidayCalendar) USNY);
LocalDate date = LocalDate.of(1950, 1, 1);
while (date.getYear() < 2040) {
assertThat(combined.isHoliday(date)).as("Date: " + date).isEqualTo(JPTO.isHoliday(date) || USNY.isHoliday(date));
date = date.plusDays(1);
}
}
//-------------------------------------------------------------------------
private static List<LocalDate> mds(int year, MonthDay... monthDays) {
List<LocalDate> holidays = new ArrayList<>();
for (MonthDay md : monthDays) {
holidays.add(md.atYear(year));
}
return holidays;
}
private static MonthDay md(int month, int day) {
return MonthDay.of(month, day);
}
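    // Hedged aside (not part of the original test): the two helpers above simply resolve each
    // MonthDay against the given year, e.g. mds(2016, md(1, 1), md(12, 26)) yields
    // [2016-01-01, 2016-12-26]. A stream-based equivalent, shown only for illustration, would be:
    private static List<LocalDate> mdsStreamSketch(int year, MonthDay... monthDays) {
        return java.util.Arrays.stream(monthDays)
            .map(monthDay -> monthDay.atYear(year))
            .collect(java.util.stream.Collectors.toList());
    }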
//-------------------------------------------------------------------------
public static void coverage() {
coverPrivateConstructor(GlobalHolidayCalendars.class);
}
}
|
Add test for AUSY holiday in 2022 (#2428)
|
modules/basics/src/test/java/com/opengamma/strata/basics/date/GlobalHolidayCalendarsTest.java
|
Add test for AUSY holiday in 2022 (#2428)
|
|
Java
|
apache-2.0
|
c377dac0afc27077a73a0110d82aef523bb69b2b
| 0
|
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
|
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.wm.impl.status;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.actionSystem.impl.SimpleDataContext;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.fileEditor.*;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.CustomStatusBarWidget;
import com.intellij.openapi.wm.StatusBar;
import com.intellij.openapi.wm.StatusBarWidget;
import com.intellij.ui.ClickListener;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.Alarm;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
public abstract class EditorBasedStatusBarPopup extends EditorBasedWidget implements StatusBarWidget.Multiframe, CustomStatusBarWidget {
private final TextPanel.WithIconAndArrows myComponent;
private boolean actionEnabled;
private final Alarm update;
// store editor here to avoid expensive and EDT-only getSelectedEditor() retrievals
private volatile Reference<Editor> myEditor = new WeakReference<>(null);
public EditorBasedStatusBarPopup(@NotNull Project project) {
super(project);
update = new Alarm(this);
myComponent = new TextPanel.WithIconAndArrows() {
@Override
protected boolean shouldPaintIconAndArrows() {
return actionEnabled;
}
};
new ClickListener() {
@Override
public boolean onClick(@NotNull MouseEvent e, int clickCount) {
update();
showPopup(e);
return true;
}
}.installOn(myComponent);
myComponent.setBorder(WidgetBorder.WIDE);
}
@Override
public void selectionChanged(@NotNull FileEditorManagerEvent event) {
if (ApplicationManager.getApplication().isUnitTestMode()) return;
VirtualFile newFile = event.getNewFile();
fileChanged(newFile);
}
private void fileChanged(VirtualFile newFile) {
Project project = getProject();
assert project != null;
FileEditor fileEditor = newFile == null ? null : FileEditorManager.getInstance(project).getSelectedEditor(newFile);
Editor editor = fileEditor instanceof TextEditor ? ((TextEditor)fileEditor).getEditor() : null;
myEditor = new WeakReference<>(editor);
update();
}
@Override
public void fileOpened(@NotNull FileEditorManager source, @NotNull VirtualFile file) {
fileChanged(file);
}
@Override
public StatusBarWidget copy() {
return createInstance(getProject());
}
@Nullable
@Override
public WidgetPresentation getPresentation(@NotNull PlatformType type) {
return null;
}
@Override
public void install(@NotNull StatusBar statusBar) {
super.install(statusBar);
registerCustomListeners();
EditorFactory.getInstance().getEventMulticaster().addDocumentListener(new DocumentListener() {
@Override
public void documentChanged(DocumentEvent e) {
Document document = e.getDocument();
updateForDocument(document);
}
}, this);
}
protected void updateForDocument(@Nullable("null means update anyway") Document document) {
Editor selectedEditor = myEditor.get();
if (document != null && (selectedEditor == null || selectedEditor.getDocument() != document)) return;
update();
}
protected void updateForFile(@Nullable("null means update anyway") VirtualFile file) {
if (file == null) {
update();
}
else {
updateForDocument(FileDocumentManager.getInstance().getCachedDocument(file));
}
}
private void showPopup(@NotNull MouseEvent e) {
if (!actionEnabled) {
return;
}
DataContext dataContext = getContext();
ListPopup popup = createPopup(dataContext);
if (popup != null) {
Dimension dimension = popup.getContent().getPreferredSize();
Point at = new Point(0, -dimension.height);
popup.show(new RelativePoint(e.getComponent(), at));
Disposer.register(this, popup); // destroy popup on unexpected project close
}
}
@NotNull
protected DataContext getContext() {
Editor editor = getEditor();
DataContext parent = DataManager.getInstance().getDataContext((Component)myStatusBar);
VirtualFile selectedFile = getSelectedFile();
return SimpleDataContext.getSimpleContext(
ContainerUtil.<String, Object>immutableMapBuilder()
.put(CommonDataKeys.VIRTUAL_FILE.getName(), selectedFile)
.put(CommonDataKeys.VIRTUAL_FILE_ARRAY.getName(), new VirtualFile[] {selectedFile})
.put(CommonDataKeys.PROJECT.getName(), getProject())
.put(PlatformDataKeys.CONTEXT_COMPONENT.getName(), editor == null ? null : editor.getComponent())
.build(),
parent);
}
@Override
public JComponent getComponent() {
return myComponent;
}
public void update() {
if (update.isDisposed()) return;
update.cancelAllRequests();
update.addRequest(() -> {
if (isDisposed()) return;
VirtualFile file = getSelectedFile();
actionEnabled = false;
String widgetText;
String toolTipText;
WidgetState state = getWidgetState(file);
if (state.hidden) {
myComponent.setVisible(false);
return;
}
myComponent.setVisible(true);
actionEnabled = state.actionEnabled && file != null && file.isWritable();
widgetText = state.text;
toolTipText = state.toolTip;
if (actionEnabled) {
myComponent.setForeground(UIUtil.getActiveTextColor());
myComponent.setTextAlignment(Component.LEFT_ALIGNMENT);
}
else {
myComponent.setForeground(UIUtil.getInactiveTextColor());
myComponent.setTextAlignment(Component.CENTER_ALIGNMENT);
}
myComponent.setIcon(state.icon);
myComponent.setToolTipText(toolTipText);
myComponent.setText(widgetText);
if (myStatusBar != null) {
myStatusBar.updateWidget(ID());
}
}, 200, ModalityState.any());
}
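  // Hedged aside (not in the original class): update() above is a debounce: it cancels any pending
  // Alarm request and schedules a fresh one 200 ms out, so bursts of document events collapse into
  // a single widget refresh. The same idea with plain JDK types would look roughly like the sketch
  // below; these field and method names are invented for illustration only.
  private final java.util.concurrent.ScheduledExecutorService debounceSketchExecutor =
    java.util.concurrent.Executors.newSingleThreadScheduledExecutor();
  private java.util.concurrent.ScheduledFuture<?> pendingSketchUpdate;

  private synchronized void debouncedSketchUpdate(Runnable task) {
    if (pendingSketchUpdate != null) {
      pendingSketchUpdate.cancel(false); // drop the previously queued refresh
    }
    pendingSketchUpdate = debounceSketchExecutor.schedule(task, 200, java.util.concurrent.TimeUnit.MILLISECONDS);
  }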
protected static class WidgetState {
protected WidgetState(String toolTip, String text, boolean actionEnabled) {
this.toolTip = toolTip;
this.text = text;
this.actionEnabled = actionEnabled;
}
public void setHidden(boolean hidden) {
this.hidden = hidden;
}
public void setIcon(Icon icon) {
this.icon = icon;
}
private final String toolTip;
private final String text;
private final boolean actionEnabled;
private boolean hidden;
private Icon icon;
}
@NotNull
protected abstract WidgetState getWidgetState(@Nullable VirtualFile file);
@Nullable
protected abstract ListPopup createPopup(DataContext context);
protected abstract void registerCustomListeners();
@NotNull
protected abstract StatusBarWidget createInstance(Project project);
}
|
platform/platform-impl/src/com/intellij/openapi/wm/impl/status/EditorBasedStatusBarPopup.java
|
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.wm.impl.status;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.actionSystem.impl.SimpleDataContext;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.event.DocumentListener;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.FileEditorManagerEvent;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.CustomStatusBarWidget;
import com.intellij.openapi.wm.StatusBar;
import com.intellij.openapi.wm.StatusBarWidget;
import com.intellij.ui.ClickListener;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.util.Alarm;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseEvent;
public abstract class EditorBasedStatusBarPopup extends EditorBasedWidget implements StatusBarWidget.Multiframe, CustomStatusBarWidget {
private final TextPanel.WithIconAndArrows myComponent;
private boolean actionEnabled;
private final Alarm update;
public EditorBasedStatusBarPopup(@NotNull Project project) {
super(project);
update = new Alarm(this);
myComponent = new TextPanel.WithIconAndArrows() {
@Override
protected boolean shouldPaintIconAndArrows() {
return actionEnabled;
}
};
new ClickListener() {
@Override
public boolean onClick(@NotNull MouseEvent e, int clickCount) {
update();
showPopup(e);
return true;
}
}.installOn(myComponent);
myComponent.setBorder(WidgetBorder.WIDE);
}
@Override
public void selectionChanged(@NotNull FileEditorManagerEvent event) {
if (ApplicationManager.getApplication().isUnitTestMode()) return;
fileChanged();
}
private void fileChanged() {
update();
}
@Override
public void fileOpened(@NotNull FileEditorManager source, @NotNull VirtualFile file) {
fileChanged();
}
@Override
public StatusBarWidget copy() {
return createInstance(getProject());
}
@Nullable
@Override
public WidgetPresentation getPresentation(@NotNull PlatformType type) {
return null;
}
@Override
public void install(@NotNull StatusBar statusBar) {
super.install(statusBar);
registerCustomListeners();
EditorFactory.getInstance().getEventMulticaster().addDocumentListener(new DocumentListener() {
@Override
public void documentChanged(DocumentEvent e) {
Document document = e.getDocument();
updateForDocument(document);
}
}, this);
}
protected void updateForDocument(@Nullable("null means update anyway") Document document) {
Editor selectedEditor = getEditor();
if (document != null && (selectedEditor == null || selectedEditor.getDocument() != document)) return;
update();
}
protected void updateForFile(@Nullable("null means update anyway") VirtualFile file) {
if (file == null) {
update();
}
else {
updateForDocument(FileDocumentManager.getInstance().getCachedDocument(file));
}
}
private void showPopup(@NotNull MouseEvent e) {
if (!actionEnabled) {
return;
}
DataContext dataContext = getContext();
ListPopup popup = createPopup(dataContext);
if (popup != null) {
Dimension dimension = popup.getContent().getPreferredSize();
Point at = new Point(0, -dimension.height);
popup.show(new RelativePoint(e.getComponent(), at));
Disposer.register(this, popup); // destroy popup on unexpected project close
}
}
@NotNull
protected DataContext getContext() {
Editor editor = getEditor();
DataContext parent = DataManager.getInstance().getDataContext((Component)myStatusBar);
VirtualFile selectedFile = getSelectedFile();
return SimpleDataContext.getSimpleContext(
ContainerUtil.<String, Object>immutableMapBuilder()
.put(CommonDataKeys.VIRTUAL_FILE.getName(), selectedFile)
.put(CommonDataKeys.VIRTUAL_FILE_ARRAY.getName(), new VirtualFile[] {selectedFile})
.put(CommonDataKeys.PROJECT.getName(), getProject())
.put(PlatformDataKeys.CONTEXT_COMPONENT.getName(), editor == null ? null : editor.getComponent())
.build(),
parent);
}
@Override
public JComponent getComponent() {
return myComponent;
}
public void update() {
if (update.isDisposed()) return;
update.cancelAllRequests();
update.addRequest(() -> {
if (isDisposed()) return;
VirtualFile file = getSelectedFile();
actionEnabled = false;
String widgetText;
String toolTipText;
WidgetState state = getWidgetState(file);
if (state.hidden) {
myComponent.setVisible(false);
return;
}
myComponent.setVisible(true);
actionEnabled = state.actionEnabled && file != null && file.isWritable();
widgetText = state.text;
toolTipText = state.toolTip;
if (actionEnabled) {
myComponent.setForeground(UIUtil.getActiveTextColor());
myComponent.setTextAlignment(Component.LEFT_ALIGNMENT);
}
else {
myComponent.setForeground(UIUtil.getInactiveTextColor());
myComponent.setTextAlignment(Component.CENTER_ALIGNMENT);
}
myComponent.setIcon(state.icon);
myComponent.setToolTipText(toolTipText);
myComponent.setText(widgetText);
if (myStatusBar != null) {
myStatusBar.updateWidget(ID());
}
}, 200, ModalityState.any());
}
protected static class WidgetState {
protected WidgetState(String toolTip, String text, boolean actionEnabled) {
this.toolTip = toolTip;
this.text = text;
this.actionEnabled = actionEnabled;
}
public void setHidden(boolean hidden) {
this.hidden = hidden;
}
public void setIcon(Icon icon) {
this.icon = icon;
}
private final String toolTip;
private final String text;
private final boolean actionEnabled;
private boolean hidden;
private Icon icon;
}
@NotNull
protected abstract WidgetState getWidgetState(@Nullable VirtualFile file);
@Nullable
protected abstract ListPopup createPopup(DataContext context);
protected abstract void registerCustomListeners();
@NotNull
protected abstract StatusBarWidget createInstance(Project project);
}
|
Revert "drop cached editor from EditorBasedStatusBarPopup"
This reverts commit a4134d0
see the discussion in IDEA-CR-31987
|
platform/platform-impl/src/com/intellij/openapi/wm/impl/status/EditorBasedStatusBarPopup.java
|
Revert "drop cached editor from EditorBasedStatusBarPopup"
|
|
Java
|
apache-2.0
|
56f41b05dfa7419bcb9ae5d6ab8dcc4c335710e2
| 0
|
gxa/gxa,gxa/gxa,gxa/gxa,gxa/gxa,gxa/gxa
|
/*
* Copyright 2008-2010 Microarray Informatics Team, EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* For further details of the Gene Expression Atlas project, including source code,
* downloads and documentation, please see:
*
* http://gxa.github.com/gxa
*/
package uk.ac.ebi.gxa.requesthandlers.query;
import ae3.dao.AtlasSolrDAO;
import ae3.model.AtlasExperiment;
import ae3.model.AtlasGene;
import ae3.service.AtlasStatisticsQueryService;
import ae3.service.structuredquery.Constants;
import uk.ac.ebi.gxa.efo.Efo;
import uk.ac.ebi.gxa.efo.EfoTerm;
import uk.ac.ebi.gxa.properties.AtlasProperties;
import uk.ac.ebi.gxa.requesthandlers.base.AbstractRestRequestHandler;
import uk.ac.ebi.gxa.statistics.StatisticsQueryUtils;
import uk.ac.ebi.gxa.statistics.StatisticsType;
import uk.ac.ebi.gxa.utils.EscapeUtil;
import uk.ac.ebi.microarray.atlas.model.ExpressionAnalysis;
import javax.servlet.http.HttpServletRequest;
import java.util.*;
/**
* @author pashky
*/
public class ExperimentsPopupRequestHandler extends AbstractRestRequestHandler {
private AtlasSolrDAO atlasSolrDAO;
private Efo efo;
private AtlasProperties atlasProperties;
private AtlasStatisticsQueryService atlasStatisticsQueryService;
public void setDao(AtlasSolrDAO atlasSolrDAO) {
this.atlasSolrDAO = atlasSolrDAO;
}
public void setEfo(Efo efo) {
this.efo = efo;
}
public void setAtlasProperties(AtlasProperties atlasProperties) {
this.atlasProperties = atlasProperties;
}
public void setAtlasStatisticsQueryService(AtlasStatisticsQueryService atlasStatisticsQueryService) {
this.atlasStatisticsQueryService = atlasStatisticsQueryService;
}
public Object process(HttpServletRequest request) {
Map<String, Object> jsResult = new HashMap<String, Object>();
String geneIdKey = request.getParameter("gene");
String factor = request.getParameter("ef");
String factorValue = request.getParameter("efv");
if (geneIdKey != null && factor != null && factorValue != null) {
boolean isEfo = Constants.EFO_FACTOR_NAME.equals(factor);
jsResult.put("ef", factor);
jsResult.put("eftext", atlasProperties.getCuratedEf(factor));
jsResult.put("efv", factorValue);
if (isEfo) {
EfoTerm term = efo.getTermById(factorValue);
if (term != null) {
jsResult.put("efv", term.getTerm());
}
}
AtlasSolrDAO.AtlasGeneResult result = atlasSolrDAO.getGeneById(geneIdKey);
if (!result.isFound()) {
throw new IllegalArgumentException("Atlas gene " + geneIdKey + " not found");
}
AtlasGene gene = result.getGene();
Map<String, Object> jsGene = new HashMap<String, Object>();
jsGene.put("id", geneIdKey);
jsGene.put("identifier", gene.getGeneIdentifier());
jsGene.put("name", gene.getGeneName());
jsResult.put("gene", jsGene);
Map<Long, Map<String, List<ExpressionAnalysis>>> exmap = new HashMap<Long, Map<String, List<ExpressionAnalysis>>>();
for (ExpressionAnalysis exp : isEfo ?
gene.getExpressionAnalyticsTable().findByEfoSet(efo.getTermAndAllChildrenIds(factorValue)) :
gene.getExpressionAnalyticsTable().findByEfEfv(factor, factorValue)) {
Map<String, List<ExpressionAnalysis>> efmap = exmap.get(exp.getExperimentID());
if (efmap == null) {
exmap.put(exp.getExperimentID(), efmap = new HashMap<String, List<ExpressionAnalysis>>());
}
List<ExpressionAnalysis> list = efmap.get(exp.getEfName());
if (list == null) {
efmap.put(exp.getEfName(), list = new ArrayList<ExpressionAnalysis>());
}
list.add(exp);
}
for (Map<String, List<ExpressionAnalysis>> ef : exmap.values()) {
for (List<ExpressionAnalysis> e : ef.values()) {
Collections.sort(e, new Comparator<ExpressionAnalysis>() {
public int compare(ExpressionAnalysis o1, ExpressionAnalysis o2) {
return o1.getPValAdjusted() - o2.getPValAdjusted() < 0 ? -1 : 1;
}
});
}
}
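            // Hedged aside (not in the original handler): the subtraction-based comparator above never
            // returns 0, which technically violates the Comparator contract for equal p-values. An
            // equivalent that also handles ties would delegate to Double.compare, as in this purely
            // illustrative local (the name "byAdjustedPValue" is invented here):
            Comparator<ExpressionAnalysis> byAdjustedPValue = new Comparator<ExpressionAnalysis>() {
                public int compare(ExpressionAnalysis o1, ExpressionAnalysis o2) {
                    return Double.compare(o1.getPValAdjusted(), o2.getPValAdjusted());
                }
            };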
@SuppressWarnings("unchecked")
List<Map.Entry<Long, Map<String, List<ExpressionAnalysis>>>> exps =
new ArrayList<Map.Entry<Long, Map<String, List<ExpressionAnalysis>>>>(exmap.entrySet());
Collections.sort(exps, new Comparator<Map.Entry<Long, Map<String, List<ExpressionAnalysis>>>>() {
public int compare(Map.Entry<Long, Map<String, List<ExpressionAnalysis>>> o1,
Map.Entry<Long, Map<String, List<ExpressionAnalysis>>> o2) {
double minp1 = 1;
for (Map.Entry<String, List<ExpressionAnalysis>> ef : o1.getValue().entrySet()) {
minp1 = Math.min(minp1, ef.getValue().get(0).getPValAdjusted());
}
double minp2 = 1;
for (Map.Entry<String, List<ExpressionAnalysis>> ef : o2.getValue().entrySet()) {
minp2 = Math.min(minp2, ef.getValue().get(0).getPValAdjusted());
}
return minp1 < minp2 ? -1 : 1;
}
});
int numUp = 0, numDn = 0, numNo = 0;
List<Map> jsExps = new ArrayList<Map>();
for (Map.Entry<Long, Map<String, List<ExpressionAnalysis>>> e : exps) {
AtlasExperiment aexp = atlasSolrDAO.getExperimentById(e.getKey());
if (aexp != null) {
Map<String, Object> jsExp = new HashMap<String, Object>();
jsExp.put("accession", aexp.getAccession());
jsExp.put("name", aexp.getDescription());
jsExp.put("id", e.getKey());
boolean wasup = false;
boolean wasdn = false;
boolean wasno = false;
List<Map> jsEfs = new ArrayList<Map>();
for (Map.Entry<String, List<ExpressionAnalysis>> ef : e.getValue().entrySet()) {
Map<String, Object> jsEf = new HashMap<String, Object>();
jsEf.put("ef", ef.getKey());
jsEf.put("eftext", atlasProperties.getCuratedEf(ef.getKey()));
List<Map> jsEfvs = new ArrayList<Map>();
for (ExpressionAnalysis exp : ef.getValue()) {
Map<String, Object> jsEfv = new HashMap<String, Object>();
jsEfv.put("efv", exp.getEfvName());
jsEfv.put("isexp", exp.isNo() ? "no" : (exp.isUp() ? "up" : "dn"));
jsEfv.put("pvalue", exp.getPValAdjusted());
jsEfvs.add(jsEfv);
if(exp.isNo())
wasno = true;
else {
if (exp.isUp()) {
wasup = true;
}
else {
wasdn = true;
}
}
}
jsEf.put("efvs", jsEfvs);
if(!jsEfvs.isEmpty())
jsEfs.add(jsEf);
}
jsExp.put("efs", jsEfs);
if (wasup) {
++numUp;
}
if (wasdn) {
++numDn;
}
if (wasno) {
++numNo;
}
jsExps.add(jsExp);
}
}
jsResult.put("experiments", jsExps);
// gene.getExpressionAnalyticsTable() (i.e. Solr gene index) doesn't contain non-de data - obtain non-de counts from atlasStatisticsQueryService instead
// TODO: eliminate gene.getExpressionAnalyticsTable() altogether from this method - in favour of using atlasStatisticsQueryService for counts and ncdfs for pvals instead
String efv;
if (isEfo) {
efv = factorValue;
} else {
efv = EscapeUtil.encode(factor, factorValue);
}
long start = System.currentTimeMillis();
numNo = atlasStatisticsQueryService.getExperimentCountsForGene(efv, StatisticsType.NON_D_E, isEfo == StatisticsQueryUtils.EFO, Long.parseLong(geneIdKey));
log.debug("Obtained nonde counts for gene: " + geneIdKey + " and efv: " + efv + " in: " + (System.currentTimeMillis() - start) + " ms");
jsResult.put("numUp", numUp);
jsResult.put("numDn", numDn);
jsResult.put("numNo", numNo);
}
return jsResult;
}
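    // Hedged aside (not part of the original handler): the nested map built inside process()
    // (experiment id, then factor name, then analyses) is a two-level group-by. With Java 8
    // collectors, which are newer than this codebase, the same structure could be produced by a
    // helper like the sketch below; its name and parameter are invented purely for illustration.
    private static Map<Long, Map<String, List<ExpressionAnalysis>>> groupByExperimentAndEfSketch(
            Iterable<ExpressionAnalysis> analyses) {
        return java.util.stream.StreamSupport.stream(analyses.spliterator(), false)
            .collect(java.util.stream.Collectors.groupingBy(
                ExpressionAnalysis::getExperimentID,
                java.util.stream.Collectors.groupingBy(ExpressionAnalysis::getEfName)));
    }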
}
|
atlas-web/src/main/java/uk/ac/ebi/gxa/requesthandlers/query/ExperimentsPopupRequestHandler.java
|
/*
* Copyright 2008-2010 Microarray Informatics Team, EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* For further details of the Gene Expression Atlas project, including source code,
* downloads and documentation, please see:
*
* http://gxa.github.com/gxa
*/
package uk.ac.ebi.gxa.requesthandlers.query;
import ae3.dao.AtlasSolrDAO;
import ae3.model.AtlasExperiment;
import ae3.model.AtlasGene;
import ae3.service.AtlasStatisticsQueryService;
import ae3.service.structuredquery.Constants;
import uk.ac.ebi.gxa.efo.Efo;
import uk.ac.ebi.gxa.efo.EfoTerm;
import uk.ac.ebi.gxa.properties.AtlasProperties;
import uk.ac.ebi.gxa.requesthandlers.base.AbstractRestRequestHandler;
import uk.ac.ebi.microarray.atlas.model.ExpressionAnalysis;
import javax.servlet.http.HttpServletRequest;
import java.util.*;
/**
* @author pashky
*/
public class ExperimentsPopupRequestHandler extends AbstractRestRequestHandler {
private AtlasSolrDAO atlasSolrDAO;
private Efo efo;
private AtlasProperties atlasProperties;
private AtlasStatisticsQueryService atlasStatisticsQueryService;
public void setDao(AtlasSolrDAO atlasSolrDAO) {
this.atlasSolrDAO = atlasSolrDAO;
}
public void setEfo(Efo efo) {
this.efo = efo;
}
public void setAtlasProperties(AtlasProperties atlasProperties) {
this.atlasProperties = atlasProperties;
}
public void setAtlasStatisticsQueryService(AtlasStatisticsQueryService atlasStatisticsQueryService) {
this.atlasStatisticsQueryService = atlasStatisticsQueryService;
}
public Object process(HttpServletRequest request) {
Map<String, Object> jsResult = new HashMap<String, Object>();
String geneIdKey = request.getParameter("gene");
String factor = request.getParameter("ef");
String factorValue = request.getParameter("efv");
if (geneIdKey != null && factor != null && factorValue != null) {
boolean isEfo = Constants.EFO_FACTOR_NAME.equals(factor);
jsResult.put("ef", factor);
jsResult.put("eftext", atlasProperties.getCuratedEf(factor));
jsResult.put("efv", factorValue);
if (isEfo) {
EfoTerm term = efo.getTermById(factorValue);
if (term != null) {
jsResult.put("efv", term.getTerm());
}
}
AtlasSolrDAO.AtlasGeneResult result = atlasSolrDAO.getGeneById(geneIdKey);
if (!result.isFound()) {
throw new IllegalArgumentException("Atlas gene " + geneIdKey + " not found");
}
AtlasGene gene = result.getGene();
Map<String, Object> jsGene = new HashMap<String, Object>();
jsGene.put("id", geneIdKey);
jsGene.put("identifier", gene.getGeneIdentifier());
jsGene.put("name", gene.getGeneName());
jsResult.put("gene", jsGene);
Map<Long, Map<String, List<ExpressionAnalysis>>> exmap = new HashMap<Long, Map<String, List<ExpressionAnalysis>>>();
for (ExpressionAnalysis exp : isEfo ?
gene.getExpressionAnalyticsTable().findByEfoSet(efo.getTermAndAllChildrenIds(factorValue)) :
gene.getExpressionAnalyticsTable().findByEfEfv(factor, factorValue)) {
Map<String, List<ExpressionAnalysis>> efmap = exmap.get(exp.getExperimentID());
if (efmap == null) {
exmap.put(exp.getExperimentID(), efmap = new HashMap<String, List<ExpressionAnalysis>>());
}
List<ExpressionAnalysis> list = efmap.get(exp.getEfName());
if (list == null) {
efmap.put(exp.getEfName(), list = new ArrayList<ExpressionAnalysis>());
}
list.add(exp);
}
for (Map<String, List<ExpressionAnalysis>> ef : exmap.values()) {
for (List<ExpressionAnalysis> e : ef.values()) {
Collections.sort(e, new Comparator<ExpressionAnalysis>() {
public int compare(ExpressionAnalysis o1, ExpressionAnalysis o2) {
return o1.getPValAdjusted() - o2.getPValAdjusted() < 0 ? -1 : 1;
}
});
}
}
@SuppressWarnings("unchecked")
List<Map.Entry<Long, Map<String, List<ExpressionAnalysis>>>> exps =
new ArrayList<Map.Entry<Long, Map<String, List<ExpressionAnalysis>>>>(exmap.entrySet());
Collections.sort(exps, new Comparator<Map.Entry<Long, Map<String, List<ExpressionAnalysis>>>>() {
public int compare(Map.Entry<Long, Map<String, List<ExpressionAnalysis>>> o1,
Map.Entry<Long, Map<String, List<ExpressionAnalysis>>> o2) {
double minp1 = 1;
for (Map.Entry<String, List<ExpressionAnalysis>> ef : o1.getValue().entrySet()) {
minp1 = Math.min(minp1, ef.getValue().get(0).getPValAdjusted());
}
double minp2 = 1;
for (Map.Entry<String, List<ExpressionAnalysis>> ef : o2.getValue().entrySet()) {
minp2 = Math.min(minp2, ef.getValue().get(0).getPValAdjusted());
}
return minp1 < minp2 ? -1 : 1;
}
});
int numUp = 0, numDn = 0, numNo = 0;
List<Map> jsExps = new ArrayList<Map>();
for (Map.Entry<Long, Map<String, List<ExpressionAnalysis>>> e : exps) {
AtlasExperiment aexp = atlasSolrDAO.getExperimentById(e.getKey());
if (aexp != null) {
Map<String, Object> jsExp = new HashMap<String, Object>();
jsExp.put("accession", aexp.getAccession());
jsExp.put("name", aexp.getDescription());
jsExp.put("id", e.getKey());
boolean wasup = false;
boolean wasdn = false;
boolean wasno = false;
List<Map> jsEfs = new ArrayList<Map>();
for (Map.Entry<String, List<ExpressionAnalysis>> ef : e.getValue().entrySet()) {
Map<String, Object> jsEf = new HashMap<String, Object>();
jsEf.put("ef", ef.getKey());
jsEf.put("eftext", atlasProperties.getCuratedEf(ef.getKey()));
List<Map> jsEfvs = new ArrayList<Map>();
for (ExpressionAnalysis exp : ef.getValue()) {
Map<String, Object> jsEfv = new HashMap<String, Object>();
jsEfv.put("efv", exp.getEfvName());
jsEfv.put("isexp", exp.isNo() ? "no" : (exp.isUp() ? "up" : "dn"));
jsEfv.put("pvalue", exp.getPValAdjusted());
jsEfvs.add(jsEfv);
if(exp.isNo())
wasno = true;
else {
if (exp.isUp()) {
wasup = true;
}
else {
wasdn = true;
}
}
}
jsEf.put("efvs", jsEfvs);
if(!jsEfvs.isEmpty())
jsEfs.add(jsEf);
}
jsExp.put("efs", jsEfs);
if (wasup) {
++numUp;
}
if (wasdn) {
++numDn;
}
if (wasno) {
++numNo;
}
jsExps.add(jsExp);
}
}
jsResult.put("experiments", jsExps);
// gene.getExpressionAnalyticsTable() (i.e. Solr gene index) doesn't contain non-de data - obtain non-de counts from atlasStatisticsQueryService instead
// TODO: eliminate gene.getExpressionAnalyticsTable() altogether from this method - in favour of using atlasStatisticsQueryService for counts and ncdfs for pvals instead
String efv;
if (isEfo) {
efv = factorValue;
} else {
efv = EscapeUtil.encode(factor, factorValue);
}
long start = System.currentTimeMillis();
numNo = atlasStatisticsQueryService.getExperimentCountsForGene(efv, StatisticsType.NON_D_E, isEfo == StatisticsQueryUtils.EFO, Long.parseLong(geneIdKey));
log.debug("Obtained nonde counts for gene: " + geneIdKey + " and efv: " + efv + " in: " + (System.currentTimeMillis() - start) + " ms");
jsResult.put("numUp", numUp);
jsResult.put("numDn", numDn);
jsResult.put("numNo", numNo);
}
return jsResult;
}
}
|
Re-instated missing imports
|
atlas-web/src/main/java/uk/ac/ebi/gxa/requesthandlers/query/ExperimentsPopupRequestHandler.java
|
Re-instated missing imports
|
|
Java
|
apache-2.0
|
ff11655b5d9bb2524afe91f12ab72410e0e17398
| 0
|
apache/jackrabbit,apache/jackrabbit,apache/jackrabbit
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.client;
import junit.framework.TestCase;
import org.apache.jackrabbit.jcr2spi.config.RepositoryConfig;
import org.apache.jackrabbit.spi.RepositoryService;
import org.apache.jackrabbit.spi.IdFactory;
import org.apache.jackrabbit.spi.NameFactory;
import org.apache.jackrabbit.spi.PathFactory;
import org.apache.jackrabbit.spi.QValueFactory;
import org.apache.jackrabbit.spi.SessionInfo;
import org.apache.jackrabbit.spi.ItemId;
import org.apache.jackrabbit.spi.QNodeDefinition;
import org.apache.jackrabbit.spi.NodeId;
import org.apache.jackrabbit.spi.QPropertyDefinition;
import org.apache.jackrabbit.spi.PropertyId;
import org.apache.jackrabbit.spi.NodeInfo;
import org.apache.jackrabbit.spi.PropertyInfo;
import org.apache.jackrabbit.spi.Batch;
import org.apache.jackrabbit.spi.Name;
import org.apache.jackrabbit.spi.LockInfo;
import org.apache.jackrabbit.spi.QueryInfo;
import org.apache.jackrabbit.spi.EventFilter;
import org.apache.jackrabbit.spi.Path;
import org.apache.jackrabbit.spi.Subscription;
import org.apache.jackrabbit.spi.EventBundle;
import org.apache.jackrabbit.spi.QValue;
import org.apache.jackrabbit.spi.QNodeTypeDefinition;
import org.apache.jackrabbit.spi.commons.logging.Slf4jLogWriterProvider;
import org.apache.jackrabbit.client.spilogger.RepositoryConfigImpl;
import javax.jcr.RepositoryException;
import javax.jcr.Repository;
import javax.jcr.Credentials;
import javax.jcr.LoginException;
import javax.jcr.NoSuchWorkspaceException;
import javax.jcr.ItemNotFoundException;
import javax.jcr.PathNotFoundException;
import javax.jcr.ValueFormatException;
import javax.jcr.AccessDeniedException;
import javax.jcr.UnsupportedRepositoryOperationException;
import javax.jcr.ItemExistsException;
import javax.jcr.InvalidItemStateException;
import javax.jcr.ReferentialIntegrityException;
import javax.jcr.MergeException;
import javax.jcr.NamespaceException;
import javax.jcr.RepositoryFactory;
import javax.jcr.query.InvalidQueryException;
import javax.jcr.lock.LockException;
import javax.jcr.version.VersionException;
import javax.jcr.nodetype.NoSuchNodeTypeException;
import javax.jcr.nodetype.ConstraintViolationException;
import javax.jcr.nodetype.InvalidNodeTypeDefinitionException;
import javax.jcr.nodetype.NodeTypeExistsException;
import java.util.Map;
import java.util.Collections;
import java.util.Iterator;
import java.util.HashMap;
import java.util.List;
import java.util.ArrayList;
import java.io.InputStream;
/**
* <code>RepositoryFactoryImplTest</code>...
*/
public class RepositoryFactoryImplTest extends TestCase {
private final RepositoryFactory factory = new RepositoryFactoryImpl();
private final RepositoryService service = new RepositoryServiceImpl();
public void testGetDefaultRepository() throws RepositoryException {
try {
Repository repo = factory.getRepository(null);
assertNotNull(repo);
} catch (RepositoryException e) {
// repository on top of spi2davex can only be initialized if the
// server is running. ok.
}
try {
System.setProperty(org.apache.jackrabbit.client.spi2davex.RepositoryConfigImpl.REPOSITORY_SPI2DAVEX_URI, org.apache.jackrabbit.client.spi2davex.RepositoryConfigImpl.DEFAULT_URI);
Repository repo = factory.getRepository(null);
assertNotNull(repo);
} catch (RepositoryException e) {
// repository on top of spi2davex can only be initialized if the
// server is running. ok.
}
}
public void testGetRepository() throws RepositoryException {
RepositoryConfig config = new AbstractRepositoryConfig() {
public RepositoryService getRepositoryService() throws RepositoryException {
return service;
}
};
Repository repo = factory.getRepository(Collections.singletonMap(RepositoryFactoryImpl.REPOSITORY_CONFIG, config));
assertNotNull(repo);
}
public void testGetRepositoryWithLogger() throws RepositoryException {
RepositoryConfig config = new AbstractRepositoryConfig() {
public RepositoryService getRepositoryService() throws RepositoryException {
return service;
}
};
List lwprovider = new ArrayList();
lwprovider.add(null);
lwprovider.add(new Boolean(true));
lwprovider.add(new Slf4jLogWriterProvider());
Map params = new HashMap();
params.put(RepositoryFactoryImpl.REPOSITORY_CONFIG, config);
for (int i = 0; i < lwprovider.size(); i++) {
params.put(RepositoryConfigImpl.PARAM_LOG_WRITER_PROVIDER, lwprovider.get(i));
Repository repo = factory.getRepository(params);
assertNotNull(repo);
}
}
public void testGetRepositoryUnknownParams() throws RepositoryException {
Repository repo = factory.getRepository(Collections.EMPTY_MAP);
assertNull(repo);
}
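    // Hedged aside (not in the original test): outside of unit tests the factory is typically
    // discovered through the standard JCR ServiceLoader lookup rather than instantiated directly;
    // the helper below sketches that lookup and its name is invented for illustration only.
    private static Repository lookupViaServiceLoaderSketch(Map parameters) throws RepositoryException {
        for (RepositoryFactory candidate : java.util.ServiceLoader.load(RepositoryFactory.class)) {
            Repository repository = candidate.getRepository(parameters);
            if (repository != null) {
                return repository;
            }
        }
        return null;
    }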
//--------------------------------------------------------------------------
/**
* Dummy RepositoryService
*/
private static final class RepositoryServiceImpl implements RepositoryService {
public IdFactory getIdFactory() throws RepositoryException {
return null;
}
public NameFactory getNameFactory() throws RepositoryException {
return null;
}
public PathFactory getPathFactory() throws RepositoryException {
return null;
}
public QValueFactory getQValueFactory() throws RepositoryException {
return null;
}
public Map getRepositoryDescriptors() throws RepositoryException {
return null;
}
public SessionInfo obtain(Credentials credentials, String workspaceName) throws LoginException, NoSuchWorkspaceException, RepositoryException {
return null;
}
public SessionInfo obtain(SessionInfo sessionInfo, String workspaceName) throws LoginException, NoSuchWorkspaceException, RepositoryException {
return null;
}
public SessionInfo impersonate(SessionInfo sessionInfo, Credentials credentials) throws LoginException, RepositoryException {
return null;
}
public void dispose(SessionInfo sessionInfo) throws RepositoryException {
}
public String[] getWorkspaceNames(SessionInfo sessionInfo) throws RepositoryException {
return new String[0];
}
public boolean isGranted(SessionInfo sessionInfo, ItemId itemId, String[] actions) throws RepositoryException {
return false;
}
public QNodeDefinition getNodeDefinition(SessionInfo sessionInfo, NodeId nodeId) throws RepositoryException {
return null;
}
public QPropertyDefinition getPropertyDefinition(SessionInfo sessionInfo, PropertyId propertyId) throws RepositoryException {
return null;
}
public NodeInfo getNodeInfo(SessionInfo sessionInfo, NodeId nodeId) throws ItemNotFoundException, RepositoryException {
return null;
}
public Iterator getItemInfos(SessionInfo sessionInfo, NodeId nodeId) throws ItemNotFoundException, RepositoryException {
return null;
}
public Iterator getChildInfos(SessionInfo sessionInfo, NodeId parentId) throws ItemNotFoundException, RepositoryException {
return null;
}
public Iterator<PropertyId> getReferences(SessionInfo sessionInfo, NodeId nodeId, Name propertyName, boolean weakReferences) throws ItemNotFoundException, RepositoryException {
return null;
}
public PropertyInfo getPropertyInfo(SessionInfo sessionInfo, PropertyId propertyId) throws ItemNotFoundException, RepositoryException {
return null;
}
public Batch createBatch(SessionInfo sessionInfo, ItemId itemId) throws RepositoryException {
return null;
}
public void submit(Batch batch) throws PathNotFoundException, ItemNotFoundException, NoSuchNodeTypeException, ValueFormatException, VersionException, LockException, ConstraintViolationException, AccessDeniedException, UnsupportedRepositoryOperationException, RepositoryException {
}
public void importXml(SessionInfo sessionInfo, NodeId parentId, InputStream xmlStream, int uuidBehaviour) throws ItemExistsException, PathNotFoundException, VersionException, ConstraintViolationException, LockException, AccessDeniedException, UnsupportedRepositoryOperationException, RepositoryException {
}
public void move(SessionInfo sessionInfo, NodeId srcNodeId, NodeId destParentNodeId, Name destName) throws ItemExistsException, PathNotFoundException, VersionException, ConstraintViolationException, LockException, AccessDeniedException, UnsupportedRepositoryOperationException, RepositoryException {
}
public void copy(SessionInfo sessionInfo, String srcWorkspaceName, NodeId srcNodeId, NodeId destParentNodeId, Name destName) throws NoSuchWorkspaceException, ConstraintViolationException, VersionException, AccessDeniedException, PathNotFoundException, ItemExistsException, LockException, UnsupportedRepositoryOperationException, RepositoryException {
}
public void update(SessionInfo sessionInfo, NodeId nodeId, String srcWorkspaceName) throws NoSuchWorkspaceException, AccessDeniedException, LockException, InvalidItemStateException, RepositoryException {
}
public void clone(SessionInfo sessionInfo, String srcWorkspaceName, NodeId srcNodeId, NodeId destParentNodeId, Name destName, boolean removeExisting) throws NoSuchWorkspaceException, ConstraintViolationException, VersionException, AccessDeniedException, PathNotFoundException, ItemExistsException, LockException, UnsupportedRepositoryOperationException, RepositoryException {
}
public LockInfo getLockInfo(SessionInfo sessionInfo, NodeId nodeId) throws AccessDeniedException, RepositoryException {
return null;
}
public LockInfo lock(SessionInfo sessionInfo, NodeId nodeId, boolean deep, boolean sessionScoped) throws UnsupportedRepositoryOperationException, LockException, AccessDeniedException, RepositoryException {
return null;
}
public LockInfo lock(SessionInfo sessionInfo, NodeId nodeId, boolean deep, boolean sessionScoped, long timeoutHint, String ownerHint) throws UnsupportedRepositoryOperationException, LockException, AccessDeniedException, RepositoryException {
return null;
}
public void refreshLock(SessionInfo sessionInfo, NodeId nodeId) throws UnsupportedRepositoryOperationException, LockException, AccessDeniedException, RepositoryException {
}
public void unlock(SessionInfo sessionInfo, NodeId nodeId) throws UnsupportedRepositoryOperationException, LockException, AccessDeniedException, RepositoryException {
}
public NodeId checkin(SessionInfo sessionInfo, NodeId nodeId) throws VersionException, UnsupportedRepositoryOperationException, InvalidItemStateException, LockException, RepositoryException {
return null;
}
public void checkout(SessionInfo sessionInfo, NodeId nodeId) throws UnsupportedRepositoryOperationException, LockException, RepositoryException {
}
public NodeId checkpoint(SessionInfo sessionInfo, NodeId nodeId) throws UnsupportedRepositoryOperationException, RepositoryException {
return null;
}
public void removeVersion(SessionInfo sessionInfo, NodeId versionHistoryId, NodeId versionId) throws ReferentialIntegrityException, AccessDeniedException, UnsupportedRepositoryOperationException, VersionException, RepositoryException {
}
public void restore(SessionInfo sessionInfo, NodeId nodeId, NodeId versionId, boolean removeExisting) throws VersionException, PathNotFoundException, ItemExistsException, UnsupportedRepositoryOperationException, LockException, InvalidItemStateException, RepositoryException {
}
public void restore(SessionInfo sessionInfo, NodeId[] versionIds, boolean removeExisting) throws ItemExistsException, UnsupportedRepositoryOperationException, VersionException, LockException, InvalidItemStateException, RepositoryException {
}
public Iterator merge(SessionInfo sessionInfo, NodeId nodeId, String srcWorkspaceName, boolean bestEffort) throws NoSuchWorkspaceException, AccessDeniedException, MergeException, LockException, InvalidItemStateException, RepositoryException {
return null;
}
public Iterator merge(SessionInfo sessionInfo, NodeId nodeId, String srcWorkspaceName, boolean bestEffort, boolean isShallow) throws NoSuchWorkspaceException, AccessDeniedException, MergeException, LockException, InvalidItemStateException, RepositoryException {
return null;
}
public void resolveMergeConflict(SessionInfo sessionInfo, NodeId nodeId, NodeId[] mergeFailedIds, NodeId[] predecessorIds) throws VersionException, InvalidItemStateException, UnsupportedRepositoryOperationException, RepositoryException {
}
public void addVersionLabel(SessionInfo sessionInfo, NodeId versionHistoryId, NodeId versionId, Name label, boolean moveLabel) throws VersionException, RepositoryException {
}
public void removeVersionLabel(SessionInfo sessionInfo, NodeId versionHistoryId, NodeId versionId, Name label) throws VersionException, RepositoryException {
}
public NodeId createActivity(SessionInfo sessionInfo, String title) throws UnsupportedRepositoryOperationException, RepositoryException {
return null;
}
public void removeActivity(SessionInfo sessionInfo, NodeId activityId) throws UnsupportedRepositoryOperationException, RepositoryException {
}
public Iterator mergeActivity(SessionInfo sessionInfo, NodeId activityId) throws UnsupportedRepositoryOperationException, RepositoryException {
return null;
}
public NodeId createConfiguration(SessionInfo sessionInfo, NodeId nodeId, NodeId baselineId) throws UnsupportedRepositoryOperationException, RepositoryException {
return null;
}
public String[] getSupportedQueryLanguages(SessionInfo sessionInfo) throws RepositoryException {
return new String[0];
}
public String[] checkQueryStatement(SessionInfo sessionInfo, String statement, String language, Map namespaces) throws InvalidQueryException, RepositoryException {
return new String[0];
}
public QueryInfo executeQuery(SessionInfo sessionInfo, String statement, String language, Map<String, String> namespaces, long limit, long offset, Map<String, QValue> values) throws RepositoryException {
return null;
}
public EventFilter createEventFilter(SessionInfo sessionInfo, int eventTypes, Path absPath, boolean isDeep, String[] uuid, Name[] nodeTypeName, boolean noLocal) throws UnsupportedRepositoryOperationException, RepositoryException {
return null;
}
public Subscription createSubscription(SessionInfo sessionInfo, EventFilter[] filters) throws UnsupportedRepositoryOperationException, RepositoryException {
return null;
}
public void updateEventFilters(Subscription subscription, EventFilter[] filters) throws RepositoryException {
}
public EventBundle[] getEvents(Subscription subscription, long timeout) throws RepositoryException, InterruptedException {
return new EventBundle[0];
}
public EventBundle getEvents(SessionInfo sessionInfo, EventFilter filter, long after) throws RepositoryException, UnsupportedRepositoryOperationException {
return null;
}
public void dispose(Subscription subscription) throws RepositoryException {
}
public Map getRegisteredNamespaces(SessionInfo sessionInfo) throws RepositoryException {
return null;
}
public String getNamespaceURI(SessionInfo sessionInfo, String prefix) throws NamespaceException, RepositoryException {
return null;
}
public String getNamespacePrefix(SessionInfo sessionInfo, String uri) throws NamespaceException, RepositoryException {
return null;
}
public void registerNamespace(SessionInfo sessionInfo, String prefix, String uri) throws NamespaceException, UnsupportedRepositoryOperationException, AccessDeniedException, RepositoryException {
}
public void unregisterNamespace(SessionInfo sessionInfo, String uri) throws NamespaceException, UnsupportedRepositoryOperationException, AccessDeniedException, RepositoryException {
}
public Iterator getQNodeTypeDefinitions(SessionInfo sessionInfo) throws RepositoryException {
return null;
}
public Iterator getQNodeTypeDefinitions(SessionInfo sessionInfo, Name[] nodetypeNames) throws RepositoryException {
return null;
}
public void registerNodeTypes(SessionInfo sessionInfo, QNodeTypeDefinition[] nodeTypeDefinitions, boolean allowUpdate) throws InvalidNodeTypeDefinitionException, NodeTypeExistsException, UnsupportedRepositoryOperationException, RepositoryException {
}
public void unregisterNodeTypes(SessionInfo sessionInfo, Name[] nodeTypeNames) throws UnsupportedRepositoryOperationException, NoSuchNodeTypeException, RepositoryException {
}
public void createWorkspace(SessionInfo sessionInfo, String name, String srcWorkspaceName) throws AccessDeniedException, UnsupportedRepositoryOperationException, NoSuchWorkspaceException, RepositoryException {
}
public void deleteWorkspace(SessionInfo sessionInfo, String name) throws AccessDeniedException, UnsupportedRepositoryOperationException, NoSuchWorkspaceException, RepositoryException {
}
}
}
|
jackrabbit-jcr-client/src/test/java/org/apache/jackrabbit/client/RepositoryFactoryImplTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.client;
import junit.framework.TestCase;
import org.apache.jackrabbit.jcr2spi.config.RepositoryConfig;
import org.apache.jackrabbit.spi.RepositoryService;
import org.apache.jackrabbit.spi.IdFactory;
import org.apache.jackrabbit.spi.NameFactory;
import org.apache.jackrabbit.spi.PathFactory;
import org.apache.jackrabbit.spi.QValueFactory;
import org.apache.jackrabbit.spi.SessionInfo;
import org.apache.jackrabbit.spi.ItemId;
import org.apache.jackrabbit.spi.QNodeDefinition;
import org.apache.jackrabbit.spi.NodeId;
import org.apache.jackrabbit.spi.QPropertyDefinition;
import org.apache.jackrabbit.spi.PropertyId;
import org.apache.jackrabbit.spi.NodeInfo;
import org.apache.jackrabbit.spi.PropertyInfo;
import org.apache.jackrabbit.spi.Batch;
import org.apache.jackrabbit.spi.Name;
import org.apache.jackrabbit.spi.LockInfo;
import org.apache.jackrabbit.spi.QueryInfo;
import org.apache.jackrabbit.spi.EventFilter;
import org.apache.jackrabbit.spi.Path;
import org.apache.jackrabbit.spi.Subscription;
import org.apache.jackrabbit.spi.EventBundle;
import org.apache.jackrabbit.spi.QValue;
import org.apache.jackrabbit.spi.QNodeTypeDefinition;
import org.apache.jackrabbit.spi.commons.logging.Slf4jLogWriterProvider;
import org.apache.jackrabbit.client.spilogger.RepositoryConfigImpl;
import javax.jcr.RepositoryException;
import javax.jcr.Repository;
import javax.jcr.Credentials;
import javax.jcr.LoginException;
import javax.jcr.NoSuchWorkspaceException;
import javax.jcr.ItemNotFoundException;
import javax.jcr.PathNotFoundException;
import javax.jcr.ValueFormatException;
import javax.jcr.AccessDeniedException;
import javax.jcr.UnsupportedRepositoryOperationException;
import javax.jcr.ItemExistsException;
import javax.jcr.InvalidItemStateException;
import javax.jcr.ReferentialIntegrityException;
import javax.jcr.MergeException;
import javax.jcr.NamespaceException;
import javax.jcr.RepositoryFactory;
import javax.jcr.query.InvalidQueryException;
import javax.jcr.lock.LockException;
import javax.jcr.version.VersionException;
import javax.jcr.nodetype.NoSuchNodeTypeException;
import javax.jcr.nodetype.ConstraintViolationException;
import javax.jcr.nodetype.InvalidNodeTypeDefinitionException;
import javax.jcr.nodetype.NodeTypeExistsException;
import java.util.Map;
import java.util.Collections;
import java.util.Iterator;
import java.util.HashMap;
import java.util.List;
import java.util.ArrayList;
import java.io.InputStream;
/**
* <code>RepositoryFactoryImplTest</code>...
*/
public class RepositoryFactoryImplTest extends TestCase {
private final RepositoryFactory factory = new RepositoryFactoryImpl();
private final RepositoryService service = new RepositoryServiceImpl();
public void testGetDefaultRepository() throws RepositoryException {
try {
Repository repo = factory.getRepository(null);
assertNotNull(repo);
} catch (RepositoryException e) {
// repository on top of spi2davex can only be initialized if the
// server is running. ok.
}
try {
System.setProperty(org.apache.jackrabbit.client.spi2davex.RepositoryConfigImpl.REPOSITORY_SPI2DAVEX_URI, org.apache.jackrabbit.client.spi2davex.RepositoryConfigImpl.DEFAULT_URI);
Repository repo = factory.getRepository(null);
assertNotNull(repo);
} catch (RepositoryException e) {
// repository on top of spi2davex can only be initialized if the
// server is running. ok.
}
}
public void testGetRepository() throws RepositoryException {
RepositoryConfig config = new AbstractRepositoryConfig() {
public RepositoryService getRepositoryService() throws RepositoryException {
return service;
}
};
Repository repo = factory.getRepository(Collections.singletonMap(RepositoryFactoryImpl.REPOSITORY_CONFIG, config));
assertNotNull(repo);
}
public void testGetRepositoryWithLogger() throws RepositoryException {
RepositoryConfig config = new AbstractRepositoryConfig() {
public RepositoryService getRepositoryService() throws RepositoryException {
return service;
}
};
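// try repository creation with several values for the log writer provider parameter: null, a Boolean, and a real Slf4jLogWriterProvider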
List lwprovider = new ArrayList();
lwprovider.add(null);
lwprovider.add(new Boolean(true));
lwprovider.add(new Slf4jLogWriterProvider());
Map params = new HashMap();
params.put(RepositoryFactoryImpl.REPOSITORY_CONFIG, config);
for (int i = 0; i < lwprovider.size(); i++) {
params.put(RepositoryConfigImpl.PARAM_LOG_WRITER_PROVIDER, lwprovider.get(i));
Repository repo = factory.getRepository(params);
assertNotNull(repo);
}
}
public void testGetRepositoryUnknownParams() throws RepositoryException {
Repository repo = factory.getRepository(Collections.EMPTY_MAP);
assertNull(repo);
}
//--------------------------------------------------------------------------
/**
* Dummy RepositoryService
*/
private static final class RepositoryServiceImpl implements RepositoryService {
public IdFactory getIdFactory() throws RepositoryException {
return null;
}
public NameFactory getNameFactory() throws RepositoryException {
return null;
}
public PathFactory getPathFactory() throws RepositoryException {
return null;
}
public QValueFactory getQValueFactory() throws RepositoryException {
return null;
}
public Map getRepositoryDescriptors() throws RepositoryException {
return null;
}
public SessionInfo obtain(Credentials credentials, String workspaceName) throws LoginException, NoSuchWorkspaceException, RepositoryException {
return null;
}
public SessionInfo obtain(SessionInfo sessionInfo, String workspaceName) throws LoginException, NoSuchWorkspaceException, RepositoryException {
return null;
}
public SessionInfo impersonate(SessionInfo sessionInfo, Credentials credentials) throws LoginException, RepositoryException {
return null;
}
public void dispose(SessionInfo sessionInfo) throws RepositoryException {
}
public String[] getWorkspaceNames(SessionInfo sessionInfo) throws RepositoryException {
return new String[0];
}
public boolean isGranted(SessionInfo sessionInfo, ItemId itemId, String[] actions) throws RepositoryException {
return false;
}
public QNodeDefinition getNodeDefinition(SessionInfo sessionInfo, NodeId nodeId) throws RepositoryException {
return null;
}
public QPropertyDefinition getPropertyDefinition(SessionInfo sessionInfo, PropertyId propertyId) throws RepositoryException {
return null;
}
public NodeInfo getNodeInfo(SessionInfo sessionInfo, NodeId nodeId) throws ItemNotFoundException, RepositoryException {
return null;
}
public Iterator getItemInfos(SessionInfo sessionInfo, NodeId nodeId) throws ItemNotFoundException, RepositoryException {
return null;
}
public Iterator getChildInfos(SessionInfo sessionInfo, NodeId parentId) throws ItemNotFoundException, RepositoryException {
return null;
}
public Iterator<PropertyId> getReferences(SessionInfo sessionInfo, NodeId nodeId, Name propertyName, boolean weakReferences) throws ItemNotFoundException, RepositoryException {
return null;
}
public PropertyInfo getPropertyInfo(SessionInfo sessionInfo, PropertyId propertyId) throws ItemNotFoundException, RepositoryException {
return null;
}
public Batch createBatch(SessionInfo sessionInfo, ItemId itemId) throws RepositoryException {
return null;
}
public void submit(Batch batch) throws PathNotFoundException, ItemNotFoundException, NoSuchNodeTypeException, ValueFormatException, VersionException, LockException, ConstraintViolationException, AccessDeniedException, UnsupportedRepositoryOperationException, RepositoryException {
}
public void importXml(SessionInfo sessionInfo, NodeId parentId, InputStream xmlStream, int uuidBehaviour) throws ItemExistsException, PathNotFoundException, VersionException, ConstraintViolationException, LockException, AccessDeniedException, UnsupportedRepositoryOperationException, RepositoryException {
}
public void move(SessionInfo sessionInfo, NodeId srcNodeId, NodeId destParentNodeId, Name destName) throws ItemExistsException, PathNotFoundException, VersionException, ConstraintViolationException, LockException, AccessDeniedException, UnsupportedRepositoryOperationException, RepositoryException {
}
public void copy(SessionInfo sessionInfo, String srcWorkspaceName, NodeId srcNodeId, NodeId destParentNodeId, Name destName) throws NoSuchWorkspaceException, ConstraintViolationException, VersionException, AccessDeniedException, PathNotFoundException, ItemExistsException, LockException, UnsupportedRepositoryOperationException, RepositoryException {
}
public void update(SessionInfo sessionInfo, NodeId nodeId, String srcWorkspaceName) throws NoSuchWorkspaceException, AccessDeniedException, LockException, InvalidItemStateException, RepositoryException {
}
public void clone(SessionInfo sessionInfo, String srcWorkspaceName, NodeId srcNodeId, NodeId destParentNodeId, Name destName, boolean removeExisting) throws NoSuchWorkspaceException, ConstraintViolationException, VersionException, AccessDeniedException, PathNotFoundException, ItemExistsException, LockException, UnsupportedRepositoryOperationException, RepositoryException {
}
public LockInfo getLockInfo(SessionInfo sessionInfo, NodeId nodeId) throws AccessDeniedException, RepositoryException {
return null;
}
public LockInfo lock(SessionInfo sessionInfo, NodeId nodeId, boolean deep, boolean sessionScoped) throws UnsupportedRepositoryOperationException, LockException, AccessDeniedException, RepositoryException {
return null;
}
public LockInfo lock(SessionInfo sessionInfo, NodeId nodeId, boolean deep, boolean sessionScoped, long timeoutHint, String ownerHint) throws UnsupportedRepositoryOperationException, LockException, AccessDeniedException, RepositoryException {
return null;
}
public void refreshLock(SessionInfo sessionInfo, NodeId nodeId) throws UnsupportedRepositoryOperationException, LockException, AccessDeniedException, RepositoryException {
}
public void unlock(SessionInfo sessionInfo, NodeId nodeId) throws UnsupportedRepositoryOperationException, LockException, AccessDeniedException, RepositoryException {
}
public NodeId checkin(SessionInfo sessionInfo, NodeId nodeId) throws VersionException, UnsupportedRepositoryOperationException, InvalidItemStateException, LockException, RepositoryException {
return null;
}
public void checkout(SessionInfo sessionInfo, NodeId nodeId) throws UnsupportedRepositoryOperationException, LockException, RepositoryException {
}
public NodeId checkpoint(SessionInfo sessionInfo, NodeId nodeId) throws UnsupportedRepositoryOperationException, RepositoryException {
return null;
}
public void removeVersion(SessionInfo sessionInfo, NodeId versionHistoryId, NodeId versionId) throws ReferentialIntegrityException, AccessDeniedException, UnsupportedRepositoryOperationException, VersionException, RepositoryException {
}
public void restore(SessionInfo sessionInfo, NodeId nodeId, NodeId versionId, boolean removeExisting) throws VersionException, PathNotFoundException, ItemExistsException, UnsupportedRepositoryOperationException, LockException, InvalidItemStateException, RepositoryException {
}
public void restore(SessionInfo sessionInfo, NodeId[] versionIds, boolean removeExisting) throws ItemExistsException, UnsupportedRepositoryOperationException, VersionException, LockException, InvalidItemStateException, RepositoryException {
}
public Iterator merge(SessionInfo sessionInfo, NodeId nodeId, String srcWorkspaceName, boolean bestEffort) throws NoSuchWorkspaceException, AccessDeniedException, MergeException, LockException, InvalidItemStateException, RepositoryException {
return null;
}
public Iterator merge(SessionInfo sessionInfo, NodeId nodeId, String srcWorkspaceName, boolean bestEffort, boolean isShallow) throws NoSuchWorkspaceException, AccessDeniedException, MergeException, LockException, InvalidItemStateException, RepositoryException {
return null;
}
public void resolveMergeConflict(SessionInfo sessionInfo, NodeId nodeId, NodeId[] mergeFailedIds, NodeId[] predecessorIds) throws VersionException, InvalidItemStateException, UnsupportedRepositoryOperationException, RepositoryException {
}
public void addVersionLabel(SessionInfo sessionInfo, NodeId versionHistoryId, NodeId versionId, Name label, boolean moveLabel) throws VersionException, RepositoryException {
}
public void removeVersionLabel(SessionInfo sessionInfo, NodeId versionHistoryId, NodeId versionId, Name label) throws VersionException, RepositoryException {
}
public NodeId createActivity(SessionInfo sessionInfo, String title) throws UnsupportedRepositoryOperationException, RepositoryException {
return null;
}
public void removeActivity(SessionInfo sessionInfo, NodeId activityId) throws UnsupportedRepositoryOperationException, RepositoryException {
}
public Iterator mergeActivity(SessionInfo sessionInfo, NodeId activityId) throws UnsupportedRepositoryOperationException, RepositoryException {
return null;
}
public NodeId createConfiguration(SessionInfo sessionInfo, NodeId nodeId, NodeId baselineId) throws UnsupportedRepositoryOperationException, RepositoryException {
return null;
}
public String[] getSupportedQueryLanguages(SessionInfo sessionInfo) throws RepositoryException {
return new String[0];
}
public String[] checkQueryStatement(SessionInfo sessionInfo, String statement, String language, Map namespaces) throws InvalidQueryException, RepositoryException {
return new String[0];
}
public QueryInfo executeQuery(SessionInfo sessionInfo, String statement, String language, Map<String, String> namespaces, long limit, long offset, Map<String, QValue> values) throws RepositoryException {
return null;
}
public EventFilter createEventFilter(SessionInfo sessionInfo, int eventTypes, Path absPath, boolean isDeep, String[] uuid, Name[] nodeTypeName, boolean noLocal) throws UnsupportedRepositoryOperationException, RepositoryException {
return null;
}
public Subscription createSubscription(SessionInfo sessionInfo, EventFilter[] filters) throws UnsupportedRepositoryOperationException, RepositoryException {
return null;
}
public void updateEventFilters(Subscription subscription, EventFilter[] filters) throws RepositoryException {
}
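// JCR-2108: dummy implementation of the new getEvents method; returns no event bundles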
public EventBundle[] getEvents(Subscription subscription, long timeout) throws RepositoryException, InterruptedException {
return new EventBundle[0];
}
public void dispose(Subscription subscription) throws RepositoryException {
}
public Map getRegisteredNamespaces(SessionInfo sessionInfo) throws RepositoryException {
return null;
}
public String getNamespaceURI(SessionInfo sessionInfo, String prefix) throws NamespaceException, RepositoryException {
return null;
}
public String getNamespacePrefix(SessionInfo sessionInfo, String uri) throws NamespaceException, RepositoryException {
return null;
}
public void registerNamespace(SessionInfo sessionInfo, String prefix, String uri) throws NamespaceException, UnsupportedRepositoryOperationException, AccessDeniedException, RepositoryException {
}
public void unregisterNamespace(SessionInfo sessionInfo, String uri) throws NamespaceException, UnsupportedRepositoryOperationException, AccessDeniedException, RepositoryException {
}
public Iterator getQNodeTypeDefinitions(SessionInfo sessionInfo) throws RepositoryException {
return null;
}
public Iterator getQNodeTypeDefinitions(SessionInfo sessionInfo, Name[] nodetypeNames) throws RepositoryException {
return null;
}
public void registerNodeTypes(SessionInfo sessionInfo, QNodeTypeDefinition[] nodeTypeDefinitions, boolean allowUpdate) throws InvalidNodeTypeDefinitionException, NodeTypeExistsException, UnsupportedRepositoryOperationException, RepositoryException {
}
public void unregisterNodeTypes(SessionInfo sessionInfo, Name[] nodeTypeNames) throws UnsupportedRepositoryOperationException, NoSuchNodeTypeException, RepositoryException {
}
public void createWorkspace(SessionInfo sessionInfo, String name, String srcWorkspaceName) throws AccessDeniedException, UnsupportedRepositoryOperationException, NoSuchWorkspaceException, RepositoryException {
}
public void deleteWorkspace(SessionInfo sessionInfo, String name) throws AccessDeniedException, UnsupportedRepositoryOperationException, NoSuchWorkspaceException, RepositoryException {
}
}
}
|
JCR-2108: add dummy impl of new getEvents method
git-svn-id: 02b679d096242155780e1604e997947d154ee04a@790834 13f79535-47bb-0310-9956-ffa450edef68
|
jackrabbit-jcr-client/src/test/java/org/apache/jackrabbit/client/RepositoryFactoryImplTest.java
|
JCR-2108: add dummy impl of new getEvents method
|
|
Java
|
apache-2.0
|
0d813ef491ab9c4e3407553f0ee10900f3f24f7b
| 0
|
luj1985/dionysus,luj1985/dionysus,luj1985/dionysus
|
package com.huixinpn.dionysus.controller.course;
import com.huixinpn.dionysus.domain.course.Course;
import com.huixinpn.dionysus.domain.user.User;
import com.huixinpn.dionysus.dto.EntityPageData;
import com.huixinpn.dionysus.dto.course.CourseData;
import com.huixinpn.dionysus.repository.course.CourseRepository;
import com.huixinpn.dionysus.repository.tag.TagRepository;
import com.huixinpn.dionysus.repository.user.UserRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
@RestController
public class CourseController {
@Autowired
private CourseRepository courseRepository;
@Autowired
private TagRepository tagRepository;
@Autowired
private UserRepository userRepository;
@RequestMapping(value = "/course/{id}/reg", method = RequestMethod.GET)
public ResponseEntity registerCourse(@PathVariable Long id) {
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
Object principle = authentication.getPrincipal();
if (!(principle instanceof User)) {
return new ResponseEntity(HttpStatus.FORBIDDEN);
}
User login = (User) authentication.getPrincipal();
User reloaded = userRepository.findOne(login.getId());
Course course = courseRepository.findOne(id);
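// enroll only when the course exists, still has free capacity (or no capacity limit), and the user is not already registered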
if (course != null &&
(course.getCapacity() == null || course.getUsers().size() < course.getCapacity()) &&
!reloaded.getCourses().contains(course)) {
course.getUsers().add(reloaded);
courseRepository.save(course);
}
return new ResponseEntity(HttpStatus.OK);
}
@RequestMapping(value = "/courses", method = RequestMethod.GET)
public
@ResponseBody
EntityPageData<CourseData> listCourses(@RequestParam(value = "page", required = false) Integer page,
@RequestParam(value = "size", required = false) Integer size) {
int pageSize = (size == null ? EntityPageData.getDefaultPageSize() : size);
int pageNumber = (page == null ? 0 : page);
Page<Course> pagedCourses = courseRepository.findAll(new PageRequest(pageNumber, pageSize));
return new EntityPageData<>(pagedCourses, CourseData.class);
}
@RequestMapping(value = "/courses/{id}", method = RequestMethod.GET)
public
@ResponseBody
CourseData listCourse(@PathVariable Long id) {
Course course = courseRepository.findOne(id);
return new CourseData(course);
}
}
|
dionysus-webapp/src/main/java/com/huixinpn/dionysus/controller/course/CourseController.java
|
package com.huixinpn.dionysus.controller.course;
import com.huixinpn.dionysus.domain.course.Course;
import com.huixinpn.dionysus.domain.user.User;
import com.huixinpn.dionysus.dto.EntityPageData;
import com.huixinpn.dionysus.dto.course.CourseData;
import com.huixinpn.dionysus.repository.course.CourseRepository;
import com.huixinpn.dionysus.repository.user.UserRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
@RestController
public class CourseController {
@Autowired
private CourseRepository courseRepository;
@Autowired
private UserRepository userRepository;
@RequestMapping(value = "/course/registration/{id}", method = RequestMethod.GET)
public ResponseEntity registerCourse(@PathVariable Long id) throws Exception {
Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
Object principle = authentication.getPrincipal();
if (!(principle instanceof User)) {
return new ResponseEntity(HttpStatus.FORBIDDEN);
}
User login = (User) authentication.getPrincipal();
User reloaded = userRepository.findOne(login.getId());
Course course = courseRepository.findOne(id);
if (course != null &&
(course.getCapacity() == null || course.getUsers().size() < course.getCapacity()) &&
!reloaded.getCourses().contains(course)) {
course.getUsers().add(reloaded);
courseRepository.save(course);
}
return new ResponseEntity(HttpStatus.OK);
}
@RequestMapping(value = "/courses", method = RequestMethod.GET)
public
@ResponseBody
EntityPageData<CourseData> listCourses(@RequestParam(value = "page", required = false) Integer page,
@RequestParam(value = "size", required = false) Integer size) {
int pageSize = (size == null ? EntityPageData.getDefaultPageSize() : size);
int pageNumber = (page == null ? 0 : page);
Page<Course> pagedCourses = courseRepository.findAll(new PageRequest(pageNumber, pageSize));
return new EntityPageData<>(pagedCourses, CourseData.class);
}
}
|
add /courses/{id} mapping in CourseController
|
dionysus-webapp/src/main/java/com/huixinpn/dionysus/controller/course/CourseController.java
|
add /courses/{id} mapping in CourseController
|
|
Java
|
apache-2.0
|
18ac39a4725244fb91d87821ffe240d2e1c53a1d
| 0
|
Lekanich/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,slisson/intellij-community,fitermay/intellij-community,da1z/intellij-community,holmes/intellij-community,samthor/intellij-community,xfournet/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,ernestp/consulo,Distrotech/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,TangHao1987/intellij-community,Lekanich/intellij-community,samthor/intellij-community,dslomov/intellij-community,nicolargo/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,allotria/intellij-community,kool79/intellij-community,retomerz/intellij-community,retomerz/intellij-community,fnouama/intellij-community,tmpgit/intellij-community,xfournet/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,adedayo/intellij-community,ernestp/consulo,fnouama/intellij-community,holmes/intellij-community,joewalnes/idea-community,FHannes/intellij-community,da1z/intellij-community,fnouama/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,blademainer/intellij-community,michaelgallacher/intellij-community,petteyg/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,clumsy/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,muntasirsyed/intellij-community,salguarnieri/intellij-community,signed/intellij-community,tmpgit/intellij-community,ibinti/intellij-community,TangHao1987/intellij-community,ol-loginov/intellij-community,ivan-fedorov/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,amith01994/intellij-community,allotria/intellij-community,izonder/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,fengbaicanhe/intellij-community,ibinti/intellij-community,FHannes/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,akosyakov/intellij-community,da1z/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,ernestp/consulo,ol-loginov/intellij-community,Distrotech/intellij-community,amith01994/intellij-community,hurricup/intellij-community,ryano144/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,allotria/intellij-community,caot/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,kool79/intellij-community,diorcety/intellij-community,blademainer/intellij-community,vladmm/intellij-community,holmes/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,hurricup/intellij-community,TangHao1987/inte
llij-community,ryano144/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,izonder/intellij-community,tmpgit/intellij-community,caot/intellij-community,pwoodworth/intellij-community,supersven/intellij-community,ryano144/intellij-community,ahb0327/intellij-community,fitermay/intellij-community,samthor/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,diorcety/intellij-community,slisson/intellij-community,salguarnieri/intellij-community,gnuhub/intellij-community,ftomassetti/intellij-community,ftomassetti/intellij-community,consulo/consulo,gnuhub/intellij-community,clumsy/intellij-community,ahb0327/intellij-community,vvv1559/intellij-community,robovm/robovm-studio,xfournet/intellij-community,ftomassetti/intellij-community,holmes/intellij-community,amith01994/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,slisson/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,suncycheng/intellij-community,da1z/intellij-community,joewalnes/idea-community,MichaelNedzelsky/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,pwoodworth/intellij-community,MichaelNedzelsky/intellij-community,youdonghai/intellij-community,izonder/intellij-community,vvv1559/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,MichaelNedzelsky/intellij-community,nicolargo/intellij-community,ibinti/intellij-community,kool79/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,robovm/robovm-studio,ThiagoGarciaAlves/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,slisson/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,holmes/intellij-community,supersven/intellij-community,mglukhikh/intellij-community,fengbaicanhe/intellij-community,slisson/intellij-community,petteyg/intellij-community,holmes/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,wreckJ/intellij-community,ftomassetti/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,muntasirsyed/intellij-community,SerCeMan/intellij-community,joewalnes/idea-community,ernestp/consulo,robovm/robovm-studio,ahb0327/intellij-community,vladmm/intellij-community,ernestp/consulo,petteyg/intellij-community,apixandru/intellij-community,supersven/intellij-community,ryano144/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,kdwink/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,da1z/intellij-community,suncycheng/intellij-community,SerCeMan/intellij-community,fnouama/intellij-community,signed/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,izonder/intellij-community,Lekanich/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,FHannes/intellij-community,pwoodworth/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,tmpgit/intellij-community,diorcety/intellij-community,allotria/intellij-community,ftom
assetti/intellij-community,signed/intellij-community,MER-GROUP/intellij-community,petteyg/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,salguarnieri/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,akosyakov/intellij-community,consulo/consulo,wreckJ/intellij-community,dslomov/intellij-community,kdwink/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,vladmm/intellij-community,hurricup/intellij-community,caot/intellij-community,asedunov/intellij-community,signed/intellij-community,orekyuu/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,xfournet/intellij-community,supersven/intellij-community,jagguli/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,consulo/consulo,orekyuu/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,retomerz/intellij-community,gnuhub/intellij-community,TangHao1987/intellij-community,jagguli/intellij-community,ernestp/consulo,ol-loginov/intellij-community,slisson/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,dslomov/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,apixandru/intellij-community,dslomov/intellij-community,fnouama/intellij-community,semonte/intellij-community,allotria/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,ol-loginov/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,FHannes/intellij-community,ryano144/intellij-community,akosyakov/intellij-community,holmes/intellij-community,ThiagoGarciaAlves/intellij-community,wreckJ/intellij-community,ryano144/intellij-community,joewalnes/idea-community,fitermay/intellij-community,vladmm/intellij-community,retomerz/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,asedunov/intellij-community,signed/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,joewalnes/idea-community,semonte/intellij-community,mglukhikh/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,xfournet/intellij-community,caot/intellij-community,slisson/intellij-community,vvv1559/intellij-community,semonte/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,ibinti/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,fnouama/intellij-community,blademainer/intellij-community,supersven/intellij-community,MER-GROUP/intellij-community,fitermay/intellij-community,apixandru/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,izonder/intellij-community,fitermay/intellij-community,caot/intellij-community,diorcety/intellij-community,caot/intellij-community,adedayo/intellij-community,dslomov/intellij-community,hurricup/intellij-community,da1z/intellij-community,xfournet/intellij-community,jagguli/intellij-community,ahb0327/intellij-community,da1z/intellij-community,kdwink/intellij-community,ahb0327/intellij-community,asedunov/intellij-c
ommunity,izonder/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,adedayo/intellij-community,caot/intellij-community,salguarnieri/intellij-community,robovm/robovm-studio,da1z/intellij-community,semonte/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,holmes/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,petteyg/intellij-community,ibinti/intellij-community,allotria/intellij-community,kool79/intellij-community,kdwink/intellij-community,hurricup/intellij-community,adedayo/intellij-community,consulo/consulo,adedayo/intellij-community,Distrotech/intellij-community,izonder/intellij-community,lucafavatella/intellij-community,clumsy/intellij-community,caot/intellij-community,mglukhikh/intellij-community,orekyuu/intellij-community,asedunov/intellij-community,FHannes/intellij-community,kool79/intellij-community,ryano144/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,clumsy/intellij-community,supersven/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,slisson/intellij-community,vladmm/intellij-community,caot/intellij-community,vladmm/intellij-community,consulo/consulo,xfournet/intellij-community,FHannes/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,apixandru/intellij-community,nicolargo/intellij-community,kool79/intellij-community,blademainer/intellij-community,TangHao1987/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,clumsy/intellij-community,ryano144/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,clumsy/intellij-community,slisson/intellij-community,ryano144/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,blademainer/intellij-community,muntasirsyed/intellij-community,robovm/robovm-studio,joewalnes/idea-community,suncycheng/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,supersven/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,akosyakov/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,kdwink/intellij-community,ryano144/intellij-community,signed/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,samthor/intellij-community,fnouama/intellij-community,fitermay/intellij-community,MER-GROUP/intellij-community,tmpgit/intellij-community,diorcety/intellij-community,blademainer/intellij-community,alphafoobar/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,semonte/intellij-community,samthor/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,fitermay/intellij-community,ol-
loginov/intellij-community,retomerz/intellij-community,Lekanich/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,diorcety/intellij-community,slisson/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,kool79/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,TangHao1987/intellij-community,amith01994/intellij-community,supersven/intellij-community,holmes/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,SerCeMan/intellij-community,wreckJ/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,FHannes/intellij-community,ibinti/intellij-community,MichaelNedzelsky/intellij-community,mglukhikh/intellij-community,caot/intellij-community,asedunov/intellij-community,akosyakov/intellij-community,akosyakov/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,fnouama/intellij-community,nicolargo/intellij-community,diorcety/intellij-community,MichaelNedzelsky/intellij-community,TangHao1987/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,blademainer/intellij-community,asedunov/intellij-community,semonte/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,izonder/intellij-community,petteyg/intellij-community,da1z/intellij-community,youdonghai/intellij-community,izonder/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,diorcety/intellij-community,samthor/intellij-community,semonte/intellij-community,fnouama/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,allotria/intellij-community,signed/intellij-community,adedayo/intellij-community,holmes/intellij-community,ibinti/intellij-community,asedunov/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,salguarnieri/intellij-community,orekyuu/intellij-community,consulo/consulo,muntasirsyed/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,caot/intellij-community,fitermay/intellij-community,amith01994/intellij-community,dslomov/intellij-community,FHannes/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,pwoodworth/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,wreckJ/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,joewalnes/idea-community,signed/intellij-community,alphafoobar/intellij-community,amith01994/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,MER-GROUP/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,dslomov/intellij-community,apixandru/intellij-community,adedayo/intellij-community,diorcety/intellij-community,signed/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,ibinti/intellij-community,allotria/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,dslomov/intellij-community,slisson/intellij-community,xfournet/intellij-communit
y,vvv1559/intellij-community,semonte/intellij-community,holmes/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,Distrotech/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,kool79/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,samthor/intellij-community,blademainer/intellij-community,ibinti/intellij-community,jagguli/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,petteyg/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,da1z/intellij-community,alphafoobar/intellij-community,ryano144/intellij-community,pwoodworth/intellij-community,diorcety/intellij-community,hurricup/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,signed/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,kdwink/intellij-community,SerCeMan/intellij-community,akosyakov/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,clumsy/intellij-community,petteyg/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,supersven/intellij-community,joewalnes/idea-community,youdonghai/intellij-community,kdwink/intellij-community,apixandru/intellij-community,adedayo/intellij-community,MichaelNedzelsky/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,kool79/intellij-community,SerCeMan/intellij-community,adedayo/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,ivan-fedorov/intellij-community,apixandru/intellij-community,fnouama/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,fnouama/intellij-community,blademainer/intellij-community,alphafoobar/intellij-community,da1z/intellij-community,supersven/intellij-community,salguarnieri/intellij-community,ol-loginov/intellij-community,lucafavatella/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,gnuhub/intellij-community,lucafavatella/intellij-community,FHannes/intellij-community,supersven/intellij-community,petteyg/intellij-community,amith01994/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,blademainer/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,diorcety/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,orekyuu/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,dslomov/intellij-community,samthor/intellij-community,clumsy/intellij-community,kool79/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,joewalnes/idea-community,kdwink/intellij-community,fengbaicanhe/intellij-community,Lekanich/intellij-community,wreckJ/intellij-community,Thi
agoGarciaAlves/intellij-community,asedunov/intellij-community,ahb0327/intellij-community,samthor/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,ahb0327/intellij-community,signed/intellij-community,gnuhub/intellij-community,mglukhikh/intellij-community
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.completion;
import com.intellij.codeInsight.completion.impl.CompletionServiceImpl;
import com.intellij.codeInsight.lookup.LookupAdapter;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupEvent;
import com.intellij.codeInsight.lookup.LookupManager;
import com.intellij.codeInsight.lookup.impl.LookupImpl;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.CaretModel;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.SelectionModel;
import com.intellij.openapi.editor.event.*;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.ex.ProgressIndicatorEx;
import com.intellij.patterns.ElementPattern;
import com.intellij.psi.util.PsiUtilBase;
import com.intellij.ui.HintListener;
import com.intellij.ui.LightweightHint;
import com.intellij.util.ObjectUtils;
import com.intellij.util.concurrency.Semaphore;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Collections;
import java.util.EventObject;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
/**
* @author peter
*/
public class CompletionProgressIndicator extends ProgressIndicatorBase implements CompletionProcess{
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.completion.CompletionProgressIndicator");
private final Editor myEditor;
private final CompletionParameters myParameters;
private final CodeCompletionHandlerBase myHandler;
private final LookupImpl myLookup;
private final MergingUpdateQueue myQueue;
private boolean myDisposed;
private boolean myInitialized;
private int myCount;
private final Update myUpdate = new Update("update") {
public void run() {
updateLookup();
}
};
private LightweightHint myHint;
private final Semaphore myFreezeSemaphore;
private boolean myModifiersReleased;
private String myOldDocumentText;
private int myOldCaret;
private int myOldStart;
private int myOldEnd;
private boolean myBackgrounded = true;
private OffsetMap myOffsetMap;
private final CopyOnWriteArrayList<Pair<Integer, ElementPattern<String>>> myRestartingPrefixConditions = ContainerUtil.createEmptyCOWList();
private final LookupAdapter myLookupListener = new LookupAdapter() {
public void itemSelected(LookupEvent event) {
finishCompletionProcess();
LookupElement item = event.getItem();
if (item == null) return;
setMergeCommand();
myOffsetMap.addOffset(CompletionInitializationContext.START_OFFSET, myEditor.getCaretModel().getOffset() - item.getLookupString().length());
CodeCompletionHandlerBase.selectLookupItem(item, event.getCompletionChar(), CompletionProgressIndicator.this, myLookup.getItems());
}
public void lookupCanceled(final LookupEvent event) {
finishCompletionProcess();
}
};
public CompletionProgressIndicator(final Editor editor, CompletionParameters parameters, CodeCompletionHandlerBase handler, Semaphore freezeSemaphore,
final OffsetMap offsetMap, LookupImpl lookup) {
myEditor = editor;
myParameters = parameters;
myHandler = handler;
myFreezeSemaphore = freezeSemaphore;
myOffsetMap = offsetMap;
myLookup = lookup;
myLookup.initLookup(new CompletionLookupArranger(parameters));
myLookup.addLookupListener(myLookupListener);
myLookup.setCalculating(true);
myQueue = new MergingUpdateQueue("completion lookup progress", 200, true, myEditor.getContentComponent());
ApplicationManager.getApplication().assertIsDispatchThread();
registerItself();
if (!ApplicationManager.getApplication().isUnitTestMode()) {
scheduleAdvertising();
}
trackModifiers();
}
public OffsetMap getOffsetMap() {
return myOffsetMap;
}
public int getSelectionEndOffset() {
return getOffsetMap().getOffset(CompletionInitializationContext.SELECTION_END_OFFSET);
}
void notifyBackgrounded() {
ApplicationManager.getApplication().assertIsDispatchThread();
myBackgrounded = true;
}
boolean isBackgrounded() {
ApplicationManager.getApplication().assertIsDispatchThread();
return myBackgrounded;
}
private void scheduleAdvertising() {
ApplicationManager.getApplication().executeOnPooledThread(new Runnable() {
public void run() {
if (isOutdated()) return; //tests?
final List<CompletionContributor> list = ApplicationManager.getApplication().runReadAction(new Computable<List<CompletionContributor>>() {
public List<CompletionContributor> compute() {
if (isOutdated()) {
return Collections.emptyList();
}
return CompletionContributor.forParameters(myParameters);
}
});
for (final CompletionContributor contributor : list) {
if (myLookup.getAdvertisementText() != null) return;
if (!myLookup.isCalculating() && !myLookup.isVisible()) return;
String s = ApplicationManager.getApplication().runReadAction(new Computable<String>() {
@Nullable
public String compute() {
if (isOutdated()) {
return null;
}
return contributor.advertise(myParameters);
}
});
if (myLookup.getAdvertisementText() != null) return;
if (s != null) {
myLookup.setAdvertisementText(s);
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
if (isOutdated()) {
return;
}
if (isAutopopupCompletion() && !myInitialized) {
return;
}
if (!isBackgrounded()) {
return;
}
updateLookup();
}
}, myQueue.getModalityState());
return;
}
}
}
});
}
private boolean isOutdated() {
return myDisposed ||
myEditor.isDisposed() ||
!ApplicationManager.getApplication().isUnitTestMode() && myEditor.getComponent().getRootPane() == null;
}
private void trackModifiers() {
final JComponent contentComponent = myEditor.getContentComponent();
contentComponent.addKeyListener(new KeyAdapter() {
public void keyPressed(KeyEvent e) {
processModifier(e);
}
public void keyReleased(KeyEvent e) {
processModifier(e);
}
private void processModifier(KeyEvent e) {
final int code = e.getKeyCode();
if (code == KeyEvent.VK_CONTROL || code == KeyEvent.VK_META || code == KeyEvent.VK_ALT || code == KeyEvent.VK_SHIFT) {
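// a modifier key was pressed or released: remember it, clean up any saved document state, and detach this listener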
myModifiersReleased = true;
if (myOldDocumentText != null) {
cleanup();
}
contentComponent.removeKeyListener(this);
}
}
});
}
private void setMergeCommand() {
CommandProcessor.getInstance().setCurrentCommandGroupId("Completion" + hashCode());
}
public void showLookup() {
updateLookup();
}
public CompletionParameters getParameters() {
return myParameters;
}
private void registerItself() {
CompletionServiceImpl.getCompletionService().setCurrentCompletion(this);
}
public void liveAfterDeath(@Nullable final LightweightHint hint) {
if (myModifiersReleased || ApplicationManager.getApplication().isUnitTestMode()) {
return;
}
registerItself();
myHint = hint;
if (hint != null) {
hint.addHintListener(new HintListener() {
public void hintHidden(final EventObject event) {
hint.removeHintListener(this);
cleanup();
}
});
}
final Document document = myEditor.getDocument();
document.addDocumentListener(new DocumentAdapter() {
@Override
public void beforeDocumentChange(DocumentEvent e) {
document.removeDocumentListener(this);
cleanup();
}
});
final SelectionModel selectionModel = myEditor.getSelectionModel();
selectionModel.addSelectionListener(new SelectionListener() {
public void selectionChanged(SelectionEvent e) {
selectionModel.removeSelectionListener(this);
cleanup();
}
});
final CaretModel caretModel = myEditor.getCaretModel();
caretModel.addCaretListener(new CaretListener() {
public void caretPositionChanged(CaretEvent e) {
caretModel.removeCaretListener(this);
cleanup();
}
});
}
public CodeCompletionHandlerBase getHandler() {
return myHandler;
}
public LookupImpl getLookup() {
return myLookup;
}
private void updateLookup() {
ApplicationManager.getApplication().assertIsDispatchThread();
if (isOutdated()) return;
if (!myInitialized) {
myInitialized = true;
if (StringUtil.isEmpty(myLookup.getAdvertisementText()) && !isAutopopupCompletion()) {
final String text = DefaultCompletionContributor.getDefaultAdvertisementText(myParameters);
if (text != null) {
myLookup.setAdvertisementText(text);
}
}
myLookup.show();
}
myLookup.refreshUi();
}
public int getCount() {
return myCount;
}
final boolean isInsideIdentifier() {
return getIdentifierEndOffset() != getSelectionEndOffset();
}
public int getIdentifierEndOffset() {
return myOffsetMap.getOffset(CompletionInitializationContext.IDENTIFIER_END_OFFSET);
}
public synchronized void addItem(final LookupElement item) {
if (!isRunning()) return;
ProgressManager.checkCanceled();
final boolean unitTestMode = ApplicationManager.getApplication().isUnitTestMode();
if (!unitTestMode) {
assert !ApplicationManager.getApplication().isDispatchThread();
}
myLookup.addItem(item);
myCount++;
if (unitTestMode) return;
if (myCount == 1) {
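// after the first item arrives, wait ~300 ms on a pooled thread before releasing the freeze semaphore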
ApplicationManager.getApplication().executeOnPooledThread(new Runnable() {
public void run() {
try {
Thread.sleep(300);
}
catch (InterruptedException e) {
LOG.error(e);
}
myFreezeSemaphore.up();
}
});
}
myQueue.queue(myUpdate);
}
public void closeAndFinish(boolean hideLookup) {
if (myHint != null) {
myHint.hide();
}
if (LookupManager.getActiveLookup(myEditor) == myLookup) {
myLookup.removeLookupListener(myLookupListener);
finishCompletionProcess();
if (hideLookup) {
LookupManager.getInstance(getProject()).hideActiveLookup();
}
}
}
private void finishCompletionProcess() {
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
cancel();
}
});
assert !myDisposed;
myDisposed = true;
ApplicationManager.getApplication().assertIsDispatchThread();
Disposer.dispose(myQueue);
cleanup();
}
@TestOnly
public static void cleanupForNextTest() {
CompletionProgressIndicator currentCompletion = CompletionServiceImpl.getCompletionService().getCurrentCompletion();
if (currentCompletion != null) {
currentCompletion.finishCompletionProcess();
}
}
private void cleanup() {
assert ApplicationManager.getApplication().isDispatchThread();
myHint = null;
myOldDocumentText = null;
unregisterItself();
}
private void unregisterItself() {
CompletionServiceImpl.getCompletionService().setCurrentCompletion(null);
}
public void stop() {
super.stop();
myQueue.cancelAllUpdates();
myFreezeSemaphore.up();
invokeLaterIfNotDispatch(new Runnable() {
public void run() {
if (isOutdated()) return;
if (isCanceled()) return;
//what if a new completion was invoked by the user before this 'later'?
if (CompletionProgressIndicator.this != CompletionServiceImpl.getCompletionService().getCurrentCompletion()) return;
myLookup.setCalculating(false);
if (!isBackgrounded()) return;
if (hideAutopopupIfMeaningless()) {
return;
}
if (myCount == 0) {
LookupManager.getInstance(getProject()).hideActiveLookup();
assert CompletionServiceImpl.getCompletionService().getCurrentCompletion() == null;
if (!isAutopopupCompletion() ) {
myHandler.handleEmptyLookup(getProject(), myEditor, myParameters, CompletionProgressIndicator.this);
}
} else {
updateLookup();
}
}
});
}
public boolean hideAutopopupIfMeaningless() {
if (!myLookup.isFocused() && !myLookup.isCalculating()) {
myLookup.refreshUi();
final List<LookupElement> items = myLookup.getItems();
if (items.size() == 0 || items.size() == 1 && (items.get(0).getPrefixMatcher().getPrefix() + myLookup.getAdditionalPrefix()).equals(items.get(0).getLookupString())) {
myLookup.hideLookup(false);
assert CompletionServiceImpl.getCompletionService().getCurrentCompletion() == null;
return true;
}
}
return false;
}
private void invokeLaterIfNotDispatch(final Runnable runnable) {
final Application application = ApplicationManager.getApplication();
if (application.isUnitTestMode()) {
runnable.run();
} else {
application.invokeLater(runnable, myQueue.getModalityState());
}
}
public volatile String cancelTrace;
@Override
public void cancel() {
if (cancelTrace == null) {
Throwable t = new Throwable();
final StringWriter writer = new StringWriter();
t.printStackTrace(new PrintWriter(writer));
cancelTrace = writer.toString();
}
super.cancel();
}
@Override
public void start() {
if (isCanceled()) {
throw new AssertionError("Restarting completion process is prohibited: trace=" + cancelTrace);
}
super.start();
}
@Override
public void initStateFrom(@NotNull ProgressIndicatorEx indicator) {
if (isCanceled()) {
throw new AssertionError("Re-init-ting completion process is prohibited: trace=" + cancelTrace);
}
if (indicator.isCanceled()) {
LOG.error("initStateFrom canceled: " + indicator);
}
super.initStateFrom(indicator);
}
public boolean fillInCommonPrefix(final boolean explicit) {
if (isInsideIdentifier()) {
return false;
}
final Boolean aBoolean = new WriteCommandAction<Boolean>(getProject()) {
protected void run(Result<Boolean> result) throws Throwable {
if (!explicit) {
setMergeCommand();
}
try {
result.setResult(myLookup.fillInCommonPrefix(explicit));
}
catch (Exception e) {
LOG.error(e);
}
}
}.execute().getResultObject();
return aBoolean.booleanValue();
}
public boolean isInitialized() {
return myInitialized;
}
public void restorePrefix() {
setMergeCommand();
if (myOldDocumentText != null) {
myEditor.getDocument().setText(myOldDocumentText);
myEditor.getSelectionModel().setSelection(myOldStart, myOldEnd);
myEditor.getCaretModel().moveToOffset(myOldCaret);
myOldDocumentText = null;
return;
}
getLookup().restorePrefix();
}
public Editor getEditor() {
return myEditor;
}
public void rememberDocumentState() {
if (myModifiersReleased) {
return;
}
myOldDocumentText = myEditor.getDocument().getText();
myOldCaret = myEditor.getCaretModel().getOffset();
myOldStart = myEditor.getSelectionModel().getSelectionStart();
myOldEnd = myEditor.getSelectionModel().getSelectionEnd();
}
public boolean isRepeatedInvocation(CompletionType completionType, Editor editor) {
return completionType == myParameters.getCompletionType() && editor == myEditor;
}
@Override
public boolean isAutopopupCompletion() {
return !myLookup.isFocused();
}
@NotNull
public Project getProject() {
return ObjectUtils.assertNotNull(myEditor.getProject());
}
public void addWatchedPrefix(int startOffset, ElementPattern<String> restartCondition) {
if (isAutopopupCompletion()) {
myRestartingPrefixConditions.add(Pair.create(startOffset, restartCondition));
}
}
public void prefixUpdated() {
final CharSequence text = myEditor.getDocument().getCharsSequence();
final int caretOffset = myEditor.getCaretModel().getOffset();
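// restart completion when the text typed since a watched offset matches its registered restart condition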
for (Pair<Integer, ElementPattern<String>> pair : myRestartingPrefixConditions) {
final String newPrefix = text.subSequence(pair.first, caretOffset).toString();
if (pair.second.accepts(newPrefix)) {
restartCompletion();
return;
}
}
hideAutopopupIfMeaningless();
}
public void restartCompletion() {
closeAndFinish(false);
myHandler.invokeCompletion(getProject(), myEditor, PsiUtilBase.getPsiFileInEditor(myEditor, getProject()), myParameters.getInvocationCount());
}
}
|
platform/lang-impl/src/com/intellij/codeInsight/completion/CompletionProgressIndicator.java
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.completion;
import com.intellij.codeInsight.completion.impl.CompletionServiceImpl;
import com.intellij.codeInsight.lookup.LookupAdapter;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupEvent;
import com.intellij.codeInsight.lookup.LookupManager;
import com.intellij.codeInsight.lookup.impl.LookupImpl;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.CaretModel;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.SelectionModel;
import com.intellij.openapi.editor.event.*;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.ex.ProgressIndicatorEx;
import com.intellij.patterns.ElementPattern;
import com.intellij.psi.util.PsiUtilBase;
import com.intellij.ui.HintListener;
import com.intellij.ui.LightweightHint;
import com.intellij.util.ObjectUtils;
import com.intellij.util.concurrency.Semaphore;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Collections;
import java.util.EventObject;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
/**
* @author peter
*/
public class CompletionProgressIndicator extends ProgressIndicatorBase implements CompletionProcess {
private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.completion.CompletionProgressIndicator");
private final Editor myEditor;
private final CompletionParameters myParameters;
private final CodeCompletionHandlerBase myHandler;
private final LookupImpl myLookup;
private final MergingUpdateQueue myQueue;
private boolean myDisposed;
private boolean myInitialized;
private int myCount;
private final Update myUpdate = new Update("update") {
public void run() {
updateLookup();
}
};
private LightweightHint myHint;
private final Semaphore myFreezeSemaphore;
private boolean myModifiersReleased;
private String myOldDocumentText;
private int myOldCaret;
private int myOldStart;
private int myOldEnd;
private boolean myBackgrounded = true;
private OffsetMap myOffsetMap;
private final CopyOnWriteArrayList<Pair<Integer, ElementPattern<String>>> myRestartingPrefixConditions = ContainerUtil.createEmptyCOWList();
private final LookupAdapter myLookupListener = new LookupAdapter() {
public void itemSelected(LookupEvent event) {
finishCompletionProcess();
LookupElement item = event.getItem();
if (item == null) return;
setMergeCommand();
myOffsetMap.addOffset(CompletionInitializationContext.START_OFFSET, myEditor.getCaretModel().getOffset() - item.getLookupString().length());
CodeCompletionHandlerBase.selectLookupItem(item, event.getCompletionChar(), CompletionProgressIndicator.this, myLookup.getItems());
}
public void lookupCanceled(final LookupEvent event) {
finishCompletionProcess();
}
};
public CompletionProgressIndicator(final Editor editor, CompletionParameters parameters, CodeCompletionHandlerBase handler, Semaphore freezeSemaphore,
final OffsetMap offsetMap, LookupImpl lookup) {
myEditor = editor;
myParameters = parameters;
myHandler = handler;
myFreezeSemaphore = freezeSemaphore;
myOffsetMap = offsetMap;
myLookup = lookup;
myLookup.initLookup(new CompletionLookupArranger(parameters));
myLookup.addLookupListener(myLookupListener);
myLookup.setCalculating(true);
myQueue = new MergingUpdateQueue("completion lookup progress", 200, true, myEditor.getContentComponent());
ApplicationManager.getApplication().assertIsDispatchThread();
registerItself();
if (!ApplicationManager.getApplication().isUnitTestMode()) {
scheduleAdvertising();
}
trackModifiers();
}
public OffsetMap getOffsetMap() {
return myOffsetMap;
}
public int getSelectionEndOffset() {
return getOffsetMap().getOffset(CompletionInitializationContext.SELECTION_END_OFFSET);
}
void notifyBackgrounded() {
ApplicationManager.getApplication().assertIsDispatchThread();
myBackgrounded = true;
}
boolean isBackgrounded() {
ApplicationManager.getApplication().assertIsDispatchThread();
return myBackgrounded;
}
private void scheduleAdvertising() {
ApplicationManager.getApplication().executeOnPooledThread(new Runnable() {
public void run() {
if (isOutdated()) return; //tests?
final List<CompletionContributor> list = ApplicationManager.getApplication().runReadAction(new Computable<List<CompletionContributor>>() {
public List<CompletionContributor> compute() {
if (isOutdated()) {
return Collections.emptyList();
}
return CompletionContributor.forParameters(myParameters);
}
});
for (final CompletionContributor contributor : list) {
if (myLookup.getAdvertisementText() != null) return;
if (!myLookup.isCalculating() && !myLookup.isVisible()) return;
String s = ApplicationManager.getApplication().runReadAction(new Computable<String>() {
@Nullable
public String compute() {
if (isOutdated()) {
return null;
}
return contributor.advertise(myParameters);
}
});
if (myLookup.getAdvertisementText() != null) return;
if (s != null) {
myLookup.setAdvertisementText(s);
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
if (isOutdated()) {
return;
}
if (isAutopopupCompletion() && !myInitialized) {
return;
}
if (!isBackgrounded()) {
return;
}
updateLookup();
}
}, myQueue.getModalityState());
return;
}
}
}
});
}
private boolean isOutdated() {
return myDisposed ||
myEditor.isDisposed() ||
!ApplicationManager.getApplication().isUnitTestMode() && myEditor.getComponent().getRootPane() == null;
}
private void trackModifiers() {
final JComponent contentComponent = myEditor.getContentComponent();
contentComponent.addKeyListener(new KeyAdapter() {
public void keyPressed(KeyEvent e) {
processModifier(e);
}
public void keyReleased(KeyEvent e) {
processModifier(e);
}
private void processModifier(KeyEvent e) {
final int code = e.getKeyCode();
if (code == KeyEvent.VK_CONTROL || code == KeyEvent.VK_META || code == KeyEvent.VK_ALT || code == KeyEvent.VK_SHIFT) {
myModifiersReleased = true;
if (myOldDocumentText != null) {
cleanup();
}
contentComponent.removeKeyListener(this);
}
}
});
}
private void setMergeCommand() {
CommandProcessor.getInstance().setCurrentCommandGroupId("Completion" + hashCode());
}
public void showLookup() {
updateLookup();
}
public CompletionParameters getParameters() {
return myParameters;
}
private void registerItself() {
CompletionServiceImpl.getCompletionService().setCurrentCompletion(this);
}
public void liveAfterDeath(@Nullable final LightweightHint hint) {
if (myModifiersReleased || ApplicationManager.getApplication().isUnitTestMode()) {
return;
}
registerItself();
myHint = hint;
if (hint != null) {
hint.addHintListener(new HintListener() {
public void hintHidden(final EventObject event) {
hint.removeHintListener(this);
cleanup();
}
});
}
final Document document = myEditor.getDocument();
document.addDocumentListener(new DocumentAdapter() {
@Override
public void beforeDocumentChange(DocumentEvent e) {
document.removeDocumentListener(this);
cleanup();
}
});
final SelectionModel selectionModel = myEditor.getSelectionModel();
selectionModel.addSelectionListener(new SelectionListener() {
public void selectionChanged(SelectionEvent e) {
selectionModel.removeSelectionListener(this);
cleanup();
}
});
final CaretModel caretModel = myEditor.getCaretModel();
caretModel.addCaretListener(new CaretListener() {
public void caretPositionChanged(CaretEvent e) {
caretModel.removeCaretListener(this);
cleanup();
}
});
}
public CodeCompletionHandlerBase getHandler() {
return myHandler;
}
public LookupImpl getLookup() {
return myLookup;
}
private void updateLookup() {
ApplicationManager.getApplication().assertIsDispatchThread();
if (isOutdated()) return;
if (!myInitialized) {
myInitialized = true;
if (StringUtil.isEmpty(myLookup.getAdvertisementText()) && !isAutopopupCompletion()) {
final String text = DefaultCompletionContributor.getDefaultAdvertisementText(myParameters);
if (text != null) {
myLookup.setAdvertisementText(text);
}
}
myLookup.show();
}
myLookup.refreshUi();
}
public int getCount() {
return myCount;
}
final boolean isInsideIdentifier() {
return getIdentifierEndOffset() != getSelectionEndOffset();
}
public int getIdentifierEndOffset() {
return myOffsetMap.getOffset(CompletionInitializationContext.IDENTIFIER_END_OFFSET);
}
public synchronized void addItem(final LookupElement item) {
if (!isRunning()) return;
ProgressManager.checkCanceled();
final boolean unitTestMode = ApplicationManager.getApplication().isUnitTestMode();
if (!unitTestMode) {
assert !ApplicationManager.getApplication().isDispatchThread();
}
myLookup.addItem(item);
myCount++;
if (unitTestMode) return;
if (myCount == 1) {
ApplicationManager.getApplication().executeOnPooledThread(new Runnable() {
public void run() {
try {
Thread.sleep(300);
}
catch (InterruptedException e) {
LOG.error(e);
}
myFreezeSemaphore.up();
}
});
}
myQueue.queue(myUpdate);
}
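// Note on the 300 ms delay above: it appears to implement the "freeze" behaviour; the first
// added item releases myFreezeSemaphore only after a short pause (presumably the invoking
// handler blocks on it), so fast contributors can finish before the lookup is first shown,
// while later items only queue myUpdate, which the MergingUpdateQueue coalesces.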
public void closeAndFinish(boolean hideLookup) {
if (myHint != null) {
myHint.hide();
}
if (LookupManager.getActiveLookup(myEditor) == myLookup) {
myLookup.removeLookupListener(myLookupListener);
finishCompletionProcess();
if (hideLookup) {
LookupManager.getInstance(getProject()).hideActiveLookup();
}
}
}
private void finishCompletionProcess() {
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
cancel();
}
});
assert !myDisposed;
myDisposed = true;
ApplicationManager.getApplication().assertIsDispatchThread();
Disposer.dispose(myQueue);
cleanup();
}
@TestOnly
public static void cleanupForNextTest() {
CompletionProgressIndicator currentCompletion = CompletionServiceImpl.getCompletionService().getCurrentCompletion();
if (currentCompletion != null) {
currentCompletion.finishCompletionProcess();
}
}
private void cleanup() {
assert ApplicationManager.getApplication().isDispatchThread();
myHint = null;
myOldDocumentText = null;
unregisterItself();
}
private void unregisterItself() {
CompletionServiceImpl.getCompletionService().setCurrentCompletion(null);
}
public void stop() {
super.stop();
myQueue.cancelAllUpdates();
myFreezeSemaphore.up();
invokeLaterIfNotDispatch(new Runnable() {
public void run() {
if (isOutdated()) return;
if (isCanceled()) return;
//what if a new completion was invoked by the user before this 'later'?
if (CompletionProgressIndicator.this != CompletionServiceImpl.getCompletionService().getCurrentCompletion()) return;
myLookup.setCalculating(false);
if (!isBackgrounded()) return;
if (hideAutopopupIfMeaningless()) {
return;
}
if (myCount == 0) {
LookupManager.getInstance(getProject()).hideActiveLookup();
assert CompletionServiceImpl.getCompletionService().getCurrentCompletion() == null;
if (!isAutopopupCompletion() ) {
myHandler.handleEmptyLookup(getProject(), myEditor, myParameters, CompletionProgressIndicator.this);
}
} else {
updateLookup();
}
}
});
}
public boolean hideAutopopupIfMeaningless() {
if (!myLookup.isFocused() && !myLookup.isCalculating()) {
myLookup.refreshUi();
final List<LookupElement> items = myLookup.getItems();
if (items.size() == 0 || items.size() == 1 && (items.get(0).getPrefixMatcher().getPrefix() + myLookup.getAdditionalPrefix()).equals(items.get(0).getLookupString())) {
myLookup.hideLookup(false);
assert CompletionServiceImpl.getCompletionService().getCurrentCompletion() == null;
return true;
}
}
return false;
}
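// Here "meaningless" means the unfocused autopopup either has no items, or has exactly one
// item that adds nothing to what was already typed (prefix plus additional prefix equals the
// lookup string), e.g. typing "foo" when the single suggestion is "foo".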
private void invokeLaterIfNotDispatch(final Runnable runnable) {
final Application application = ApplicationManager.getApplication();
if (application.isUnitTestMode()) {
runnable.run();
} else {
application.invokeLater(runnable, myQueue.getModalityState());
}
}
public volatile String cancelTrace;
@Override
public void cancel() {
if (cancelTrace == null) {
Throwable t = new Throwable();
final StringWriter writer = new StringWriter();
t.printStackTrace(new PrintWriter(writer));
cancelTrace = writer.toString();
}
super.cancel();
}
@Override
public void start() {
if (isCanceled()) {
throw new AssertionError("Restarting completion process is prohibited: trace=" + cancelTrace);
}
super.start();
}
@Override
public void initStateFrom(@NotNull ProgressIndicatorEx indicator) {
if (isCanceled()) {
throw new AssertionError("Re-init-ting completion process is prohibited: trace=" + cancelTrace);
}
if (indicator.isCanceled()) {
LOG.error("initStateFrom canceled: " + indicator);
}
super.initStateFrom(indicator);
}
public boolean fillInCommonPrefix(final boolean explicit) {
if (isInsideIdentifier()) {
return false;
}
final Boolean aBoolean = new WriteCommandAction<Boolean>(getProject()) {
protected void run(Result<Boolean> result) throws Throwable {
if (!explicit) {
setMergeCommand();
}
try {
result.setResult(myLookup.fillInCommonPrefix(explicit));
}
catch (Exception e) {
LOG.error(e);
}
}
}.execute().getResultObject();
return aBoolean.booleanValue();
}
public boolean isInitialized() {
return myInitialized;
}
public void restorePrefix() {
setMergeCommand();
if (myOldDocumentText != null) {
myEditor.getDocument().setText(myOldDocumentText);
myEditor.getSelectionModel().setSelection(myOldStart, myOldEnd);
myEditor.getCaretModel().moveToOffset(myOldCaret);
myOldDocumentText = null;
return;
}
getLookup().restorePrefix();
}
public Editor getEditor() {
return myEditor;
}
public void rememberDocumentState() {
if (myModifiersReleased) {
return;
}
myOldDocumentText = myEditor.getDocument().getText();
myOldCaret = myEditor.getCaretModel().getOffset();
myOldStart = myEditor.getSelectionModel().getSelectionStart();
myOldEnd = myEditor.getSelectionModel().getSelectionEnd();
}
public boolean isRepeatedInvocation(CompletionType completionType, Editor editor) {
return completionType == myParameters.getCompletionType() && editor == myEditor && !isAutopopupCompletion();
}
@Override
public boolean isAutopopupCompletion() {
return !myLookup.isFocused();
}
@NotNull
public Project getProject() {
return ObjectUtils.assertNotNull(myEditor.getProject());
}
public void addWatchedPrefix(int startOffset, ElementPattern<String> restartCondition) {
if (isAutopopupCompletion()) {
myRestartingPrefixConditions.add(Pair.create(startOffset, restartCondition));
}
}
public void prefixUpdated() {
final CharSequence text = myEditor.getDocument().getCharsSequence();
final int caretOffset = myEditor.getCaretModel().getOffset();
for (Pair<Integer, ElementPattern<String>> pair : myRestartingPrefixConditions) {
final String newPrefix = text.subSequence(pair.first, caretOffset).toString();
if (pair.second.accepts(newPrefix)) {
restartCompletion();
return;
}
}
hideAutopopupIfMeaningless();
}
public void restartCompletion() {
closeAndFinish(false);
myHandler.invokeCompletion(getProject(), myEditor, PsiUtilBase.getPsiFileInEditor(myEditor, getProject()), myParameters.getInvocationCount());
}
}
|
explicit completion during active autopopup should insert common prefix and focus the lookup
|
platform/lang-impl/src/com/intellij/codeInsight/completion/CompletionProgressIndicator.java
|
explicit completion during active autopopup should insert common prefix and focus the lookup
|
|
Java
|
apache-2.0
|
ab6d5f72e676ec536e92d43f1b57c34cefe732ec
| 0
|
PRIDE-Archive/data-provider-api
|
package uk.ac.ebi.pride.archive.dataprovider.data.spectra;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class SpectrumNumberArrayDeserializerTest {
String spectraLine = "{\"usi\":\"mzspec:PRD000902:Rice_leaf_0h_phospho_test1:scan:3817:N[UNIMOD:7]NGSSIGS[UNIMOD:21]PGPGR/2\",\"spectraUsi\":\"mzspec:PXD002222:Rice_leaf_0h_phospho_test1:scan:3817\",\"assayAccession\":\"c0300332f12ec3420b966972b077cdb25520c0ba\",\"projectAccession\":\"PXD002222\",\"reanalysisAccession\":\"PRD000902\",\"proteinAccessions\":[\"Os07t0584500-01\"],\"peptideSequence\":\"NNGSSIGSPGPGR\",\"peptidoform\":\"N[UNIMOD:7]NGSSIGS[UNIMOD:21]PGPGR/2\",\"scores\":[{\"accession\":\"MS:1002257\",\"name\":\"Comet:expectation value\",\"value\":\"2.99E-8\"},{\"accession\":\"MS:1002354\",\"name\":\"PSM-level q-value\",\"value\":\"5.0E-6\"},{\"accession\":\"MS:1002357\",\"name\":\"PSM-level probability\",\"value\":\"1.0\"},{\"accession\":\"MS:1002355\",\"name\":\"PSM-level FDRScore\",\"value\":\"1.2510634038933093E-5\"}],\"sampleProperties\":[{\"accession\":\"EFO:0000324\",\"name\":\"cell type\",\"value\":\"not applicable\"},{\"accession\":\"OBI:0100026\",\"name\":\"organism\",\"value\":\"Oryza sativa\"},{\"accession\":\"EFO:0000635\",\"name\":\"organism part\",\"value\":\"Leaf\"},{\"accession\":\"EFO:0000408\",\"name\":\"disease\",\"value\":\"Xanthomonas oryzae pv. oryzae\"},{\"accession\":\"EFO:0002091\",\"name\":\"biological replicate\",\"value\":\"1\"}],\"isDecoy\":false,\"isValid\":true,\"precursorCharge\":2,\"precursorMz\":640.764810900142,\"bestSearchEngineScore\":{\"accession\":\"MS:1002354\",\"name\":\"PSM-level q-value\",\"value\":\"5.0E-6\"},\"numPeaks\":198,\"masses\":\"eJwllFtsVFUUhjdTi0JjiKiI5bbpxBdiH+TS2MbAYcolaRMCNTFC0Z5OKYp2gCktlpYpZ3pLsBRKJ/AwBjnTC2BJY2wjRAzkMBaMVDBgImGSMYdLjKQBgQolpeXSbz39WXuvy7/W+vdWShlNT+sN9QKbC0A34CnGbpmK7bRG5b7K7x9Hq8K3eRxVMF42jvZmf5C44mW12LF0C+zOAN08z2fYeQu5d1a0bR1HHX+fe5XVVg0mcvZSZ+b8Cvxn+74i/mb7bupO92Ob08p3kOfBujrsGXVf4D/p/xryDGWFyXOvgHtraBp1rDc64et42qvAkSrh9/IocfrRvK/FvsG9kYgXkt8Tp3/9LANezpREA3Ue+kq5H90TkPh+5mI/jpeA/3STx4z7TOp7T1DPWNgL6py78LQWT4Snzp5MXme5R3BlRhP+qxLwMvJypJ9V6Zy7+xuEX9uytfif6a3kvCvOPNz8ieQxYz72YxVkybxWH6Su830/fbsf+bDNI707qVNUXSdYB9r+uMSZtcKz9ALoNrxE38ahcuZifu4T/7IR6W9Lk+TZch50yuaDKuJnProiIXuK3tgAv9Bk0VttUnRS2bGd+52hasEe0NzqZa6qPEX4H07bAx4Iyzw3LkVf6suL9OP4W4izoz+S3/r2h0byBebiZwcuix5zU9YT13kN/uYKr+zN/hD9qaLwNvy7CrGN9hTqGh/3oUuVHxL9rW5EB3pRPfztRX2guSSAbu0LAy3Y/X+Cxm9j0velmMzrfCP7VtnNxLkfeNGt6b0OT+fKJtnXX5ky17+HmrEXn91I/ECgFfv3IGi80yFzessLTzv9E+ah36yhH/NWpeSZUS/22xH2aaQMloMjg+R356V+Cq9XFsi+R08Tp//1bsK+nYvunOGw3Kf+Ck91f6n0kbaNPs3hmOzjXoz5qjklzNH+r0POB/PZm/tkAv+QHg7Lu51Vg+719VzZa/9c/hWdLKE/4+oYujH/uMm71O9G8DcvJ0Ps8WqY+esrE9CfM5BGHv3LrF3wSBb7OV9QCB8dK8G2fhoj3jn1Kn669Rw6cfsyxT61n371mRjvzDnZhz71d4XUM4ty2bO1L5U4ozubc7f5XBH+Pcfln+g8DR+ryyv9H00KHlsj84sE2Y8T7RGdHcoU/8N36FtF7lPfKP1G/odAvui0uEPsg3dCErcGHbnRCP+FFX4PVDte34cdibFHtfs19q/rg/BzG36uMZ4DtkiqlQ==\",\"intensities\":\"eJwllHtMz3sYx7/Nco+Ve23tO8sOM3+g5fzyG9+jhMU5uS2X2Pc4OcOsMJEO85XNLUYZbc6vfJVbV7+Ewo6+1UmcTuYnlzL07WZGpJZx4o/j93r+evZ8nud5P+/n9lEURSlPyNe+C9Vxo9QrtdY119FjjGdeaTSHFKOv3+L2SssnvwA9xVXplbZnMfHK2sRH6N3Dm7AnT77M+7wR4Nnbh3wBb3NCEThNUbnYD8VcxL5vKbjmxgXnsccdvOuV+sSWWuI6vvJuLFwJrhr7C3aruwxe2tQ56GptGna7cl+H5P96CX226xx+peGFxDmyLoD389Zr5J3QCi+l+0/8lSfDsFvluehacC31WW9PuKVeP+zqlB9yJT70vvCOE36fMvGzI9rRDXVDFX6Xwp/g9+2yhZ/lIK+R7CP821rfIkOjpe6yPPpvhmTC01Y/taCn72ReyoNtMofo0+TT6/+ij9byHa/wC9qQA3//sIe8rzhwB3kytY48wZHgmPE1EnfVZ8BPXpwVzjbeXWnUre8wvyF3Ly9BPhxFPvXDO/D1gCp42vHvPNKHIPIpVh7xSoUbXa3OwN+uSrzKe8lKmU/dfPZDTYmROXycQb9UdzF9trv20EfrwS2Zu98smWPfdJlfeIX049Tvgn96Pf7asff0W4115vE+OBCeeuMq8R95QvZtdRR+SmEweEp7xz/I6psv8P/DHzzd7S/z3+JuAC9snPALHwMf5bccpBnpAc9w9ZLXCJhHPuP9SeE/ehB1W13lcg/t99kHLWia
7N/QYu7CSveYyIGR8DL2K3Kf9X346Wdc7Il2pScLe3P8Pex1BczTPt9HvdqbtfRF60zl3UhpkP1w3mtErpsmc+339G/kxUHw1tc0Pgcvu1D6tPAA+Sz/X7GrWxfJnR9dBL6V1il+YZvgoSf1UofRlSH7l6AizVkR3KOZUijz/9wj+5DYJHOpSGKPNOd/3Jfhe5a8yt2Of8HxCRG8OzNlzpWOx9ivBb4m78C51KXHyd7by8bLfe3vesq7Y84V4goqaogLrEc3/HzJa28skf+uKKmauNSdveiTXsid3S6Cr5YdxN1rI9uJtwKGNoP3ciZztZccl/sfEyV7ED2YPhmZudLHYWN78G/olHk5jiCVjB+Rutmf/PoeJ/uozj7Mnui+6fTNCM3m/7VutYFnB3j4H/Tkg9Rv9y9jnlZpDfto7x0LHzMijr7oU3ZRv5bVwvy1Y74yzwn9aqRuHd5a8psq7X+A5tWt\",\"msLevel\":2,\"retentionTime\":753.2,\"missedCleavages\":0,\"modifications\":[{\"@type\":\"IdentifiedModification\",\"neutralLoss\":null,\"positionMap\":[{\"key\":1,\"value\":[{\"@type\":\"CvParam\",\"cvLabel\":\"MS\",\"accession\":\"MS:1003147\",\"name\":\"PTMProphet probability\",\"value\":\"0.3333\"}]}],\"modification\":{\"@type\":\"CvParam\",\"cvLabel\":\"UNIMOD\",\"accession\":\"UNIMOD:7\",\"name\":\"Deamidated\",\"value\":\"0.984016\"},\"attributes\":[]},{\"@type\":\"IdentifiedModification\",\"neutralLoss\":null,\"positionMap\":[{\"key\":8,\"value\":[{\"@type\":\"CvParam\",\"cvLabel\":\"MS\",\"accession\":\"MS:1003147\",\"name\":\"PTMProphet probability\",\"value\":\"0.774\"}]}],\"modification\":{\"@type\":\"CvParam\",\"cvLabel\":\"UNIMOD\",\"accession\":\"UNIMOD:21\",\"name\":\"Phospho\",\"value\":\"79.9663\"},\"attributes\":[]}],\"qualityEstimationMethods\":[{\"accession\":\"MS:1001194\",\"name\":\"quality estimation with decoy database\",\"value\":\"true\"}],\"properties\":[{\"accession\":\"PRIDE:0000511\",\"name\":\"Pass submitter threshold\",\"value\":\"true\"}]}";
String arrayLine = "{\"usi\":null,\"spectraUsi\":null,\"assayAccession\":null,\"projectAccession\":null,\"reanalysisAccession\":null,\"proteinAccessions\":null,\"peptideSequence\":null,\"peptidoform\":null,\"scores\":null,\"sampleProperties\":null,\"isDecoy\":null,\"isValid\":null,\"precursorCharge\":null,\"precursorMz\":null,\"bestSearchEngineScore\":null,\"numPeaks\":null,\"msLevel\":2,\"retentionTime\":753.2,\"missedCleavages\":0,\"modifications\":[{\"@type\":\"IdentifiedModification\",\"neutralLoss\":null,\"positionMap\":[{\"key\":1,\"value\":[{\"@type\":\"CvParam\",\"cvLabel\":\"MS\",\"accession\":\"MS:1003147\",\"name\":\"PTMProphet probability\",\"value\":\"0.3333\"}]}],\"modification\":{\"@type\":\"CvParam\",\"cvLabel\":\"UNIMOD\",\"accession\":\"UNIMOD:7\",\"name\":\"Deamidated\",\"value\":\"0.984016\"},\"attributes\":[]},{\"@type\":\"IdentifiedModification\",\"neutralLoss\":null,\"positionMap\":[{\"key\":8,\"value\":[{\"@type\":\"CvParam\",\"cvLabel\":\"MS\",\"accession\":\"MS:1003147\",\"name\":\"PTMProphet probability\",\"value\":\"0.774\"}]}],\"modification\":{\"@type\":\"CvParam\",\"cvLabel\":\"UNIMOD\",\"accession\":\"UNIMOD:21\",\"name\":\"Phospho\",\"value\":\"79.9663\"},\"attributes\":[]}],\"qualityEstimationMethods\":[{\"accession\":\"MS:1001194\",\"name\":\"quality estimation with decoy database\",\"value\":\"true\"}],\"properties\":[{\"accession\":\"PRIDE:0000511\",\"name\":\"Pass submitter threshold\",\"value\":\"true\"}],\"masses\":[639.8155517578125,618.3179931640625,136.07591247558594,610.3189697265625,627.3206787109375,147.07717895507812,242.07672119140625,230.0762939453125,278.1520080566406,484.2738952636719,515.3289184570312,516.3333129882812,184.07164001464844,483.2682189941406,265.14227294921875,519.0947875976562,425.13818359375,967.3564453125,323.13189697265625,372.1324768066406,820.3612670898438,374.1309814453125,407.1304626464844,587.3685302734375,212.0658416748047,447.2523498535156,550.86865234375,586.3673095703125,514.868896484375,229.12750244140625,394.12567138671875,527.1224365234375,447.7519226074219,837.3701782226562,398.1269226074219,111.04457092285156,632.2511596679688,322.1872863769531,667.2576904296875,170.0924072265625,236.4058074951172,246.15504455566406,158.0928497314453,394.6813049316406,130.0862274169922,534.767333984375,534.2755126953125,573.7783813476562,576.780517578125,641.2778930664062,640.2840576171875,708.2850341796875,859.2882080078125,583.2862548828125,707.288330078125,110.07140350341797,101.0714340209961,347.17156982421875,214.08274841308594,640.7870483398438,276.1643981933594,582.790283203125,562.7892456054688,444.29290771484375,552.2924194335938,459.1656188964844,591.2965087890625,592.2965087890625,600.2981567382812,591.7970581054688,601.2993774414062,232.14122009277344,327.16339111328125,584.3009643554688,543.3030395507812,592.8042602539062,600.8043212890625,582.3031005859375,155.0804443359375,571.3067016601562,167.08132934570312,609.311279296875,493.84344482421875,362.21624755859375,423.84283447265625,426.84283447265625,386.2149353027344,432.21533203125,897.4530639648438,560.1981811523438,260.0875244140625,460.838623046875,424.83660888671875,630.7005004882812,694.202392578125,258.08917236328125,269.0892639160156,112.05097198486328,539.205078125,129.1023406982422,810.453369140625,312.085693359375,811.4558715820312,896.4542236328125,379.20904541015625,439.830810546875,244.166015625,360.2004699707031,363.2005920410156,406.8279724121094,953.4739379882812,954.4708862304688,639.7232666015625,596.2259521484375,848.7218017578125,360.7014
16015625,225.10118103027344,270.0711364746094,557.2279052734375,581.7296752929688,894.4837646484375,182.0388641357422,1051.448486328125,1052.4495849609375,442.19659423828125,370.1933288574219,421.8190002441406,311.6926574707031,590.7365112304688,312.19427490234375,329.1936950683594,302.68829345703125,893.4971923828125,120.08074188232422,323.189453125,599.7482299804688,201.12208557128906,210.1222381591797,328.12298583984375,568.8776245117188,356.121337890625,622.3807983398438,426.1228942871094,133.0606231689453,908.39013671875,426.2460021972656,452.7450256347656,136.06199645996094,228.06141662597656,719.3895263671875,452.2424621582031,185.05526733398438,438.2414855957031,495.86505126953125,175.1190643310547,722.3965454101562,477.8636169433594,408.1151123046875,152.05690002441406,448.73724365234375,497.8601379394531,147.0582275390625,907.4002685546875,147.11341857910156,479.8577880859375,496.85809326171875,115.08692169189453,498.8564147949219,329.1079406738281,282.1089172363281,395.23236083984375,405.7287902832031,936.4215698242188,112.08688354492188,403.60406494140625,127.08666229248047,394.72991943359375,459.8526306152344,450.2279052734375,461.8534851074219,357.1037292480469,311.0986022949219,442.85028076171875,450.850830078125,494.8514404296875,287.098388671875,723.4244384765625,809.4286499023438,724.4224853515625,478.8493347167969,261.100830078125,345.2253112792969,339.0941162109375,994.43408203125,213.04925537109375,289.095947265625,391.0945129394531,443.7199401855469],\"intensities\":[3383.359375,5046.22314453125,5469.8837890625,24065.251953125,3858.4365234375,4208.39501953125,3541.00341796875,9016.9541015625,3368.909912109375,18877.46875,25663.779296875,3093.734619140625,5395.27734375,98487.3046875,3767.2119140625,2467.42529296875,2856.265625,3497.753662109375,2596.706298828125,11534.724609375,12226.3154296875,2685.9482421875,3116.6494140625,11422.6806640625,4279.9443359375,11396.384765625,3141.888916015625,44046.05859375,3069.638671875,2254.124755859375,3613.35009765625,2766.61279296875,5233.306640625,3825.073486328125,3021.970703125,2695.416015625,3025.86083984375,19228.47265625,4240.9169921875,2694.68359375,2451.588134765625,12904.2939453125,3118.18115234375,4247.95751953125,3186.132568359375,9424.25390625,21738.619140625,8529.974609375,3740.875244140625,11266.94140625,55063.09765625,11926.4072265625,3924.8583984375,5271.14306640625,36777.02734375,4983.56884765625,8933.609375,4246.29638671875,3550.901123046875,31193.33984375,2356.053955078125,16600.26953125,7814.3466796875,14085.154296875,4932.11767578125,3554.696533203125,188470.34375,39394.76171875,18990.447265625,117255.3984375,4437.48583984375,4117.81396484375,2421.965087890625,3937.035888671875,18349.576171875,16493.44921875,19111.0,17090.984375,2377.381103515625,4719.76123046875,2988.845703125,99468.7421875,3752.238525390625,9327.875,2775.343994140625,3134.968505859375,11637.615234375,3615.02685546875,5171.9794921875,5055.22900390625,2355.291015625,3122.794677734375,22454.20703125,3230.170166015625,3853.021240234375,4954.84521484375,2632.0390625,21785.43359375,5150.671875,13771.78125,29415.046875,3080.740966796875,4113.68896484375,20161.75,2700.607177734375,3595.614990234375,3665.193359375,22341.072265625,3322.3056640625,4935.072265625,11559.85546875,2948.54443359375,3804.46826171875,8595.787109375,2969.05322265625,2903.0234375,2152.78466796875,5188.0341796875,5376.513671875,3069.400634765625,12089.2060546875,1916.671142578125,12489.74609375,3541.896728515625,3965.322021484375,2351.45361328125,3521.46142578125,31565.9140625,25
376.947265625,4402.38525390625,10668.0263671875,3204.824951171875,27489.458984375,2646.308837890625,12044.599609375,3248.53466796875,3110.720947265625,2342.7734375,2678.27294921875,12500.427734375,2938.229248046875,16971.7421875,16513.73046875,2209.622314453125,3030.239501953125,4850.96826171875,5081.43359375,17878.984375,2556.61767578125,12094.04296875,14795.546875,16528.02734375,3101.87255859375,20711.01953125,45269.625,24852.130859375,16601.443359375,11332.6630859375,23486.048828125,4001.112060546875,11135.337890625,4070.55078125,17428.2109375,4273.41357421875,9963.357421875,83832.1328125,4316.1630859375,4781.73193359375,12855.236328125,4082.03955078125,31792.29296875,3741.931640625,3143.661865234375,3235.0458984375,2309.1474609375,12103.18359375,74112.921875,3574.412109375,3525.113037109375,3484.28515625,9741.2529296875,13432.9794921875,8466.4931640625,3016.510498046875,3407.1025390625,12231.458984375,36488.58984375,25117.880859375,10606.0537109375,22294.80859375,3084.249755859375,16753.056640625,10994.3544921875,4577.6142578125,3074.77587890625,10758.2890625,3248.254150390625,9680.939453125]}";
private ObjectMapper objectMapper;
@Before
public void setUp() throws Exception {
this.objectMapper = new ObjectMapper();
}
@Test
public void testDeserialize() {
try {
BinaryArchiveSpectrum spectrum = objectMapper.readValue(spectraLine, BinaryArchiveSpectrum.class);
Assert.assertEquals("mzspec:PRD000902:Rice_leaf_0h_phospho_test1:scan:3817:N[UNIMOD:7]NGSSIGS[UNIMOD:21]PGPGR/2", spectrum.getUsi());
ArchiveSpectrum nonBinSpec = new ArchiveSpectrum(spectrum);
String line = objectMapper.writeValueAsString(nonBinSpec);
Assert.assertEquals(line, arrayLine);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}
|
src/test/java/uk/ac/ebi/pride/archive/dataprovider/data/spectra/SpectrumNumberArrayDeserializerTest.java
|
package uk.ac.ebi.pride.archive.dataprovider.data.spectra;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class SpectrumNumberArrayDeserializerTest {
String spectraLine = "{\"usi\":\"mzspec:PRD000902:Rice_leaf_0h_phospho_test1:scan:3817:N[UNIMOD:7]NGSSIGS[UNIMOD:21]PGPGR/2\",\"spectraUsi\":\"mzspec:PXD002222:Rice_leaf_0h_phospho_test1:scan:3817\",\"assayAccession\":\"c0300332f12ec3420b966972b077cdb25520c0ba\",\"projectAccession\":\"PXD002222\",\"reanalysisAccession\":\"PRD000902\",\"proteinAccessions\":[\"Os07t0584500-01\"],\"peptideSequence\":\"NNGSSIGSPGPGR\",\"peptidoform\":\"N[UNIMOD:7]NGSSIGS[UNIMOD:21]PGPGR/2\",\"scores\":[{\"accession\":\"MS:1002257\",\"name\":\"Comet:expectation value\",\"value\":\"2.99E-8\"},{\"accession\":\"MS:1002354\",\"name\":\"PSM-level q-value\",\"value\":\"5.0E-6\"},{\"accession\":\"MS:1002357\",\"name\":\"PSM-level probability\",\"value\":\"1.0\"},{\"accession\":\"MS:1002355\",\"name\":\"PSM-level FDRScore\",\"value\":\"1.2510634038933093E-5\"}],\"sampleProperties\":[{\"accession\":\"EFO:0000324\",\"name\":\"cell type\",\"value\":\"not applicable\"},{\"accession\":\"OBI:0100026\",\"name\":\"organism\",\"value\":\"Oryza sativa\"},{\"accession\":\"EFO:0000635\",\"name\":\"organism part\",\"value\":\"Leaf\"},{\"accession\":\"EFO:0000408\",\"name\":\"disease\",\"value\":\"Xanthomonas oryzae pv. oryzae\"},{\"accession\":\"EFO:0002091\",\"name\":\"biological replicate\",\"value\":\"1\"}],\"isDecoy\":false,\"isValid\":true,\"precursorCharge\":2,\"precursorMz\":640.764810900142,\"bestSearchEngineScore\":{\"accession\":\"MS:1002354\",\"name\":\"PSM-level q-value\",\"value\":\"5.0E-6\"},\"numPeaks\":198,\"masses\":\"eJwllFtsVFUUhjdTi0JjiKiI5bbpxBdiH+TS2MbAYcolaRMCNTFC0Z5OKYp2gCktlpYpZ3pLsBRKJ/AwBjnTC2BJY2wjRAzkMBaMVDBgImGSMYdLjKQBgQolpeXSbz39WXuvy7/W+vdWShlNT+sN9QKbC0A34CnGbpmK7bRG5b7K7x9Hq8K3eRxVMF42jvZmf5C44mW12LF0C+zOAN08z2fYeQu5d1a0bR1HHX+fe5XVVg0mcvZSZ+b8Cvxn+74i/mb7bupO92Ob08p3kOfBujrsGXVf4D/p/xryDGWFyXOvgHtraBp1rDc64et42qvAkSrh9/IocfrRvK/FvsG9kYgXkt8Tp3/9LANezpREA3Ue+kq5H90TkPh+5mI/jpeA/3STx4z7TOp7T1DPWNgL6py78LQWT4Snzp5MXme5R3BlRhP+qxLwMvJypJ9V6Zy7+xuEX9uytfif6a3kvCvOPNz8ieQxYz72YxVkybxWH6Su830/fbsf+bDNI707qVNUXSdYB9r+uMSZtcKz9ALoNrxE38ahcuZifu4T/7IR6W9Lk+TZch50yuaDKuJnProiIXuK3tgAv9Bk0VttUnRS2bGd+52hasEe0NzqZa6qPEX4H07bAx4Iyzw3LkVf6suL9OP4W4izoz+S3/r2h0byBebiZwcuix5zU9YT13kN/uYKr+zN/hD9qaLwNvy7CrGN9hTqGh/3oUuVHxL9rW5EB3pRPfztRX2guSSAbu0LAy3Y/X+Cxm9j0velmMzrfCP7VtnNxLkfeNGt6b0OT+fKJtnXX5ky17+HmrEXn91I/ECgFfv3IGi80yFzessLTzv9E+ah36yhH/NWpeSZUS/22xH2aaQMloMjg+R356V+Cq9XFsi+R08Tp//1bsK+nYvunOGw3Kf+Ck91f6n0kbaNPs3hmOzjXoz5qjklzNH+r0POB/PZm/tkAv+QHg7Lu51Vg+719VzZa/9c/hWdLKE/4+oYujH/uMm71O9G8DcvJ0Ps8WqY+esrE9CfM5BGHv3LrF3wSBb7OV9QCB8dK8G2fhoj3jn1Kn669Rw6cfsyxT61n371mRjvzDnZhz71d4XUM4ty2bO1L5U4ozubc7f5XBH+Pcfln+g8DR+ryyv9H00KHlsj84sE2Y8T7RGdHcoU/8N36FtF7lPfKP1G/odAvui0uEPsg3dCErcGHbnRCP+FFX4PVDte34cdibFHtfs19q/rg/BzG36uMZ4DtkiqlQ==\",\"intensities\":\"eJwllHtMz3sYx7/Nco+Ve23tO8sOM3+g5fzyG9+jhMU5uS2X2Pc4OcOsMJEO85XNLUYZbc6vfJVbV7+Ewo6+1UmcTuYnlzL07WZGpJZx4o/j93r+evZ8nud5P+/n9lEURSlPyNe+C9Vxo9QrtdY119FjjGdeaTSHFKOv3+L2SssnvwA9xVXplbZnMfHK2sRH6N3Dm7AnT77M+7wR4Nnbh3wBb3NCEThNUbnYD8VcxL5vKbjmxgXnsccdvOuV+sSWWuI6vvJuLFwJrhr7C3aruwxe2tQ56GptGna7cl+H5P96CX226xx+peGFxDmyLoD389Zr5J3QCi+l+0/8lSfDsFvluehacC31WW9PuKVeP+zqlB9yJT70vvCOE36fMvGzI9rRDXVDFX6Xwp/g9+2yhZ/lIK+R7CP821rfIkOjpe6yPPpvhmTC01Y/taCn72ReyoNtMofo0+TT6/+ij9byHa/wC9qQA3//sIe8rzhwB3kytY48wZHgmPE1EnfVZ8BPXpwVzjbeXWnUre8wvyF3Ly9BPhxFPvXDO/D1gCp42vHvPNKHIPIpVh7xSoUbXa3OwN+uSrzKe8lKmU/dfPZDTYmROXycQb9UdzF9trv20EfrwS2Zu98smWPfdJlfeIX049Tvgn96Pf7asff0W4115vE+OBCeeuMq8R95QvZtdRR+SmEweEp7xz/I6psv8P/DHzzd7S/z3+JuAC9snPALHwMf5bccpBnpAc9w9ZLXCJhHPuP9SeE/ehB1W13lcg/t99kHLWia
7N/QYu7CSveYyIGR8DL2K3Kf9X346Wdc7Il2pScLe3P8Pex1BczTPt9HvdqbtfRF60zl3UhpkP1w3mtErpsmc+339G/kxUHw1tc0Pgcvu1D6tPAA+Sz/X7GrWxfJnR9dBL6V1il+YZvgoSf1UofRlSH7l6AizVkR3KOZUijz/9wj+5DYJHOpSGKPNOd/3Jfhe5a8yt2Of8HxCRG8OzNlzpWOx9ivBb4m78C51KXHyd7by8bLfe3vesq7Y84V4goqaogLrEc3/HzJa28skf+uKKmauNSdveiTXsid3S6Cr5YdxN1rI9uJtwKGNoP3ciZztZccl/sfEyV7ED2YPhmZudLHYWN78G/olHk5jiCVjB+Rutmf/PoeJ/uozj7Mnui+6fTNCM3m/7VutYFnB3j4H/Tkg9Rv9y9jnlZpDfto7x0LHzMijr7oU3ZRv5bVwvy1Y74yzwn9aqRuHd5a8psq7X+A5tWt\",\"msLevel\":2,\"retentionTime\":753.2,\"missedCleavages\":0,\"modifications\":[{\"@type\":\"IdentifiedModification\",\"neutralLoss\":null,\"positionMap\":[{\"key\":1,\"value\":[{\"@type\":\"CvParam\",\"cvLabel\":\"MS\",\"accession\":\"MS:1003147\",\"name\":\"PTMProphet probability\",\"value\":\"0.3333\"}]}],\"modification\":{\"@type\":\"CvParam\",\"cvLabel\":\"UNIMOD\",\"accession\":\"UNIMOD:7\",\"name\":\"Deamidated\",\"value\":\"0.984016\"},\"attributes\":[]},{\"@type\":\"IdentifiedModification\",\"neutralLoss\":null,\"positionMap\":[{\"key\":8,\"value\":[{\"@type\":\"CvParam\",\"cvLabel\":\"MS\",\"accession\":\"MS:1003147\",\"name\":\"PTMProphet probability\",\"value\":\"0.774\"}]}],\"modification\":{\"@type\":\"CvParam\",\"cvLabel\":\"UNIMOD\",\"accession\":\"UNIMOD:21\",\"name\":\"Phospho\",\"value\":\"79.9663\"},\"attributes\":[]}],\"qualityEstimationMethods\":[{\"accession\":\"MS:1001194\",\"name\":\"quality estimation with decoy database\",\"value\":\"true\"}],\"properties\":[{\"accession\":\"PRIDE:0000511\",\"name\":\"Pass submitter threshold\",\"value\":\"true\"}]}";
private ObjectMapper objectMapper;
@Before
public void setUp() throws Exception {
this.objectMapper = new ObjectMapper();
}
@Test
public void testDeserialize() {
try {
BinaryArchiveSpectrum spectrum = objectMapper.readValue(spectraLine, BinaryArchiveSpectrum.class);
Assert.assertEquals("mzspec:PRD000902:Rice_leaf_0h_phospho_test1:scan:3817:N[UNIMOD:7]NGSSIGS[UNIMOD:21]PGPGR/2", spectrum.getUsi());
ArchiveSpectrum nonBinSpec = new ArchiveSpectrum(spectrum);
String line = objectMapper.writeValueAsString(nonBinSpec);
System.out.println(line);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}
|
Final test for new SpectrumArchive
|
src/test/java/uk/ac/ebi/pride/archive/dataprovider/data/spectra/SpectrumNumberArrayDeserializerTest.java
|
Final test for new SpectrumArchive
|
|
Java
|
bsd-3-clause
|
0975dff323b088eef16fdab178d78a1ffac8934b
| 0
|
mdiggory/dryad-repo,mdiggory/dryad-repo,jimallman/dryad-repo,ojacobson/dryad-repo,jamie-dryad/dryad-repo,jimallman/dryad-repo,rnathanday/dryad-repo,rnathanday/dryad-repo,mdiggory/dryad-repo,ojacobson/dryad-repo,rnathanday/dryad-repo,jamie-dryad/dryad-repo,jamie-dryad/dryad-repo,jamie-dryad/dryad-repo,rnathanday/dryad-repo,rnathanday/dryad-repo,ojacobson/dryad-repo,mdiggory/dryad-repo,ojacobson/dryad-repo,jimallman/dryad-repo,rnathanday/dryad-repo,jimallman/dryad-repo,jimallman/dryad-repo,mdiggory/dryad-repo,ojacobson/dryad-repo,jimallman/dryad-repo,jamie-dryad/dryad-repo,ojacobson/dryad-repo
|
package org.dspace.submit.utils;
import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.DCValue;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.workflow.DryadWorkflowUtils;
import org.dspace.workflow.WorkflowItem;
import java.io.FileInputStream;
import java.io.IOException;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
/**
* Created by IntelliJ IDEA.
* User: fabio.bolognesi
* Date: 9/7/11
* Time: 9:47 AM
* To change this template use File | Settings | File Templates.
*/
public class DryadJournalSubmissionUtils {
private static Logger log = Logger.getLogger(DryadJournalSubmissionUtils.class);
// Reading DryadJournalSubmission.properties
public static final String FULLNAME = "fullname";
public static final String METADATADIR = "metadataDir";
public static final String INTEGRATED = "integrated";
public static final String PUBLICATION_BLACKOUT = "publicationBlackout";
public static final String NOTIFY_ON_REVIEW = "notifyOnReview";
public static final String NOTIFY_ON_ARCHIVE = "notifyOnArchive";
public static final String JOURNAL_ID = "journalID";
public static final java.util.Map<String, Map<String, String>> journalProperties = new HashMap<String, Map<String, String>>();
static{
String journalPropFile = ConfigurationManager.getProperty("submit.journal.config");
Properties properties = new Properties();
try {
properties.load(new FileInputStream(journalPropFile));
String journalTypes = properties.getProperty("journal.order");
for (int i = 0; i < journalTypes.split(",").length; i++) {
String journalType = journalTypes.split(",")[i].trim();
String str = "journal." + journalType + ".";
Map<String, String> map = new HashMap<String, String>();
map.put(FULLNAME, properties.getProperty(str + FULLNAME));
map.put(METADATADIR, properties.getProperty(str + METADATADIR));
map.put(INTEGRATED, properties.getProperty(str + INTEGRATED));
map.put(PUBLICATION_BLACKOUT, properties.getProperty(str + PUBLICATION_BLACKOUT, "false"));
map.put(NOTIFY_ON_REVIEW, properties.getProperty(str + NOTIFY_ON_REVIEW));
map.put(NOTIFY_ON_ARCHIVE, properties.getProperty(str + NOTIFY_ON_ARCHIVE));
map.put(JOURNAL_ID, journalType);
String key = properties.getProperty(str + FULLNAME);
journalProperties.put(key, map);
}
}catch (IOException e) {
log.error("Error while loading journal properties", e);
}
}
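// A hypothetical illustration of the DryadJournalSubmission.properties layout the static
// block above expects (journal ids and values are invented for this sketch):
//   journal.order = evolution, molEcol
//   journal.evolution.fullname = Evolution
//   journal.evolution.metadataDir = /opt/dryad/journalMetadata/evolution
//   journal.evolution.integrated = true
//   journal.evolution.publicationBlackout = false
//   journal.evolution.notifyOnReview = editor@example.org
//   journal.evolution.notifyOnArchive = archive@example.org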
public static boolean isJournalBlackedOut(Context context, Item item, Collection collection) throws SQLException {
// get Journal
Item dataPackage=item;
if(!isDataPackage(collection))
dataPackage = DryadWorkflowUtils.getDataPackage(context, item);
DCValue[] journalFullNames = dataPackage.getMetadata("prism.publicationName");
String journalFullName=null;
if(journalFullNames!=null && journalFullNames.length > 0){
journalFullName=journalFullNames[0].value;
}
// get journal's blackout setting
Map<String, String> values = journalProperties.get(journalFullName);
// journal is blacked out if its blackout setting is true or if it has no setting
String isBlackedOut = null;
if(values!=null && values.size()>0)
isBlackedOut = values.get(PUBLICATION_BLACKOUT);
if(isBlackedOut==null || isBlackedOut.equals("true"))
return true;
return false;
}
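// Combined with the "false" default applied in the static block, the null check above means
// a journal missing from the properties file is treated as blacked out, while a listed
// journal without an explicit publicationBlackout entry is not.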
private static boolean isDataPackage(Collection coll) throws SQLException {
return coll.getHandle().equals(ConfigurationManager.getProperty("submit.publications.collection"));
}
public static String findKeyByFullname(String fullname){
Map<String, String> props = journalProperties.get(fullname);
if(props!=null)
return props.get(DryadJournalSubmissionUtils.JOURNAL_ID);
return null;
}
}
|
dspace/modules/api/src/main/java/org/dspace/submit/utils/DryadJournalSubmissionUtils.java
|
package org.dspace.submit.utils;
import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.DCValue;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.workflow.DryadWorkflowUtils;
import org.dspace.workflow.WorkflowItem;
import java.io.FileInputStream;
import java.io.IOException;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
/**
* Created by IntelliJ IDEA.
* User: fabio.bolognesi
* Date: 9/7/11
* Time: 9:47 AM
* To change this template use File | Settings | File Templates.
*/
public class DryadJournalSubmissionUtils {
private static Logger log = Logger.getLogger(DryadJournalSubmissionUtils.class);
// Reading DryadJournalSubmission.properties
public static final String FULLNAME = "fullname";
public static final String METADATADIR = "metadataDir";
public static final String INTEGRATED = "integrated";
public static final String PUBLICATION_BLACKOUT = "publicationBlackout";
public static final String NOTIFY_ON_REVIEW = "notifyOnReview";
public static final String NOTIFY_ON_ARCHIVE = "notifyOnArchive";
public static final String JOURNAL_ID = "journalID";
public static final java.util.Map<String, Map<String, String>> journalProperties = new HashMap<String, Map<String, String>>();
static{
String journalPropFile = ConfigurationManager.getProperty("submit.journal.config");
Properties properties = new Properties();
try {
properties.load(new FileInputStream(journalPropFile));
String journalTypes = properties.getProperty("journal.order");
for (int i = 0; i < journalTypes.split(",").length; i++) {
String journalType = journalTypes.split(",")[i].trim();
String str = "journal." + journalType + ".";
Map<String, String> map = new HashMap<String, String>();
map.put(FULLNAME, properties.getProperty(str + FULLNAME));
map.put(METADATADIR, properties.getProperty(str + METADATADIR));
map.put(INTEGRATED, properties.getProperty(str + INTEGRATED));
map.put(PUBLICATION_BLACKOUT, properties.getProperty(str + PUBLICATION_BLACKOUT, "false"));
map.put(NOTIFY_ON_REVIEW, properties.getProperty(str + NOTIFY_ON_REVIEW));
map.put(NOTIFY_ON_ARCHIVE, properties.getProperty(str + NOTIFY_ON_ARCHIVE));
map.put(JOURNAL_ID, journalType);
String key = properties.getProperty(str + FULLNAME);
journalProperties.put(key, map);
}
}catch (IOException e) {
log.error("Error while loading journal properties", e);
}
}
public static boolean isJournalBlackedOut(Context context, Item item, Collection collection) throws SQLException {
// get Journal
Item dataPackage=item;
if(!isDataPackage(collection))
dataPackage = DryadWorkflowUtils.getDataPackage(context, item);
DCValue[] journalFullNames = dataPackage.getMetadata("prism.publicationName");
String journalFullName=null;
if(journalFullNames!=null && journalFullNames.length > 0){
journalFullName=journalFullNames[0].value;
}
// show "Publish immediately" only if publicationBlackout=false or not defined in DryadJournalSubmission.properties.
Map<String, String> values = journalProperties.get(journalFullName);
String isBlackedOut = null;
if(values!=null && values.size()>0)
isBlackedOut = values.get(PUBLICATION_BLACKOUT);
if(isBlackedOut==null || isBlackedOut.equals("false"))
return false;
return true;
}
private static boolean isDataPackage(Collection coll) throws SQLException {
return coll.getHandle().equals(ConfigurationManager.getProperty("submit.publications.collection"));
}
public static String findKeyByFullname(String fullname){
Map<String, String> props = journalProperties.get(fullname);
if(props!=null)
return props.get(DryadJournalSubmissionUtils.JOURNAL_ID);
return null;
}
}
|
for non-integrated journals, the default setting is blackout=true
|
dspace/modules/api/src/main/java/org/dspace/submit/utils/DryadJournalSubmissionUtils.java
|
for non-integrated journals, the default setting is blackout=true
|
|
Java
|
bsd-3-clause
|
c5eaedb09c8e56e82c83ebca30e8540721b4f35a
| 0
|
KommuSoft/jahmm,KommuSoft/jahmm
|
/*
* Copyright (c) 2004-2009, Jean-Marc François. All Rights Reserved.
* Licensed under the New BSD license. See the LICENSE file.
*/
package be.ac.ulg.montefiore.run.jahmm;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
/**
* Main Input-Hmm class; it implements a Hidden Markov Model with an input
* layer. An IHMM is composed of:
* <ul>
* <li><i>states</i>: each state has a given probability of being initial
* (<i>pi</i>) and an associated observation probability function (<i>opdf</i>).
* Each state is associated to an index; the first state is numbered 0, the last
* n-1 (where n is the number of states in the HMM); this number is given as an
* argument to the various functions to refer to the matching state. </li>
* <li><i>transition probabilities</i>: that is, the probability of going from
* state <i>i</i> to state <i>j</i> (<i>a<sub>i,j</sub></i>).</li>
* <li><i>inputs</i>: a sequence of inputs.</li>
* </ul>
* <p>
* Important objects extensively used with HMMs are {@link Observation
* Observation}s, observation sequences and sets of observation sequences. An
* observation sequence is simply a {@link List List} of
* {@link Observation Observation}s (in the right order, the i-th element of the
* vector being the i-th element of the sequence). A set of observation
* sequences is a {@link java.util.List List} of such sequences.
*
* @param <O> the type of observations
*/
public class IHmm<O extends Observation> extends HmmBase<O, double[][][], ArrayList<Opdf<O>>> {
private static final long serialVersionUID = 1L;
private static final Logger LOG = Logger.getLogger(IHmm.class.getName());
/**
* Creates a new IHMM. Each state has the same <i>pi</i> value and the
* transition probabilities are all equal.
*
* @param nbSymbols The (strictly positive) number of input symbols of the
* IHMM.
* @param nbStates The (strictly positive) number of states of the IHMM.
* @param opdfFactory A pdf generator that is used to build the pdfs
* associated to each state.
*/
public IHmm(int nbSymbols, int nbStates, OpdfFactory<? extends Opdf<O>> opdfFactory) {
if (nbSymbols <= 0) {
throw new IllegalArgumentException("Number of symbols must be strictly positive");
}
pi = new double[nbStates];
a = new double[nbStates][nbSymbols][nbStates];
b = new ArrayList<>(nbStates);
double ac = 1. / (nbStates * nbSymbols);
for (int i = 0; i < nbStates; i++) {
pi[i] = 1. / nbStates;
b.add(opdfFactory.factor());
for (int j = 0; j < nbSymbols; j++) {
for (int k = 0; k < nbStates; k++) {
a[i][j][k] = ac;
}
}
}
}
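// A minimal construction sketch, assuming ObservationInteger and OpdfIntegerFactory from
// this library are available (as with the plain Hmm class):
//   IHmm<ObservationInteger> ihmm = new IHmm<>(2, 3, new OpdfIntegerFactory(4));
// builds an input-HMM with 2 input symbols and 3 states, uniform pi and transition
// probabilities, and integer observation distributions over {0,...,3}.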
/**
* Creates a new IHMM. All the HMM parameters are given as arguments.
*
* @param pi The initial probability values. <code>pi[i]</code> is the
* initial probability of state <code>i</code>. This array is copied.
* @param a The state transition probability array. <code>a[i][j][k]</code>
* is the probability of going from state <code>i</code> to state
* <code>k</code> given input symbol <code>j</code>. This array is copied.
* @param opdfs The observation distributions. <code>opdfs.get(i)</code> is
* the observation distribution associated with state <code>i</code>. The
* distributions are not copied.
*/
public IHmm(double[] pi, double[][][] a, List<? extends Opdf<O>> opdfs) {
// Copies pi and a (per the Javadoc above); the opdf list is copied but the opdfs themselves are shared.
this.pi = pi.clone();
this.a = new double[a.length][][];
for (int i = 0; i < a.length; i++) {
this.a[i] = new double[a[i].length][];
for (int j = 0; j < a[i].length; j++) {
this.a[i][j] = a[i][j].clone();
}
}
this.b = new ArrayList<>(opdfs);
}
/**
* Creates a new IHMM. The parameters of the created HMM are left
* unspecified (<code>null</code>) and must be set using the appropriate
* methods.
*
* @param nbSymbols The (strictly positive) number of input symbols of the HMM.
* @param nbStates The (strictly positive) number of states of the HMM.
*/
protected IHmm(int nbSymbols, int nbStates) {
if (nbSymbols <= 0) {
throw new IllegalArgumentException("Number of symbols must be strictly positive");
}
if (nbStates <= 0) {
throw new IllegalArgumentException("Number of states must be strictly positive");
}
pi = new double[nbStates];
a = new double[nbStates][nbSymbols][nbStates];
b = new ArrayList<>(nbStates);
double ac = 1. / (nbStates * nbSymbols);
for (int i = 0; i < nbStates; i++) {
pi[i] = 1. / nbStates;
for (int j = 0; j < nbSymbols; j++) {
for (int k = 0; k < nbStates; k++) {
a[i][j][k] = ac;
}
}
}
}
/**
* Returns the number of states of this HMM.
*
* @return The number of states of this HMM.
*/
@Override
public int nbStates() {
return a.length;
}
/**
* Returns the number of symbols of this IHMM.
*
* @return The number of symbols of this IHMM.
*/
public int nbSymbols() {
return a[0x00].length;
}
/**
* Creates a duplicate object of the given input Hidden Markov Model.
*
* @return An IHmm that contains the same data as this object.
* @throws CloneNotSupportedException An exception such that classes lower
* in the hierarchy can fail to clone.
*/
@Override
public IHmm<O> clone()
throws CloneNotSupportedException {
IHmm<O> ihmm = new IHmm<>(nbSymbols(), nbStates());
//TODO
return ihmm;
}
/**
* Returns the probability associated with the transition going from state
* <i>i</i> to state <i>j</i> (<i>a<sub>i,j</sub></i>).
*
* @param i The first state number such that
* <code>0 ≤ i < nbStates()</code>.
* @param j The second state number such that
* <code>0 ≤ j < nbStates()</code>.
* @return The probability associated to the transition going from
* <code>i</code> to state <code>j</code> regardless of the input.
*/
@Override
public double getAij(int i, int j) {
double total = 0.0d;
int n = a[0x00].length;
for (int k = 0x00; k < n; k++) {
total += a[i][k][j];
}
return total;
}
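// The two-argument overload above marginalises over the input symbols, i.e.
// Aij = sum over k of a[i][k][j]; with the uniform initialisation used by the constructors
// (every entry equal to 1/(nbStates*nbSymbols)) each summed row still totals 1.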
/**
* Returns the probability associated with the transition going from state
* <i>i</i> to state <i>j</i> (<i>a<sub>i,j</sub></i>).
*
* @param i The first state number such that
* <code>0 ≤ i < nbStates()</code>.
* @param j The second state number such that
* <code>0 ≤ j < nbStates()</code>.
* @param k The input symbol such that
* <code>0 ≤ k < nbSymbols()</code>.
* @return The probability associated to the transition going from
* <code>i</code> to state <code>j</code>.
*/
public double getAij(int i, int k, int j) {
return a[i][k][j];
}
/**
* Gives a description of this IHMM.
*
* @return A textual description of this IHMM.
*/
@Override
public String toString() {
return toString(NumberFormat.getInstance());
}
/**
* Gives a description of this HMM.
*
* @param nf A number formatter used to print numbers (e.g. Aij values).
* @return A textual description of this HMM.
*/
@Override
public String toString(NumberFormat nf) {
String s = "HMM with " + nbStates() + " state(s)\n";
for (int i = 0; i < nbStates(); i++) {
s += "\nState " + i + "\n";
s += " Pi: " + getPi(i) + "\n";
s += " Aij:";
for (int j = 0; j < nbStates(); j++) {
s += " " + nf.format(getAij(i, j));
}
s += "\n";
s += " Opdf: " + getOpdf(i).toString(nf) + "\n";
}
return s;
}
@Override
public void fold(int n) {
int m = pi.length;
double[] pia = new double[m], pib = this.pi, tmp;
for(int i = 0x00; i < n; i++) {
tmp = pia;
pia = pib;
pib = tmp;
for(int j = 0x00; j < m; j++) {
double tot = 0.0d;
for(int k = 0x00; k < m; k++) {
tot += 1.0d;//TODO
}
pib[j] = tot;
}
}
if((n&0x01) != 0x00) {
this.pi = pib;
}
}
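// Note on fold(n): pia and pib act as a double buffer for the folded pi vector; the freshly
// computed values always end up in pib, and because the buffers are swapped at the top of
// each pass, an odd number of passes leaves the result in the array that this.pi does not
// reference, hence the (n & 0x01) reassignment. The inner accumulation is still a
// placeholder (see the TODO): folding an input-HMM also needs a distribution over inputs.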
}
|
jamhh/src/be/ac/ulg/montefiore/run/jahmm/IHmm.java
|
/*
* Copyright (c) 2004-2009, Jean-Marc François. All Rights Reserved.
* Licensed under the New BSD license. See the LICENSE file.
*/
package be.ac.ulg.montefiore.run.jahmm;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
/**
* Main Input-Hmm class; it implements a Hidden Markov Model with an input
* layer. An IHMM is composed of:
* <ul>
* <li><i>states</i>: each state has a given probability of being initial
* (<i>pi</i>) and an associated observation probability function (<i>opdf</i>).
* Each state is associated to an index; the first state is numbered 0, the last
* n-1 (where n is the number of states in the HMM); this number is given as an
* argument to the various functions to refer to the matching state. </li>
* <li><i>transition probabilities</i>: that is, the probability of going from
* state <i>i</i> to state <i>j</i> (<i>a<sub>i,j</sub></i>).</li>
* <li><i>inputs</i>: a sequence of inputs.</li>
* </ul>
* <p>
* Important objects extensively used with HMMs are {@link Observation
* Observation}s, observation sequences and sets of observation sequences. An
* observation sequence is simply a {@link List List} of
* {@link Observation Observation}s (in the right order, the i-th element of the
* vector being the i-th element of the sequence). A set of observation
* sequences is a {@link java.util.List List} of such sequences.
*
* @param <O> the type of observations
*/
public class IHmm<O extends Observation> extends HmmBase<O, double[][][], ArrayList<Opdf<O>>> {
private static final long serialVersionUID = 1L;
private static final Logger LOG = Logger.getLogger(IHmm.class.getName());
/**
* Creates a new IHMM. Each state has the same <i>pi</i> value and the
* transition probabilities are all equal.
*
* @param nbSymbols The (strictly positive) number of input symbols of the
* IHMM.
* @param nbStates The (strictly positive) number of states of the IHMM.
* @param opdfFactory A pdf generator that is used to build the pdfs
* associated to each state.
*/
public IHmm(int nbSymbols, int nbStates, OpdfFactory<? extends Opdf<O>> opdfFactory) {
if (nbSymbols <= 0) {
throw new IllegalArgumentException("Number of symbols must be strictly positive");
}
pi = new double[nbStates];
a = new double[nbStates][nbSymbols][nbStates];
b = new ArrayList<>(nbStates);
double ac = 1. / (nbStates * nbSymbols);
for (int i = 0; i < nbStates; i++) {
pi[i] = 1. / nbStates;
b.add(opdfFactory.factor());
for (int j = 0; j < nbSymbols; j++) {
for (int k = 0; k < nbStates; k++) {
a[i][j][k] = ac;
}
}
}
}
/**
* Creates a new IHMM. All the HMM parameters are given as arguments.
*
* @param pi The initial probability values. <code>pi[i]</code> is the
* initial probability of state <code>i</code>. This array is copied.
* @param a The state transition probability array. <code>a[i][j][k]</code>
* is the probability of going from state <code>i</code> to state
* <code>k</code> given input symbol <code>j</code>. This array is copied.
* @param opdfs The observation distributions. <code>opdfs.get(i)</code> is
* the observation distribution associated with state <code>i</code>. The
* distributions are not copied.
*/
public IHmm(double[] pi, double[][][] a, List<? extends Opdf<O>> opdfs) {
// Copies pi and a (per the Javadoc above); the opdf list is copied but the opdfs themselves are shared.
this.pi = pi.clone();
this.a = new double[a.length][][];
for (int i = 0; i < a.length; i++) {
this.a[i] = new double[a[i].length][];
for (int j = 0; j < a[i].length; j++) {
this.a[i][j] = a[i][j].clone();
}
}
this.b = new ArrayList<>(opdfs);
}
/**
* Creates a new IHMM. The parameters of the created HMM are left
* unspecified (<code>null</code>) and must be set using the appropriate
* methods.
*
* @param nbSymbols The (strictly positive) number of input symbols of the HMM.
* @param nbStates The (strictly positive) number of states of the HMM.
*/
protected IHmm(int nbSymbols, int nbStates) {
if (nbSymbols <= 0) {
throw new IllegalArgumentException("Number of symbols must be strictly positive");
}
if (nbStates <= 0) {
throw new IllegalArgumentException("Number of states must be strictly positive");
}
pi = new double[nbStates];
a = new double[nbStates][nbSymbols][nbStates];
b = new ArrayList<>(nbStates);
double ac = 1. / (nbStates * nbSymbols);
for (int i = 0; i < nbStates; i++) {
pi[i] = 1. / nbStates;
for (int j = 0; j < nbSymbols; j++) {
for (int k = 0; k < nbStates; k++) {
a[i][j][k] = ac;
}
}
}
}
/**
* Returns the number of states of this HMM.
*
* @return The number of states of this HMM.
*/
@Override
public int nbStates() {
return a.length;
}
/**
* Returns the number of symbols of this IHMM.
*
* @return The number of symbols of this IHMM.
*/
public int nbSymbols() {
return a[0].length;
}
/**
 * Creates a duplicate of this Hidden Markov Model.
 *
 * @return An IHmm that contains the same data as this object.
 * @throws CloneNotSupportedException An exception that classes lower in
 * the hierarchy may throw if they cannot be cloned.
*/
@Override
public IHmm<O> clone()
throws CloneNotSupportedException {
IHmm<O> ihmm = new IHmm<>(nbSymbols(), nbStates());
ihmm.pi = pi.clone();
for (int i = 0; i < a.length; i++) {
for (int j = 0; j < a[i].length; j++) {
ihmm.a[i][j] = a[i][j].clone();
}
}
// The observation distributions are shared, not deep-cloned.
ihmm.b.addAll(b);
return ihmm;
}
/**
* Returns the probability associated with the transition going from state
* <i>i</i> to state <i>j</i> (<i>a<sub>i,j</sub></i>).
*
* @param i The first state number such that
* <code>0 ≤ i < nbStates()</code>.
* @param j The second state number such that
* <code>0 ≤ j < nbStates()</code>.
 * @return The probability associated with the transition going from state
 * <code>i</code> to state <code>j</code>, summed over all input symbols.
*/
@Override
public double getAij(int i, int j) {
double total = 0.0d;
int n = a[0].length;
for (int k = 0; k < n; k++) {
total += a[i][k][j];
}
return total;
}
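// Worked example (illustrative): with 2 input symbols, getAij(0, 1) returns
// a[0][0][1] + a[0][1][1], i.e. the transition mass from state 0 to state 1
// accumulated over every input symbol.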
/**
 * Returns the probability associated with the transition going from state
 * <i>i</i> to state <i>j</i> given input symbol <i>k</i>
 * (<i>a<sub>i,k,j</sub></i>).
 *
 * @param i The first state number such that
 * <code>0 ≤ i < nbStates()</code>.
 * @param k The input symbol such that
 * <code>0 ≤ k < nbSymbols()</code>.
 * @param j The second state number such that
 * <code>0 ≤ j < nbStates()</code>.
 * @return The probability associated with the transition going from state
 * <code>i</code> to state <code>j</code> given input symbol <code>k</code>.
*/
public double getAij(int i, int k, int j) {
return a[i][k][j];
}
/**
* Gives a description of this IHMM.
*
* @return A textual description of this IHMM.
*/
@Override
public String toString() {
return toString(NumberFormat.getInstance());
}
/**
* Gives a description of this HMM.
*
* @param nf A number formatter used to print numbers (e.g. Aij values).
* @return A textual description of this HMM.
*/
@Override
public String toString(NumberFormat nf) {
StringBuilder s = new StringBuilder("HMM with " + nbStates() + " state(s)\n");
for (int i = 0; i < nbStates(); i++) {
s.append("\nState ").append(i).append("\n");
s.append(" Pi: ").append(getPi(i)).append("\n");
s.append(" Aij:");
for (int j = 0; j < nbStates(); j++) {
s.append(" ").append(nf.format(getAij(i, j)));
}
s.append("\n");
s.append(" Opdf: ").append(getOpdf(i).toString(nf)).append("\n");
}
return s.toString();
}
}
|
folding support
|
jamhh/src/be/ac/ulg/montefiore/run/jahmm/IHmm.java
|
folding support
|
|
Java
|
bsd-3-clause
|
59b43a97161c98e4c9ae05ae923d87d920dfa973
| 0
|
NCIP/caadapter,NCIP/caadapter,NCIP/caadapter
|
/**
* <!-- LICENSE_TEXT_START -->
The contents of this file are subject to the caAdapter Software License (the "License"). You may obtain a copy of the License at the following location:
[caAdapter Home Directory]\docs\caAdapter_license.txt, or at:
http://ncicb.nci.nih.gov/infrastructure/cacore_overview/caadapter/indexContent/docs/caAdapter_License
* <!-- LICENSE_TEXT_END -->
*/
package gov.nih.nci.caadapter.ui.mapping.mms;
import gov.nih.nci.caadapter.common.BaseResult;
import gov.nih.nci.caadapter.common.Log;
import gov.nih.nci.caadapter.common.Message;
import gov.nih.nci.caadapter.common.MessageResources;
import gov.nih.nci.caadapter.common.MetaObject;
import gov.nih.nci.caadapter.common.MetaObjectImpl;
import gov.nih.nci.caadapter.common.MetaParser;
import gov.nih.nci.caadapter.common.SDKMetaData;
import gov.nih.nci.caadapter.common.util.Config;
import gov.nih.nci.caadapter.common.util.FileUtil;
import gov.nih.nci.caadapter.common.util.GeneralUtilities;
import gov.nih.nci.caadapter.common.util.CaadapterUtil;
import gov.nih.nci.caadapter.common.util.Iso21090Util;
import gov.nih.nci.caadapter.common.validation.ValidatorResult;
import gov.nih.nci.caadapter.common.validation.ValidatorResults;
import gov.nih.nci.caadapter.hl7.map.impl.MappingImpl;
import gov.nih.nci.caadapter.mms.generator.CumulativeMappingGenerator;
import gov.nih.nci.caadapter.mms.generator.HBMGenerateCacoreIntegrator;
import gov.nih.nci.caadapter.common.metadata.AssociationMetadata;
import gov.nih.nci.caadapter.common.metadata.AttributeMetadata;
import gov.nih.nci.caadapter.common.metadata.ModelMetadata;
import gov.nih.nci.caadapter.common.metadata.ObjectMetadata;
import gov.nih.nci.caadapter.ui.common.ActionConstants;
import gov.nih.nci.caadapter.ui.common.DefaultSettings;
import gov.nih.nci.caadapter.ui.common.MappableNode;
import gov.nih.nci.caadapter.ui.common.MappingFileSynchronizer;
import gov.nih.nci.caadapter.ui.common.actions.TreeCollapseAllAction;
import gov.nih.nci.caadapter.ui.common.actions.TreeExpandAllAction;
import gov.nih.nci.caadapter.ui.common.context.ContextManager;
import gov.nih.nci.caadapter.ui.common.context.MenuConstants;
import gov.nih.nci.caadapter.ui.common.tree.DefaultSourceTreeNode;
import gov.nih.nci.caadapter.ui.common.tree.DefaultTargetTreeNode;
import gov.nih.nci.caadapter.ui.common.tree.TreeDefaultDropTransferHandler;
import gov.nih.nci.caadapter.ui.mapping.AbstractMappingPanel;
import gov.nih.nci.caadapter.ui.mapping.MappingMiddlePanel;
import gov.nih.nci.caadapter.ui.mapping.hl7.actions.RefreshMapAction;
import gov.nih.nci.caadapter.ui.mapping.mms.actions.MmsTargetTreeDropTransferHandler;
import gov.nih.nci.ncicb.xmiinout.domain.UMLAttribute;
import gov.nih.nci.ncicb.xmiinout.domain.UMLClass;
import gov.nih.nci.ncicb.xmiinout.domain.UMLDependency;
import gov.nih.nci.ncicb.xmiinout.domain.UMLInterface;
import gov.nih.nci.ncicb.xmiinout.domain.UMLModel;
import gov.nih.nci.ncicb.xmiinout.domain.UMLPackage;
import gov.nih.nci.ncicb.xmiinout.domain.UMLTaggedValue;
import gov.nih.nci.ncicb.xmiinout.util.ModelUtil;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.dnd.DnDConstants;
import java.awt.event.ActionEvent;
import java.io.File;
import java.io.FileReader;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import javax.swing.Action;
import javax.swing.BorderFactory;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JRootPane;
import javax.swing.JSplitPane;
import javax.swing.JToolBar;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreeNode;
/**
* The class is the main panel to construct the UI and initialize the utilities
* to facilitate mapping functions.
*
* @author OWNER: Ye Wu
* @author LAST UPDATE $Author: wangeug $
 * @since caAdapter v3.2
* @version $Revision: 1.43 $
* @date $Date: 2009-09-30 17:08:26 $
*/
public class Object2DBMappingPanel extends AbstractMappingPanel {
private static final String LOGID = "$RCSfile: Object2DBMappingPanel.java,v $";
public static String RCSID = "$Header: /share/content/gforge/caadapter/caadapter/components/userInterface/src/gov/nih/nci/caadapter/ui/mapping/mms/Object2DBMappingPanel.java,v 1.43 2009-09-30 17:08:26 wangeug Exp $";
private MmsTargetTreeDropTransferHandler mmsTargetTreeDropTransferHandler = null;
private static final String SELECT_XMI = "Open XMI file...";
private static final String SELECT_XSD = "Open XSD file...";
private static final String GENERATE_HBM = "Generate HBM Files";
public Object2DBMappingPanel() {
this("defaultObjectToDatabaseMapping");
}
public Object2DBMappingPanel(String name) {
this.setBorder(BorderFactory.createEmptyBorder());
this.setLayout(new BorderLayout());
this.add(getCenterPanel(false), BorderLayout.CENTER);
fileSynchronizer = new MappingFileSynchronizer(this);
}
protected JPanel getTopLevelLeftPanel() {
JPanel topCenterPanel = new JPanel(new BorderLayout());
topCenterPanel.setBorder(BorderFactory.createEmptyBorder());
JSplitPane centerSplitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT);
DefaultSettings.setDefaultFeatureForJSplitPane(centerSplitPane);
// construct source panel
sourceButtonPanel = new JPanel(new BorderLayout());
sourceButtonPanel.setBorder(BorderFactory.createEmptyBorder());
sourceLocationPanel = new JPanel(new BorderLayout(2, 0));
sourceLocationPanel.setBorder(BorderFactory.createEmptyBorder());
sourceTreeCollapseAllAction = new TreeCollapseAllAction(sTree);
sourceTreeExpandAllAction = new TreeExpandAllAction(sTree);
JToolBar sourceTreeToolBar = new JToolBar("Source Tree Tool Bar");
sourceTreeToolBar.setFloatable(false);
sourceTreeToolBar.add(sourceTreeExpandAllAction);
sourceTreeToolBar.add(sourceTreeCollapseAllAction);
sourceLocationPanel.add(sourceTreeToolBar, BorderLayout.WEST);
sourceLocationArea.setEditable(false);
sourceLocationArea.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 10), 24));
sourceLocationPanel.add(sourceLocationArea, BorderLayout.CENTER);
JButton openXMIButton = new JButton(SELECT_XMI);
sourceLocationPanel.add(openXMIButton, BorderLayout.EAST);
openXMIButton.setMnemonic('O');
openXMIButton.setToolTipText("Select XMI file...");
openXMIButton.addActionListener(this);
sourceButtonPanel.add(sourceLocationPanel, BorderLayout.NORTH);
// sourceScrollPane =
// DefaultSettings.createScrollPaneWithDefaultFeatures();
sourceScrollPane.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 4),
(int) (Config.FRAME_DEFAULT_HEIGHT / 1.5)));
sourceScrollPane.setSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 3),
(int) (Config.FRAME_DEFAULT_HEIGHT / 1.5)));
sourceButtonPanel.add(sourceScrollPane, BorderLayout.CENTER);
// construct target panel
targetButtonPanel = new JPanel(new BorderLayout());
targetButtonPanel.setBorder(BorderFactory.createEmptyBorder());
targetLocationPanel = new JPanel(new BorderLayout(2, 0));
targetLocationPanel.setBorder(BorderFactory.createEmptyBorder());
targetTreeCollapseAllAction = new TreeCollapseAllAction(tTree);
targetTreeExpandAllAction = new TreeExpandAllAction(tTree);
JToolBar targetTreeToolBar = new JToolBar("Target Tree Tool Bar");
targetTreeToolBar.setFloatable(false);
targetTreeToolBar.add(targetTreeExpandAllAction);
targetTreeToolBar.add(targetTreeCollapseAllAction);
targetLocationPanel.add(targetTreeToolBar, BorderLayout.WEST);
targetLocationArea.setEditable(false);
targetLocationArea.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 10), 24));
targetLocationPanel.add(targetLocationArea, BorderLayout.CENTER);
targetButtonPanel.add(targetLocationPanel, BorderLayout.NORTH);
// targetScrollPane =
// DefaultSettings.createScrollPaneWithDefaultFeatures();
targetScrollPane.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 3),
Config.FRAME_DEFAULT_HEIGHT / 2));
targetButtonPanel.add(targetScrollPane, BorderLayout.CENTER);
targetButtonPanel.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 5),
(int) (Config.FRAME_DEFAULT_HEIGHT / 1.5)));
// construct middle panel
JPanel centerFuncationPanel = new JPanel(new BorderLayout(2, 0));
JPanel middleContainerPanel = new JPanel(new BorderLayout());
// placeholder that keeps the source and target button panels the same width
// so the mapping graphs are drawn in alignment.
JLabel placeHolderLabel = new JLabel();
placeHolderLabel.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 16), 24));
middlePanel = new MappingMiddlePanel(this);
middlePanel.setKind("o2db");
middlePanel.setSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 3),
(int) (Config.FRAME_DEFAULT_HEIGHT / 1.5)));
JButton generateHMBButton = new JButton("Generate HBM Files");
centerFuncationPanel.add(generateHMBButton, BorderLayout.CENTER);
generateHMBButton.addActionListener(this);
centerFuncationPanel.add(placeHolderLabel, BorderLayout.EAST);
centerFuncationPanel.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 3.5), 24));
middleContainerPanel.add(centerFuncationPanel, BorderLayout.NORTH);
middleContainerPanel.add(middlePanel, BorderLayout.CENTER);
JSplitPane rightSplitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT);
DefaultSettings.setDefaultFeatureForJSplitPane(rightSplitPane);
rightSplitPane.setDividerLocation(0.5);
rightSplitPane.setLeftComponent(middleContainerPanel);
rightSplitPane.setRightComponent(targetButtonPanel);
centerSplitPane.setLeftComponent(sourceButtonPanel);
centerSplitPane.setRightComponent(rightSplitPane);
topCenterPanel.add(centerSplitPane, BorderLayout.CENTER);
topCenterPanel.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH * 0.8),
(int) (Config.FRAME_DEFAULT_HEIGHT / 1.5)));
return topCenterPanel;
}
public void actionPerformed(ActionEvent e) {
String command = e.getActionCommand();
try {
boolean everythingGood = true;
if (SELECT_XMI.equals(command)) {
File file = DefaultSettings.getUserInputOfFileFromGUI(this, // FileUtil.getUIWorkingDirectoryPath(),
".xmi", "Open XMI file ...", false, false);
if (file != null) {
// everythingGood = processOpenSourceTree(file, true, true);
ValidatorResults results = processOpenMapFile(file);
}
}
else if (GENERATE_HBM.equals(command)) {
File fileFromPanel = getSaveFile();
if (fileFromPanel == null) {
if (!isSourceTreePopulated() || !isTargetTreePopulated()) {
String msg = "Conduct object to database mapping before saving the map specification.";
JOptionPane.showMessageDialog(this, msg, "Error",
JOptionPane.ERROR_MESSAGE);
return;
}
}
try {
JFileChooser fileChooser = new JFileChooser(fileFromPanel);
fileChooser
.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
int result = fileChooser.showOpenDialog(this);
switch (result) {
case JFileChooser.APPROVE_OPTION:
if (!fileChooser.getSelectedFile().exists()) {
boolean mkdirResult = fileChooser.getSelectedFile()
.mkdirs();
if (!mkdirResult) {
JOptionPane
.showMessageDialog(
this,
"Error creating specified directory, please make sure the directory name is correct!",
"Error",
JOptionPane.ERROR_MESSAGE);
return;
}
}
break;
case JFileChooser.CANCEL_OPTION:
System.out.println("HBM Generatoin Cancelled!");
return;
case JFileChooser.ERROR_OPTION:
return;
}
saveMappingFile();
String outputDir=fileChooser.getSelectedFile().getAbsolutePath();
UMLModel model=CumulativeMappingGenerator.getInstance().getMetaModel().getHandler().getModel();
HBMGenerateCacoreIntegrator.getInstance().generateMapping(model,outputDir);
JOptionPane.showMessageDialog(getParent(),
"HBM files are generated at "+ fileChooser.getSelectedFile().getAbsolutePath(),
"HBM Generation Complete",
JOptionPane.INFORMATION_MESSAGE);
} catch (Exception ex) {
ex.printStackTrace();
}
}
if (!everythingGood) {
Message msg = MessageResources
.getMessage("GEN3", new Object[0]);
JOptionPane.showMessageDialog(this, msg.toString(), "Error",
JOptionPane.ERROR_MESSAGE);
}
} catch (Exception e1) {
DefaultSettings.reportThrowableToLogAndUI(this, e1, "", this,
false, false);
}
}
private void saveMappingFile() {
File file = getSaveFile();
if (file == null) {
file = DefaultSettings
.getUserInputOfFileFromGUI(this,
Config.MAP_FILE_DEFAULT_EXTENTION, "Save As...",
true, true);
if (file == null) {
// user cancelled the action
return;
}
}
String mapFileName = file.getAbsolutePath().replaceAll(".xmi", ".map");
try {
// CumulativeMappingToMappingFileGenerator.writeMappingFile(new File(mapFileName), file.getAbsolutePath());
ModelMetadata xmiMetada = CumulativeMappingGenerator.getInstance().getMetaModel();
xmiMetada.getHandler().save( file.getAbsolutePath());
setChanged(false);
} catch (Exception ex) {
ex.printStackTrace();
} finally {
setSaveFile(file);
}
}
protected TreeNode loadSourceTreeData(Object metaInfo, File file)
throws Exception {
TreeNode nodes = new DefaultMutableTreeNode("Object Model");
CumulativeMappingGenerator.init(file.getAbsolutePath());
ModelMetadata myModel = CumulativeMappingGenerator.getInstance().getMetaModel();
LinkedHashMap myMap = myModel.getModelMetadata();
Set keySet = myMap.keySet();
Iterator keySetIterator = keySet.iterator();
while (keySetIterator.hasNext()) {
String key = (String) keySetIterator.next();
if (key.contains( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL ) + ".") ) {
//only directly construct a new tree node for the ObjectMetadata; AttributeMetadata and AssociationMetadata
//nodes are constructed indirectly while the ObjectMetadata node is created
if (myMap.get(key) instanceof gov.nih.nci.caadapter.common.metadata.ObjectMetadata) {
construct_node(nodes, key, (CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL ) + ".").length(), true);
}
}
}
return nodes;
}
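// Illustrative note (assuming the default prefixes set in processOpenSourceTree):
// model metadata keys look like "Logical View.Logical Model.<package>.<Class>";
// construct_node() strips that prefix and splits the remainder on '.' to rebuild
// the package hierarchy under the "Object Model" root node.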
private void construct_node(TreeNode node, String fullName, int prefixLen, boolean isSourceNode)
{
String name = fullName.substring(prefixLen, fullName.length());
String[] pks = name.split("\\.");
ModelMetadata myModel = CumulativeMappingGenerator.getInstance().getMetaModel();
LinkedHashMap myMap = myModel.getModelMetadata();
if (pks.length <= 0)
return;
if (pks.length == 1) {
if (isSourceNode)
((DefaultMutableTreeNode) node).add(new DefaultSourceTreeNode(
myMap.get(fullName), true));
else
((DefaultMutableTreeNode) node).add(new DefaultTargetTreeNode(
myMap.get(fullName), true));
return;
}
//find the package tree node of an object;
//create the missing nodes
DefaultMutableTreeNode packageNode = (DefaultMutableTreeNode) node;
for (int i = 0; i < pks.length - 1; i++) {
boolean exist = false;
Enumeration children = packageNode.children();
while (children.hasMoreElements()) {
DefaultMutableTreeNode current = (DefaultMutableTreeNode) children
.nextElement();
if (current.toString().equals(pks[i])) {
exist = true;
packageNode = current;
break;
}
}
if (!exist) {
DefaultMutableTreeNode newTreeNode = new DefaultMutableTreeNode(
pks[i], true);
packageNode.add(newTreeNode);
packageNode = newTreeNode;
}
}
DefaultMutableTreeNode newTreeNode;
if (isSourceNode)
{
newTreeNode = new DefaultSourceTreeNode(myMap.get(fullName),true);
//process Attributes associated with an object
ObjectMetadata objectMeta=(ObjectMetadata)newTreeNode.getUserObject();
for (AttributeMetadata objectAttr:objectMeta.getAttributes())
{
DefaultSourceTreeNode attrNode=new DefaultSourceTreeNode(objectAttr, true);
addIsoComplexTypeAttribute(1,attrNode, myMap);
newTreeNode.add(attrNode);
}
//process Associations associated with an object
for (AssociationMetadata asscMeta: objectMeta.getAssociations())
{
newTreeNode.add(new DefaultSourceTreeNode(asscMeta,false));
}
}
else
newTreeNode = new DefaultTargetTreeNode(myMap.get(fullName), true);
packageNode.add(newTreeNode);
return;
}
private void addIsoComplexTypeAttribute(int attrLevel,DefaultSourceTreeNode elementNode, LinkedHashMap metaHash )
{
if (attrLevel>2)
return;
AttributeMetadata elementMeta=(AttributeMetadata)elementNode.getUserObject();
ObjectMetadata childObject =Iso21090Util.resolveAttributeDatatype(metaHash, elementMeta.getDatatype());
if (childObject==null)
return;
for (AttributeMetadata attrMeta:childObject.getAttributes())
{
DefaultSourceTreeNode childAttrNode=new DefaultSourceTreeNode(attrMeta,true);
elementNode.add(childAttrNode);
addIsoComplexTypeAttribute(attrLevel+1,childAttrNode, metaHash );
}
}
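// Illustrative note: the attrLevel guard above caps ISO 21090 complex-type
// expansion at two nested levels, so an attribute whose datatype resolves to
// another complex type is expanded, but deeper nesting is pruned.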
protected TreeNode loadTargetTreeData(Object metaInfo, File absoluteFile)
throws Exception {
TreeNode nodes = new DefaultMutableTreeNode("Data Model");
ModelMetadata myModel = CumulativeMappingGenerator.getInstance().getMetaModel();
LinkedHashMap myMap = myModel.getModelMetadata();
Set keySet = myMap.keySet();
Iterator keySetIterator = keySet.iterator();
while (keySetIterator.hasNext()) {
String key = (String) keySetIterator.next();
if (key.contains( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL ) + ".")) {
if (myMap.get(key) instanceof gov.nih.nci.caadapter.common.metadata.ObjectMetadata) {
construct_node(nodes, key, ( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL ) + ".").length(), false);
} else {
construct_node(nodes, key, ( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL ) + ".").length(), false);
}
}
}
return nodes;
}
protected void buildTargetTree(Object metaInfo, File absoluteFile,
boolean isToResetGraph) throws Exception {
super.buildTargetTree(metaInfo, absoluteFile, isToResetGraph);
tTree.setCellRenderer(new MMSRenderer());
sTree.setCellRenderer(new MMSRendererPK());
// instantiate the "DropTransferHandler"
mmsTargetTreeDropTransferHandler = new MmsTargetTreeDropTransferHandler(
tTree, getMappingDataManager(), DnDConstants.ACTION_LINK);
}
/**
* Called by actionPerformed() and overridable by descendant classes.
*
* @param file
* @throws Exception
*/
protected boolean processOpenSourceTree(File file, boolean isToResetGraph,
boolean supressReportIssuesToUI) throws Exception {
if( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL ) == null )
{
CaadapterUtil.savePrefParams( Config.MMS_PREFIX_OBJECTMODEL , "Logical View.Logical Model");
}
if( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL ) == null )
{
CaadapterUtil.savePrefParams( Config.MMS_PREFIX_DATAMODEL , "Logical View.Data Model");
}
MetaObject metaInfo = null;
buildSourceTree(metaInfo, file, isToResetGraph);
middlePanel.getMappingDataManager().registerSourceComponent(metaInfo,
file);
buildTargetTree(metaInfo, file, isToResetGraph);
middlePanel.getMappingDataManager().registerTargetComponent(metaInfo,
file);
return true;
}
/**
* Called by actionPerformed() and overridable by descendant classes.
*
* @param file
* @throws Exception
*/
protected boolean processOpenTargetTree(File file, boolean isToResetGraph,
boolean supressReportIssuesToUI) throws Exception {
String fileExtension = FileUtil.getFileExtension(file, true);
// parse the file into a meta object graph.
MetaParser parser = null;
MetaObject metaInfo = null;
BaseResult returnResult = null;
returnResult = parser.parse(new FileReader(file));
ValidatorResults validatorResults = returnResult.getValidatorResults();
if (validatorResults != null && validatorResults.hasFatal()) {
Message msg = validatorResults.getMessages(
ValidatorResult.Level.FATAL).get(0);
DefaultSettings.reportThrowableToLogAndUI(this, null, msg
.toString(), this, true, supressReportIssuesToUI);
return false;
}
buildTargetTree(metaInfo, file, isToResetGraph);
middlePanel.getMappingDataManager().registerTargetComponent(metaInfo, file);
return true;
}
/**
* Called by actionPerformed() and overridable by descendant classes.
*
* @param file
* @throws Exception
 * changed from protected to public by sean
*/
public ValidatorResults processOpenMapFile(File file) throws Exception
{
// Read the XMI Mapping attributes
String fileName = file.getAbsolutePath();
boolean success = CumulativeMappingGenerator.init(fileName);
if (success) {
ModelMetadata xmiModelMeta = CumulativeMappingGenerator.getInstance().getMetaModel();
if (xmiModelMeta == null) {
JOptionPane.showMessageDialog(null, "Error opening XMI file");
}
boolean isSuccess;
// Read XMI File and construct Target and Source Trees
processOpenSourceTree(file, true, true);
DefaultMutableTreeNode rootSTree = (DefaultMutableTreeNode) sTree.getModel().getRoot();
DefaultMutableTreeNode rootTTree = (DefaultMutableTreeNode) tTree.getModel().getRoot();
Hashtable sourceNodes = new Hashtable();
Hashtable targetNodes = new Hashtable();
buildHash(sourceNodes, rootSTree, CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL ));
buildHash(targetNodes, rootTTree, CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL ));
MappingImpl newMappingImpl = new MappingImpl();
newMappingImpl.setSourceComponent(null);
newMappingImpl.setTargetComponent(null);
middlePanel.getMappingDataManager().setMappingData(newMappingImpl);
middlePanel.getMappingDataManager().clearAllGraphCells();
setSaveFile(file);
processXmiModel(xmiModelMeta,sourceNodes, targetNodes);
} else {
JOptionPane.showMessageDialog( null, "The .map or .xmi file selected is not valid. Please check the export settings in EA and try again.");
}
return null;
}
private void processXmiModel(ModelMetadata myModelMeta, Hashtable sourceNodes, Hashtable targetNodes)
{
CumulativeMappingGenerator cumulativeMappingGenerator = CumulativeMappingGenerator.getInstance();
// Let's try to get all the details
UMLModel myUMLModel = myModelMeta.getModel();
//read and set model prefix
if ( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL ) != null )
{
myModelMeta.setMmsPrefixDataModel(CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL ));
} else {
myModelMeta.setMmsPrefixDataModel( "Logical View.Data Model" );
}
if ( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL ) != null )
{
myModelMeta.setMmsPrefixObjectModel(CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL ));
} else {
myModelMeta.setMmsPrefixObjectModel( "Logical View.Logical Model" );
}
boolean isSuccess;
// create Object-table dependency mapping UI
for (UMLDependency dep : myUMLModel.getDependencies())
{
String sourceXpath = "";
String targetXpath = "";
UMLClass client = (UMLClass) dep.getClient();
if (dep.getSupplier() instanceof UMLInterface)
{
Log.logInfo(this, "found UMLInterface:"+((UMLInterface)dep.getSupplier()).getName());
continue;
}
UMLClass supplier = (UMLClass) dep.getSupplier();
StringBuffer pathKey = new StringBuffer(ModelUtil.getFullPackageName(client));
targetXpath = pathKey + "." + client.getName();
pathKey = new StringBuffer(ModelUtil.getFullPackageName(supplier));
sourceXpath = pathKey + "." + supplier.getName();
DefaultMutableTreeNode sourceNode = (DefaultMutableTreeNode) sourceNodes.get(sourceXpath);
DefaultMutableTreeNode targetNode = (DefaultMutableTreeNode) targetNodes.get(targetXpath);
if (sourceNode == null || targetNode == null)
{
Log.logInfo(this, "Dependency missing--- source:"+sourceXpath +" ; target:"+targetXpath);
continue;
}
SDKMetaData sourceSDKMetaData = (SDKMetaData) sourceNode.getUserObject();
SDKMetaData targetSDKMetaData = (SDKMetaData) targetNode.getUserObject();
sourceSDKMetaData.setMapped(true);
//loading XMI and create mapping UI
isSuccess = cumulativeMappingGenerator.map(sourceXpath, targetXpath, false);
isSuccess = isSuccess&& getMappingDataManager().createMapping(
(MappableNode) sourceNode,
(MappableNode) targetNode);
if (!isSuccess)
{
Log.logInfo(this, "No UI link is created for Dependency--- source:"+sourceXpath +" ; target:"+targetXpath +"...:"+CumulativeMappingGenerator.getInstance().getErrorMessage());
}
}
//create class.attribute--table.column mapping
// myModelMeta.getPreservedMappedTag().clear();
for (UMLPackage pkg : myUMLModel.getPackages())
{
for (UMLPackage pkg2 : pkg.getPackages()) {
for (UMLClass clazz : pkg2.getClasses()) {
StringBuffer pathKey = new StringBuffer(ModelUtil.getFullPackageName(clazz));
for (UMLAttribute att : clazz.getAttributes()) {
for (UMLTaggedValue tagValue : att.getTaggedValues()) {
String sourceXpath = "";
String targetXpath = "";
if (tagValue.getName().contains("mapped-attribute")
|| tagValue.getName().contains("implements-association")) {
targetXpath = pathKey + "."
+ clazz.getName() + "."
+ att.getName();
sourceXpath = CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL ) + "."
+ tagValue.getValue();
DefaultMutableTreeNode sourceNode = (DefaultMutableTreeNode) sourceNodes
.get(sourceXpath);
DefaultMutableTreeNode targetNode = (DefaultMutableTreeNode) targetNodes
.get(targetXpath);
if (sourceNode == null||targetNode == null)
{
Log.logInfo(this, "Mapping missing--- source:"+sourceXpath +" ; target:"+targetXpath);
continue;
}
SDKMetaData sourceSDKMetaData = (SDKMetaData) sourceNode.getUserObject();
sourceSDKMetaData.setMapped(true);
isSuccess = cumulativeMappingGenerator.map(sourceXpath, targetXpath, false);
isSuccess = isSuccess&&
getMappingDataManager().createMapping((MappableNode) sourceNode,(MappableNode) targetNode);
// if (!isSuccess)
// {
// //no UI link is created for the mapped table.column
// //"mapped-attributes"/"implements-association"
// String prvdTag=tagValue.getName()+":"+tagValue.getValue();
// CumulativeMappingGenerator.getInstance().getMetaModel().getPreservedMappedTag().add(prvdTag);
// logger.logInfo(this, "No UI link is created, preserve the mapping:"+prvdTag);
// }
}//tag level loop
}//tag list level loop
}//attribute level loop
}//table level loop
}//data model package level loop
}//model level package level loop
}
private void buildHash(Hashtable hashtable, DefaultMutableTreeNode root,
String parent) {
if ((root.getUserObject().toString().equals("Object Model") && parent
.equals(CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL )))
|| (root.getUserObject().toString().equals("Data Model") && parent
.equals(CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL )))) {
for (int i = 0; i < root.getChildCount(); i++) {
buildHash(hashtable, (DefaultMutableTreeNode) root
.getChildAt(i), parent);
}
} else {
String treeString;
if (root.getUserObject() instanceof String) {
treeString = (String) root.getUserObject();
} else {
treeString = ((MetaObjectImpl) root.getUserObject()).getName();
}
hashtable.put(parent + "." + treeString, root);
if (root.isLeaf())
return;
for (int i = 0; i < root.getChildCount(); i++) {
buildHash(hashtable, (DefaultMutableTreeNode) root
.getChildAt(i), parent + "." + treeString);
}
}
}
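// Illustrative note: buildHash() flattens the tree into fully qualified keys of the
// form "<prefix>.<package>...<node name>" (for example, a hypothetical class node
// "Patient" under package "domain" would be stored under "<prefix>.domain.Patient"),
// which is exactly the xpath format looked up later in processXmiModel().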
public Map getMenuItems(String menu_name) {
Action action = null;
ContextManager contextManager = ContextManager.getContextManager();
Map<String, Action> actionMap = contextManager.getClientMenuActions(
MenuConstants.DB_TO_OBJECT, menu_name);
if (MenuConstants.FILE_MENU_NAME.equals(menu_name)) {
JRootPane rootPane = this.getRootPane();
if (rootPane != null) {// a non-null root pane implies this panel
// is fully displayed;
// conversely, if it is null, the panel is still under
// construction.
contextManager.enableAction(ActionConstants.NEW_O2DB_MAP_FILE,
false);
contextManager.enableAction(ActionConstants.OPEN_O2DB_MAP_FILE,
true);
}
}
// since the action depends on the panel instance,
// the old action instance should be removed
if (actionMap != null)
contextManager.removeClientMenuAction(MenuConstants.CSV_SPEC,
menu_name, "");
action = new gov.nih.nci.caadapter.ui.mapping.mms.actions.SaveObjectToDbMapAction(
this);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.FILE_MENU_NAME, ActionConstants.SAVE, action);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.TOOLBAR_MENU_NAME, ActionConstants.SAVE, action);
action.setEnabled(true);
action = new gov.nih.nci.caadapter.ui.mapping.mms.actions.SaveAsObjectToDbMapAction(
this);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.FILE_MENU_NAME, ActionConstants.SAVE_AS, action);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.TOOLBAR_MENU_NAME, ActionConstants.SAVE_AS,
action);
action.setEnabled(true);
action = new gov.nih.nci.caadapter.ui.mapping.mms.actions.AnotateAction(
this);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.FILE_MENU_NAME, ActionConstants.ANOTATE, action);
action.setEnabled(true);
action = new gov.nih.nci.caadapter.ui.mapping.mms.actions.ValidateObjectToDbMapAction(
this);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.FILE_MENU_NAME, ActionConstants.VALIDATE, action);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.TOOLBAR_MENU_NAME, ActionConstants.VALIDATE,
action);
action.setEnabled(true);
action = new gov.nih.nci.caadapter.ui.mapping.hl7.actions.CloseMapAction(
this);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.FILE_MENU_NAME, ActionConstants.CLOSE, action);
action.setEnabled(true);
action = new gov.nih.nci.caadapter.ui.mapping.mms.actions.ValidateObjectToDbMapAction(
this);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.REPORT_MENU_NAME,
ActionConstants.GENERATE_REPORT, action);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.TOOLBAR_MENU_NAME,
ActionConstants.GENERATE_REPORT, action);
action.setEnabled(true);
action = new RefreshMapAction(this);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.TOOLBAR_MENU_NAME, ActionConstants.REFRESH,
action);
action.setEnabled(true);
actionMap = contextManager.getClientMenuActions(
MenuConstants.DB_TO_OBJECT, menu_name);
// }
return actionMap;
}
/**
* return the open action inherited with this client.
*/
public Action getDefaultOpenAction() {
ContextManager contextManager = ContextManager.getContextManager();
return contextManager
.getDefinedAction(ActionConstants.OPEN_O2DB_MAP_FILE);
}
/**
* Explicitly reload information from the internal given file.
*
* @throws Exception
*/
public void reload() throws Exception {
processOpenMapFile(getSaveFile());
}
protected TreeDefaultDropTransferHandler getTargetTreeDropTransferHandler() {
return this.mmsTargetTreeDropTransferHandler;
}
/**
* Reload the file specified in the parameter.
*
* @param changedFileMap
*/
public void reload(
Map<MappingFileSynchronizer.FILE_TYPE, File> changedFileMap) {
/**
* Design rationale: 1) if the changedFileMap is null, simply return; 2)
* if the getSaveFile() method does not return null, it implies current
* panel associates with a mapping file, just reload the whole mapping
* file so as to refresh those mapping relationship; 3) if the
* getSaveFile() returns null, just reload source and/or target file
* within the changedFileMap, and ignore the checking of
* MappingFileSynchronizer.FILE_TYPE.Mapping_File item in the map;
*/
if (changedFileMap == null) {
return;
}
File existMapFile = getSaveFile();
try {
if (existMapFile != null) {
if (existMapFile.exists()) {
processOpenMapFile(existMapFile);
} else {// exist map file does not exist anymore
JOptionPane.showMessageDialog(this, existMapFile
.getAbsolutePath()
+ " does not exist or is not accessible anymore",
"File Error", JOptionPane.ERROR_MESSAGE);
return;
}
} else {// exist map file does not exist, simply reload source
// and/or target file
Iterator it = changedFileMap.keySet().iterator();
while (it.hasNext()) {
MappingFileSynchronizer.FILE_TYPE key = (MappingFileSynchronizer.FILE_TYPE) it
.next();
File file = changedFileMap.get(key);
if (GeneralUtilities.areEqual(MappingFileSynchronizer.FILE_TYPE.Source_File, key)) {
processOpenSourceTree(file, true, true);
} else if (GeneralUtilities.areEqual(MappingFileSynchronizer.FILE_TYPE.Target_File, key)) {
processOpenTargetTree(file, true, true);
}
}// end of while
}// end of else
} catch (Exception e) {
DefaultSettings.reportThrowableToLogAndUI(this, e, "", this, false,
false);
}
}
/**
* Returns an ImageIcon, or null if the path was invalid.
*/
protected static ImageIcon createImageIcon(String path)
{
java.net.URL imgURL = DefaultSettings.class.getClassLoader().getResource("images/" + path);
if (imgURL != null)
{
return new ImageIcon(imgURL);
} else
{
System.err.println("Couldn't find file: " + imgURL.toString() + " & " + path);
return null;
}
}
}
/**
* HISTORY : $Log: not supported by cvs2svn $
* HISTORY : Revision 1.42 2009/09/29 17:39:28 wangeug
* HISTORY : exclude valueDomain from mapping panel view
* HISTORY :
* HISTORY : Revision 1.41 2009/07/30 17:37:31 wangeug
* HISTORY : clean codes: implement 4.1.1 requirements
* HISTORY :
* HISTORY : Revision 1.40 2009/07/14 16:36:48 wangeug
* HISTORY : clean codes
* HISTORY :
* HISTORY : Revision 1.39 2009/07/10 19:57:04 wangeug
* HISTORY : MMS re-engineering
* HISTORY :
* HISTORY : Revision 1.38 2009/06/12 15:53:49 wangeug
* HISTORY : clean code: caAdapter MMS 4.1.1
* HISTORY :
* HISTORY : Revision 1.37 2008/09/26 20:35:27 linc
* HISTORY : Updated according to code standard.
* HISTORY :
* HISTORY : Revision 1.36 2008/06/09 19:54:06 phadkes
* HISTORY : New license text replaced for all .java files.
* HISTORY :
* HISTORY : Revision 1.35 2008/06/03 20:12:03 wangeug
* HISTORY : use logger and preserve the primary key tag if mapped to an Object rather than an Object.Attribute
* HISTORY :
* HISTORY : Revision 1.34 2008/05/30 17:35:05 wangeug
* HISTORY : add list to keep preserved mapping information
* HISTORY :
* HISTORY : Revision 1.33 2008/05/29 14:35:16 wangeug
* HISTORY : use caCORE SDK 4.0 process "mapped-attributes" tagvalue
* HISTORY :
* HISTORY : Revision 1.32 2008/05/22 15:48:49 wangeug
* HISTORY : integrate with caCORE SDK to generate Hibernate mapping
* HISTORY :
* HISTORY : Revision 1.31 2007/12/13 21:09:33 wangeug
* HISTORY : resolve code dependence in compiling
* HISTORY :
* HISTORY : Revision 1.30 2007/11/16 17:18:36 wangeug
* HISTORY : clean codes: remove unused "import"
* HISTORY :
* HISTORY : Revision 1.29 2007/10/11 19:06:26 schroedn
* HISTORY : fixed HBMGenerator save error
* HISTORY :
* HISTORY : Revision 1.28 2007/09/21 04:41:08 wuye
* HISTORY : removed system.out
* HISTORY :
* HISTORY : Revision 1.27 2007/09/20 16:40:14 schroedn
* HISTORY : License text
* HISTORY :
* HISTORY : Revision 1.26 2007/09/17 15:08:14 wuye
* HISTORY : added modify discriminator value capability
* HISTORY :
* HISTORY : Revision 1.25 2007/09/14 22:40:08 wuye
* HISTORY : Fixed discriminator issue
* HISTORY :
* HISTORY : Revision 1.24 2007/09/14 15:06:25 wuye
* HISTORY : Added support for table per inheritence structure
* HISTORY :
* HISTORY : Revision 1.23 2007/09/13 21:39:32 wuye
* HISTORY : change arraylist to hashmap
* HISTORY :
* HISTORY : Revision 1.22 2007/09/13 20:48:31 schroedn
* HISTORY : CLob, Discriminator, Lazy/Eager
* HISTORY :
* HISTORY : Revision 1.21 2007/09/13 18:53:40 wuye
* HISTORY : Code re-org
* HISTORY :
* HISTORY : Revision 1.20 2007/09/13 14:20:15 schroedn
* HISTORY : Changes the graphics for Clob/Lazy/Eager/Discriminator
* HISTORY :
* HISTORY : Revision 1.19 2007/09/12 20:56:00 wuye
* HISTORY : Fixed the load from association "lazy-load"
* HISTORY :
* HISTORY : Revision 1.18 2007/09/12 17:57:55 schroedn
* HISTORY : CLob, Discriminator, Lazy/Eager
* HISTORY :
* HISTORY : Revision 1.17 2007/09/12 16:04:15 schroedn
* HISTORY : PreferenceManager -> CaadapterUtil
* HISTORY :
* HISTORY : Revision 1.16 2007/09/12 14:56:46 schroedn
* HISTORY : *** empty log message ***
* HISTORY :
* HISTORY : Revision 1.14 2007/09/11 20:38:40 schroedn
* HISTORY : CLob, Discriminator, Lazy/Eager
* HISTORY :
* HISTORY : Revision 1.12 2007/09/05 15:16:33 schroedn
* HISTORY : Added icons to PK and Lazy/Eager
* HISTORY :
* HISTORY : Revision 1.11 2007/08/30 19:32:08 schroedn
* HISTORY : fixed bug with loading without preferences set
* HISTORY :
* HISTORY : Revision 1.10 2007/08/28 18:36:08 schroedn
* HISTORY : Added a NULL check for preferences
* HISTORY :
* HISTORY : Revision 1.9 2007/08/10 15:57:39 schroedn
* HISTORY : New Feature - Preferences to change prefex in XMI
* HISTORY :
* HISTORY : Revision 1.8 2007/08/09 18:14:31 schroedn
* HISTORY : New Feature - Preferences to change prefex in XMI
* HISTORY :
* HISTORY : Revision 1.7 2007/08/09 16:24:40 schroedn
* HISTORY : New Feature - Preferences to change prefex in XMI
* HISTORY :
* HISTORY : Revision 1.6 2007/08/07 20:50:47 schroedn
* HISTORY : New Feature, Primary Key and Lazy/Eager functions added to MMS
* HISTORY :
* HISTORY : Revision 1.5 2007/08/07 15:54:47 schroedn
* HISTORY : New Feature, Primary Key and Lazy/Eager functions added to MMS
* HISTORY :
* HISTORY : Revision 1.4 2007/07/03 19:33:48 wangeug
* HISTORY : initila loading hl7 code without "clone"
* HISTORY :
* HISTORY : Revision 1.3 2007/06/13 17:22:08 schroedn
* HISTORY : removed functions
* HISTORY :
* HISTORY : Revision 1.2 2007/06/07 16:17:00 schroedn
* HISTORY : Edits to sync with new codebase and java webstart
* HISTORY : HISTORY : Revision 1.1
* 2007/04/03 16:17:57 wangeug HISTORY : initial loading HISTORY : HISTORY :
* Revision 1.14 2006/12/20 16:25:39 wuye HISTORY : Update HMB open file option
* HISTORY : HISTORY : Revision 1.13 2006/12/12 16:53:39 wuye HISTORY : Comment
* out System.out HISTORY : HISTORY : Revision 1.12 2006/11/15 06:28:21 wuye
* HISTORY : added checking whether the mapping file is valide during open map
* file process HISTORY : HISTORY : Revision 1.11 2006/11/14 15:24:16 wuye
* HISTORY : Added validation funcationality HISTORY : HISTORY : Revision 1.10
* 2006/11/10 14:43:41 wuye HISTORY : Disable the validate button on toolbar
* HISTORY : HISTORY : Revision 1.9 2006/10/30 19:51:38 wuye HISTORY : Add a
* dialog for hbm generation HISTORY : HISTORY : Revision 1.8 2006/10/30
* 16:28:57 wuye HISTORY : Modified the Menu structure HISTORY : HISTORY :
* Revision 1.7 2006/10/23 16:20:25 wuye HISTORY : Made changes to ignore
* undragable node HISTORY : HISTORY : Revision 1.6 2006/10/20 21:31:28 wuye
* HISTORY : Added annotate and hbm file generation funcationality HISTORY :
* HISTORY : Revision 1.5 2006/10/10 17:13:25 wuye HISTORY : Added delete
* funcationality HISTORY : HISTORY : Revision 1.2 2006/09/28 19:30:38 wuye
* HISTORY : Removed classes that are not used HISTORY : HISTORY : Revision 1.1
* 2006/09/26 15:47:48 wuye HISTORY : New object 2 database mapping panel
* HISTORY :
*/
|
software/caadapter/src/java/gov/nih/nci/caadapter/ui/mapping/mms/Object2DBMappingPanel.java
|
/**
* <!-- LICENSE_TEXT_START -->
The contents of this file are subject to the caAdapter Software License (the "License"). You may obtain a copy of the License at the following location:
[caAdapter Home Directory]\docs\caAdapter_license.txt, or at:
http://ncicb.nci.nih.gov/infrastructure/cacore_overview/caadapter/indexContent/docs/caAdapter_License
* <!-- LICENSE_TEXT_END -->
*/
package gov.nih.nci.caadapter.ui.mapping.mms;
import gov.nih.nci.caadapter.common.BaseResult;
import gov.nih.nci.caadapter.common.Log;
import gov.nih.nci.caadapter.common.Message;
import gov.nih.nci.caadapter.common.MessageResources;
import gov.nih.nci.caadapter.common.MetaObject;
import gov.nih.nci.caadapter.common.MetaObjectImpl;
import gov.nih.nci.caadapter.common.MetaParser;
import gov.nih.nci.caadapter.common.SDKMetaData;
import gov.nih.nci.caadapter.common.util.Config;
import gov.nih.nci.caadapter.common.util.FileUtil;
import gov.nih.nci.caadapter.common.util.GeneralUtilities;
import gov.nih.nci.caadapter.common.util.CaadapterUtil;
import gov.nih.nci.caadapter.common.util.Iso21090Util;
import gov.nih.nci.caadapter.common.validation.ValidatorResult;
import gov.nih.nci.caadapter.common.validation.ValidatorResults;
import gov.nih.nci.caadapter.hl7.map.impl.MappingImpl;
import gov.nih.nci.caadapter.mms.generator.CumulativeMappingGenerator;
import gov.nih.nci.caadapter.mms.generator.HBMGenerateCacoreIntegrator;
import gov.nih.nci.caadapter.common.metadata.AttributeMetadata;
import gov.nih.nci.caadapter.common.metadata.ModelMetadata;
import gov.nih.nci.caadapter.common.metadata.ObjectMetadata;
import gov.nih.nci.caadapter.ui.common.ActionConstants;
import gov.nih.nci.caadapter.ui.common.DefaultSettings;
import gov.nih.nci.caadapter.ui.common.MappableNode;
import gov.nih.nci.caadapter.ui.common.MappingFileSynchronizer;
import gov.nih.nci.caadapter.ui.common.actions.TreeCollapseAllAction;
import gov.nih.nci.caadapter.ui.common.actions.TreeExpandAllAction;
import gov.nih.nci.caadapter.ui.common.context.ContextManager;
import gov.nih.nci.caadapter.ui.common.context.MenuConstants;
import gov.nih.nci.caadapter.ui.common.tree.DefaultSourceTreeNode;
import gov.nih.nci.caadapter.ui.common.tree.DefaultTargetTreeNode;
import gov.nih.nci.caadapter.ui.common.tree.TreeDefaultDropTransferHandler;
import gov.nih.nci.caadapter.ui.mapping.AbstractMappingPanel;
import gov.nih.nci.caadapter.ui.mapping.MappingMiddlePanel;
import gov.nih.nci.caadapter.ui.mapping.hl7.actions.RefreshMapAction;
import gov.nih.nci.caadapter.ui.mapping.mms.actions.MmsTargetTreeDropTransferHandler;
import gov.nih.nci.ncicb.xmiinout.domain.UMLAttribute;
import gov.nih.nci.ncicb.xmiinout.domain.UMLClass;
import gov.nih.nci.ncicb.xmiinout.domain.UMLDependency;
import gov.nih.nci.ncicb.xmiinout.domain.UMLInterface;
import gov.nih.nci.ncicb.xmiinout.domain.UMLModel;
import gov.nih.nci.ncicb.xmiinout.domain.UMLPackage;
import gov.nih.nci.ncicb.xmiinout.domain.UMLTaggedValue;
import gov.nih.nci.ncicb.xmiinout.util.ModelUtil;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.dnd.DnDConstants;
import java.awt.event.ActionEvent;
import java.io.File;
import java.io.FileReader;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import javax.swing.Action;
import javax.swing.BorderFactory;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JRootPane;
import javax.swing.JSplitPane;
import javax.swing.JToolBar;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreeNode;
/**
* The class is the main panel to construct the UI and initialize the utilities
* to facilitate mapping functions.
*
* @author OWNER: Ye Wu
* @author LAST UPDATE $Author: wangeug $
 * @since caAdapter v3.2
* @version $Revision: 1.43 $
* @date $Date: 2009-09-30 17:08:26 $
*/
public class Object2DBMappingPanel extends AbstractMappingPanel {
private static final String LOGID = "$RCSfile: Object2DBMappingPanel.java,v $";
public static String RCSID = "$Header: /share/content/gforge/caadapter/caadapter/components/userInterface/src/gov/nih/nci/caadapter/ui/mapping/mms/Object2DBMappingPanel.java,v 1.43 2009-09-30 17:08:26 wangeug Exp $";
private MmsTargetTreeDropTransferHandler mmsTargetTreeDropTransferHandler = null;
private static final String SELECT_XMI = "Open XMI file...";
private static final String SELECT_XSD = "Open XSD file...";
private static final String GENERATE_HBM = "Generate HBM Files";
public Object2DBMappingPanel() {
this("defaultObjectToDatabaseMapping");
}
public Object2DBMappingPanel(String name) {
this.setBorder(BorderFactory.createEmptyBorder());
this.setLayout(new BorderLayout());
this.add(getCenterPanel(false), BorderLayout.CENTER);
fileSynchronizer = new MappingFileSynchronizer(this);
}
protected JPanel getTopLevelLeftPanel() {
JPanel topCenterPanel = new JPanel(new BorderLayout());
topCenterPanel.setBorder(BorderFactory.createEmptyBorder());
JSplitPane centerSplitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT);
DefaultSettings.setDefaultFeatureForJSplitPane(centerSplitPane);
// construct source panel
sourceButtonPanel = new JPanel(new BorderLayout());
sourceButtonPanel.setBorder(BorderFactory.createEmptyBorder());
sourceLocationPanel = new JPanel(new BorderLayout(2, 0));
sourceLocationPanel.setBorder(BorderFactory.createEmptyBorder());
sourceTreeCollapseAllAction = new TreeCollapseAllAction(sTree);
sourceTreeExpandAllAction = new TreeExpandAllAction(sTree);
JToolBar sourceTreeToolBar = new JToolBar("Source Tree Tool Bar");
sourceTreeToolBar.setFloatable(false);
sourceTreeToolBar.add(sourceTreeExpandAllAction);
sourceTreeToolBar.add(sourceTreeCollapseAllAction);
sourceLocationPanel.add(sourceTreeToolBar, BorderLayout.WEST);
sourceLocationArea.setEditable(false);
sourceLocationArea.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 10), 24));
sourceLocationPanel.add(sourceLocationArea, BorderLayout.CENTER);
JButton openXMIButton = new JButton(SELECT_XMI);
sourceLocationPanel.add(openXMIButton, BorderLayout.EAST);
openXMIButton.setMnemonic('O');
openXMIButton.setToolTipText("Select XMI file...");
openXMIButton.addActionListener(this);
sourceButtonPanel.add(sourceLocationPanel, BorderLayout.NORTH);
// sourceScrollPane =
// DefaultSettings.createScrollPaneWithDefaultFeatures();
sourceScrollPane.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 4),
(int) (Config.FRAME_DEFAULT_HEIGHT / 1.5)));
sourceScrollPane.setSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 3),
(int) (Config.FRAME_DEFAULT_HEIGHT / 1.5)));
sourceButtonPanel.add(sourceScrollPane, BorderLayout.CENTER);
// construct target panel
targetButtonPanel = new JPanel(new BorderLayout());
targetButtonPanel.setBorder(BorderFactory.createEmptyBorder());
targetLocationPanel = new JPanel(new BorderLayout(2, 0));
targetLocationPanel.setBorder(BorderFactory.createEmptyBorder());
targetTreeCollapseAllAction = new TreeCollapseAllAction(tTree);
targetTreeExpandAllAction = new TreeExpandAllAction(tTree);
JToolBar targetTreeToolBar = new JToolBar("Target Tree Tool Bar");
targetTreeToolBar.setFloatable(false);
targetTreeToolBar.add(targetTreeExpandAllAction);
targetTreeToolBar.add(targetTreeCollapseAllAction);
targetLocationPanel.add(targetTreeToolBar, BorderLayout.WEST);
targetLocationArea.setEditable(false);
targetLocationArea.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 10), 24));
targetLocationPanel.add(targetLocationArea, BorderLayout.CENTER);
targetButtonPanel.add(targetLocationPanel, BorderLayout.NORTH);
// targetScrollPane =
// DefaultSettings.createScrollPaneWithDefaultFeatures();
targetScrollPane.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 3),
Config.FRAME_DEFAULT_HEIGHT / 2));
targetButtonPanel.add(targetScrollPane, BorderLayout.CENTER);
targetButtonPanel.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 5),
(int) (Config.FRAME_DEFAULT_HEIGHT / 1.5)));
// construct middle panel
JPanel centerFuncationPanel = new JPanel(new BorderLayout(2, 0));
JPanel middleContainerPanel = new JPanel(new BorderLayout());
// placeholder that keeps the source and target button panels the same width
// so the mapping graphs are drawn in alignment.
JLabel placeHolderLabel = new JLabel();
placeHolderLabel.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 16), 24));
middlePanel = new MappingMiddlePanel(this);
middlePanel.setKind("o2db");
middlePanel.setSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 3),
(int) (Config.FRAME_DEFAULT_HEIGHT / 1.5)));
JButton generateHMBButton = new JButton("Generate HBM Files");
centerFuncationPanel.add(generateHMBButton, BorderLayout.CENTER);
generateHMBButton.addActionListener(this);
centerFuncationPanel.add(placeHolderLabel, BorderLayout.EAST);
centerFuncationPanel.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH / 3.5), 24));
middleContainerPanel.add(centerFuncationPanel, BorderLayout.NORTH);
middleContainerPanel.add(middlePanel, BorderLayout.CENTER);
JSplitPane rightSplitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT);
DefaultSettings.setDefaultFeatureForJSplitPane(rightSplitPane);
rightSplitPane.setDividerLocation(0.5);
rightSplitPane.setLeftComponent(middleContainerPanel);
rightSplitPane.setRightComponent(targetButtonPanel);
centerSplitPane.setLeftComponent(sourceButtonPanel);
centerSplitPane.setRightComponent(rightSplitPane);
topCenterPanel.add(centerSplitPane, BorderLayout.CENTER);
topCenterPanel.setPreferredSize(new Dimension(
(int) (Config.FRAME_DEFAULT_WIDTH * 0.8),
(int) (Config.FRAME_DEFAULT_HEIGHT / 1.5)));
return topCenterPanel;
}
public void actionPerformed(ActionEvent e) {
String command = e.getActionCommand();
try {
boolean everythingGood = true;
if (SELECT_XMI.equals(command)) {
File file = DefaultSettings.getUserInputOfFileFromGUI(this, // FileUtil.getUIWorkingDirectoryPath(),
".xmi", "Open XMI file ...", false, false);
if (file != null) {
// everythingGood = processOpenSourceTree(file, true, true);
ValidatorResults results = processOpenMapFile(file);
}
}
else if (GENERATE_HBM.equals(command)) {
File fileFromPanel = getSaveFile();
if (fileFromPanel == null) {
if (!isSourceTreePopulated() || !isTargetTreePopulated()) {
String msg = "Conduct object to database mapping before saving the map specification.";
JOptionPane.showMessageDialog(this, msg, "Error",
JOptionPane.ERROR_MESSAGE);
return;
}
}
try {
JFileChooser fileChooser = new JFileChooser(fileFromPanel);
fileChooser
.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
int result = fileChooser.showOpenDialog(this);
switch (result) {
case JFileChooser.APPROVE_OPTION:
if (!fileChooser.getSelectedFile().exists()) {
boolean mkdirResult = fileChooser.getSelectedFile()
.mkdirs();
if (!mkdirResult) {
JOptionPane
.showMessageDialog(
this,
"Error creating specified directory, please make sure the directory name is correct!",
"Error",
JOptionPane.ERROR_MESSAGE);
return;
}
}
break;
case JFileChooser.CANCEL_OPTION:
System.out.println("HBM Generatoin Cancelled!");
return;
case JFileChooser.ERROR_OPTION:
return;
}
saveMappingFile();
String outputDir=fileChooser.getSelectedFile().getAbsolutePath();
UMLModel model=CumulativeMappingGenerator.getInstance().getMetaModel().getHandler().getModel();
HBMGenerateCacoreIntegrator.getInstance().generateMapping(model,outputDir);
JOptionPane.showMessageDialog(getParent(),
"HBM files are generated at "+ fileChooser.getSelectedFile().getAbsolutePath(),
"HBM Generation Complete",
JOptionPane.INFORMATION_MESSAGE);
} catch (Exception ex) {
ex.printStackTrace();
}
}
if (!everythingGood) {
Message msg = MessageResources
.getMessage("GEN3", new Object[0]);
JOptionPane.showMessageDialog(this, msg.toString(), "Error",
JOptionPane.ERROR_MESSAGE);
}
} catch (Exception e1) {
DefaultSettings.reportThrowableToLogAndUI(this, e1, "", this,
false, false);
}
}
private void saveMappingFile() {
File file = getSaveFile();
if (file == null) {
file = DefaultSettings
.getUserInputOfFileFromGUI(this,
Config.MAP_FILE_DEFAULT_EXTENTION, "Save As...",
true, true);
if (file == null) {
// user cancelled the action
return;
}
}
String mapFileName = file.getAbsolutePath().replaceAll(".xmi", ".map");
try {
// CumulativeMappingToMappingFileGenerator.writeMappingFile(new File(mapFileName), file.getAbsolutePath());
ModelMetadata xmiMetada = CumulativeMappingGenerator.getInstance().getMetaModel();
xmiMetada.getHandler().save( file.getAbsolutePath());
setChanged(false);
} catch (Exception ex) {
ex.printStackTrace();
} finally {
setSaveFile(file);
}
}
protected TreeNode loadSourceTreeData(Object metaInfo, File file)
throws Exception {
TreeNode nodes = new DefaultMutableTreeNode("Object Model");
CumulativeMappingGenerator.init(file.getAbsolutePath());
ModelMetadata myModel = CumulativeMappingGenerator.getInstance().getMetaModel();
LinkedHashMap myMap = myModel.getModelMetadata();
Set keySet = myMap.keySet();
Iterator keySetIterator = keySet.iterator();
while (keySetIterator.hasNext()) {
String key = (String) keySetIterator.next();
if (key.contains( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL ) + ".") ) {
if (myMap.get(key) instanceof gov.nih.nci.caadapter.common.metadata.ObjectMetadata) {
construct_node(nodes, key, (CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL ) + ".").length(), true, true);
} else {
construct_node(nodes, key, (CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL ) + ".").length(), false, true);
}
}
}
return nodes;
}
private void construct_node(TreeNode node, String fullName, int prefixLen, boolean isTable, boolean isSourceNode)
{
String name = fullName.substring(prefixLen, fullName.length());
String[] pks = name.split("\\.");
ModelMetadata myModel = CumulativeMappingGenerator.getInstance().getMetaModel();
LinkedHashMap myMap = myModel.getModelMetadata();
if (pks.length <= 0)
return;
if (pks.length == 1) {
if (isSourceNode)
((DefaultMutableTreeNode) node).add(new DefaultSourceTreeNode(
myMap.get(fullName), true));
else
((DefaultMutableTreeNode) node).add(new DefaultTargetTreeNode(
myMap.get(fullName), true));
return;
}
DefaultMutableTreeNode father = (DefaultMutableTreeNode) node;
for (int i = 0; i < pks.length - 1; i++) {
boolean exist = false;
Enumeration children = father.children();
while (children.hasMoreElements()) {
DefaultMutableTreeNode current = (DefaultMutableTreeNode) children
.nextElement();
if (current.toString().equals(pks[i])) {
exist = true;
father = current;
break;
}
}
if (!exist) {
DefaultMutableTreeNode newTreeNode = new DefaultMutableTreeNode(
pks[i], true);
father.add(newTreeNode);
father = newTreeNode;
}
}
DefaultMutableTreeNode newTreeNode;
if (isSourceNode)
{
newTreeNode = new DefaultSourceTreeNode(myMap.get(fullName),true);
if (newTreeNode.getUserObject() instanceof AttributeMetadata)
addIsoComplexTypeAttribute(1,(DefaultSourceTreeNode)newTreeNode, myMap);
}
else
newTreeNode = new DefaultTargetTreeNode(myMap.get(fullName), true);
father.add(newTreeNode);
return;
}
private void addIsoComplexTypeAttribute(int attrLevel,DefaultSourceTreeNode elementNode, LinkedHashMap metaHash )
{
if (attrLevel>3)
return;
AttributeMetadata elementMeta=(AttributeMetadata)elementNode.getUserObject();
ObjectMetadata childObject =Iso21090Util.resolveAttributeDatatype(metaHash, elementMeta.getDatatype());
if (childObject==null)
return;
for (AttributeMetadata attrMeta:childObject.getAttributes())
{
DefaultSourceTreeNode childAttrNode=new DefaultSourceTreeNode(attrMeta,true);
elementNode.add(childAttrNode);
addIsoComplexTypeAttribute(attrLevel+1,childAttrNode, metaHash );
}
}
protected TreeNode loadTargetTreeData(Object metaInfo, File absoluteFile)
throws Exception {
TreeNode nodes = new DefaultMutableTreeNode("Data Model");
ModelMetadata myModel = CumulativeMappingGenerator.getInstance().getMetaModel();
LinkedHashMap myMap = myModel.getModelMetadata();
Set keySet = myMap.keySet();
Iterator keySetIterator = keySet.iterator();
while (keySetIterator.hasNext()) {
String key = (String) keySetIterator.next();
if (key.contains( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL ) + ".")) {
if (myMap.get(key) instanceof gov.nih.nci.caadapter.common.metadata.ObjectMetadata) {
construct_node(nodes, key, ( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL ) + ".").length(), true, false);
} else {
construct_node(nodes, key, ( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL ) + ".").length(), false, false);
}
}
}
return nodes;
}
protected void buildTargetTree(Object metaInfo, File absoluteFile,
boolean isToResetGraph) throws Exception {
super.buildTargetTree(metaInfo, absoluteFile, isToResetGraph);
tTree.setCellRenderer(new MMSRenderer());
sTree.setCellRenderer(new MMSRendererPK());
// instantiate the "DropTransferHandler"
mmsTargetTreeDropTransferHandler = new MmsTargetTreeDropTransferHandler(
tTree, getMappingDataManager(), DnDConstants.ACTION_LINK);
}
/**
* Called by actionPerformed() and overridable by descendant classes.
*
* @param file
* @throws Exception
*/
protected boolean processOpenSourceTree(File file, boolean isToResetGraph,
boolean supressReportIssuesToUI) throws Exception {
if( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL ) == null )
{
CaadapterUtil.savePrefParams( Config.MMS_PREFIX_OBJECTMODEL , "Logical View.Logical Model");
}
if( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL ) == null )
{
CaadapterUtil.savePrefParams( Config.MMS_PREFIX_DATAMODEL , "Logical View.Data Model");
}
MetaObject metaInfo = null;
buildSourceTree(metaInfo, file, isToResetGraph);
middlePanel.getMappingDataManager().registerSourceComponent(metaInfo,
file);
buildTargetTree(metaInfo, file, isToResetGraph);
middlePanel.getMappingDataManager().registerTargetComponent(metaInfo,
file);
return true;
}
/**
* Called by actionPerformed() and overridable by descendant classes.
*
* @param file
* @throws Exception
*/
protected boolean processOpenTargetTree(File file, boolean isToResetGraph,
boolean supressReportIssuesToUI) throws Exception {
String fileExtension = FileUtil.getFileExtension(file, true);
// parse the file into a meta object graph.
		MetaParser parser = null;
		MetaObject metaInfo = null;
		BaseResult returnResult = null;
		if (parser == null) {
			// no concrete MetaParser is wired into this panel; bail out rather than
			// dereferencing the null parser below and throwing a NullPointerException
			return false;
		}
		returnResult = parser.parse(new FileReader(file));
ValidatorResults validatorResults = returnResult.getValidatorResults();
if (validatorResults != null && validatorResults.hasFatal()) {
Message msg = validatorResults.getMessages(
ValidatorResult.Level.FATAL).get(0);
DefaultSettings.reportThrowableToLogAndUI(this, null, msg
.toString(), this, true, supressReportIssuesToUI);
return false;
}
buildTargetTree(metaInfo, file, isToResetGraph);
middlePanel.getMappingDataManager().registerTargetComponent(metaInfo, file);
return true;
}
/**
* Called by actionPerformed() and overridable by descendant classes.
*
* @param file
* @throws Exception
	 * changed from protected to public by sean
*/
public ValidatorResults processOpenMapFile(File file) throws Exception
{
// Read the XMI Mapping attributes
String fileName = file.getAbsolutePath();
boolean success = CumulativeMappingGenerator.init(fileName);
if (success) {
ModelMetadata xmiModelMeta = CumulativeMappingGenerator.getInstance().getMetaModel();
			if (xmiModelMeta == null) {
				JOptionPane.showMessageDialog(null, "Error opening XMI file");
				return null;
			}
boolean isSuccess;
// Read XMI File and construct Target and Source Trees
processOpenSourceTree(file, true, true);
DefaultMutableTreeNode rootSTree = (DefaultMutableTreeNode) sTree.getModel().getRoot();
DefaultMutableTreeNode rootTTree = (DefaultMutableTreeNode) tTree.getModel().getRoot();
Hashtable sourceNodes = new Hashtable();
Hashtable targetNodes = new Hashtable();
buildHash(sourceNodes, rootSTree, CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL ));
buildHash(targetNodes, rootTTree, CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL ));
MappingImpl newMappingImpl = new MappingImpl();
newMappingImpl.setSourceComponent(null);
newMappingImpl.setTargetComponent(null);
middlePanel.getMappingDataManager().setMappingData(newMappingImpl);
middlePanel.getMappingDataManager().clearAllGraphCells();
setSaveFile(file);
processXmiModel(xmiModelMeta,sourceNodes, targetNodes);
} else {
JOptionPane.showMessageDialog( null, "The .map or .xmi file selected is not valid. Please check the export settings in EA and try again.");
}
return null;
}
private void processXmiModel(ModelMetadata myModelMeta, Hashtable sourceNodes, Hashtable targetNodes)
{
CumulativeMappingGenerator cumulativeMappingGenerator = CumulativeMappingGenerator.getInstance();
// Lets try to get all the details
UMLModel myUMLModel = myModelMeta.getModel();
//read and set model prefix
if ( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL ) != null )
{
myModelMeta.setMmsPrefixDataModel(CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL ));
} else {
myModelMeta.setMmsPrefixDataModel( "Logical View.Data Model" );
}
if ( CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL ) != null )
{
myModelMeta.setMmsPrefixObjectModel(CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL ));
} else {
myModelMeta.setMmsPrefixObjectModel( "Logical View.Logical Model" );
}
boolean isSuccess;
// create Object-table dependency mapping UI
for (UMLDependency dep : myUMLModel.getDependencies())
{
String sourceXpath = "";
String targetXpath = "";
UMLClass client = (UMLClass) dep.getClient();
if (dep.getSupplier() instanceof UMLInterface)
{
Log.logInfo(this, "found UMLInterface:"+((UMLInterface)dep.getSupplier()).getName());
continue;
}
UMLClass supplier = (UMLClass) dep.getSupplier();
StringBuffer pathKey = new StringBuffer(ModelUtil.getFullPackageName(client));
targetXpath = pathKey + "." + client.getName();
pathKey = new StringBuffer(ModelUtil.getFullPackageName(supplier));
sourceXpath = pathKey + "." + supplier.getName();
DefaultMutableTreeNode sourceNode = (DefaultMutableTreeNode) sourceNodes.get(sourceXpath);
DefaultMutableTreeNode targetNode = (DefaultMutableTreeNode) targetNodes.get(targetXpath);
if (sourceNode == null || targetNode == null)
{
Log.logInfo(this, "Dependency missing--- source:"+sourceXpath +" ; target:"+targetXpath);
continue;
}
SDKMetaData sourceSDKMetaData = (SDKMetaData) sourceNode.getUserObject();
SDKMetaData targetSDKMetaData = (SDKMetaData) targetNode.getUserObject();
sourceSDKMetaData.setMapped(true);
//loading XMI and create mapping UI
isSuccess = cumulativeMappingGenerator.map(sourceXpath, targetXpath, false);
isSuccess = isSuccess&& getMappingDataManager().createMapping(
(MappableNode) sourceNode,
(MappableNode) targetNode);
if (!isSuccess)
{
Log.logInfo(this, "No UI link is created for Dependency--- source:"+sourceXpath +" ; target:"+targetXpath +"...:"+CumulativeMappingGenerator.getInstance().getErrorMessage());
}
}
//create class.attribute--table.column mapping
// myModelMeta.getPreservedMappedTag().clear();
for (UMLPackage pkg : myUMLModel.getPackages())
{
for (UMLPackage pkg2 : pkg.getPackages()) {
for (UMLClass clazz : pkg2.getClasses()) {
StringBuffer pathKey = new StringBuffer(ModelUtil.getFullPackageName(clazz));
for (UMLAttribute att : clazz.getAttributes()) {
for (UMLTaggedValue tagValue : att.getTaggedValues()) {
String sourceXpath = "";
String targetXpath = "";
if (tagValue.getName().contains("mapped-attribute")
|| tagValue.getName().contains("implements-association")) {
targetXpath = pathKey + "."
+ clazz.getName() + "."
+ att.getName();
sourceXpath = CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL ) + "."
+ tagValue.getValue();
DefaultMutableTreeNode sourceNode = (DefaultMutableTreeNode) sourceNodes
.get(sourceXpath);
DefaultMutableTreeNode targetNode = (DefaultMutableTreeNode) targetNodes
.get(targetXpath);
if (sourceNode == null||targetNode == null)
{
Log.logInfo(this, "Mapping missing--- source:"+sourceXpath +" ; target:"+targetXpath);
continue;
}
SDKMetaData sourceSDKMetaData = (SDKMetaData) sourceNode.getUserObject();
sourceSDKMetaData.setMapped(true);
isSuccess = cumulativeMappingGenerator.map(sourceXpath, targetXpath, false);
isSuccess = isSuccess&&
getMappingDataManager().createMapping((MappableNode) sourceNode,(MappableNode) targetNode);
// if (!isSuccess)
// {
// //no UI link is created for the mapped table.column
// //"mapped-attributes"/"implements-association"
// String prvdTag=tagValue.getName()+":"+tagValue.getValue();
// CumulativeMappingGenerator.getInstance().getMetaModel().getPreservedMappedTag().add(prvdTag);
// logger.logInfo(this, "No UI link is created, preserve the mapping:"+prvdTag);
// }
}//tag level loop
}//tag list level loop
}//attribute level loop
}//table level loop
}//data model package level loop
}//model level package level loop
}
private void buildHash(Hashtable hashtable, DefaultMutableTreeNode root,
String parent) {
if ((root.getUserObject().toString().equals("Object Model") && parent
.equals(CaadapterUtil.readPrefParams( Config.MMS_PREFIX_OBJECTMODEL )))
|| (root.getUserObject().toString().equals("Data Model") && parent
.equals(CaadapterUtil.readPrefParams( Config.MMS_PREFIX_DATAMODEL )))) {
for (int i = 0; i < root.getChildCount(); i++) {
buildHash(hashtable, (DefaultMutableTreeNode) root
.getChildAt(i), parent);
}
} else {
String treeString;
if (root.getUserObject() instanceof String) {
treeString = (String) root.getUserObject();
} else {
treeString = ((MetaObjectImpl) root.getUserObject()).getName();
}
hashtable.put(parent + "." + treeString, root);
if (root.isLeaf())
return;
for (int i = 0; i < root.getChildCount(); i++) {
buildHash(hashtable, (DefaultMutableTreeNode) root
.getChildAt(i), parent + "." + treeString);
}
}
}
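	/*
	 * Illustration (assumption: the default model prefixes are in use). buildHash()
	 * flattens the source/target trees into lookup tables keyed by the same dotted
	 * path that processXmiModel() later rebuilds from the UML model, e.g.
	 *
	 *     "Logical View.Logical Model.Patient"            -> class node      (hypothetical)
	 *     "Logical View.Logical Model.Patient.firstName"  -> attribute node  (hypothetical)
	 *     "Logical View.Data Model.PATIENT.FIRST_NAME"    -> column node     (hypothetical)
	 *
	 * so that a dependency or "mapped-attribute" tag value can be resolved to its two
	 * tree nodes with a single Hashtable lookup when the mapping links are drawn.
	 */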
public Map getMenuItems(String menu_name) {
Action action = null;
ContextManager contextManager = ContextManager.getContextManager();
Map<String, Action> actionMap = contextManager.getClientMenuActions(
MenuConstants.DB_TO_OBJECT, menu_name);
if (MenuConstants.FILE_MENU_NAME.equals(menu_name)) {
JRootPane rootPane = this.getRootPane();
			if (rootPane != null) {// a non-null root pane implies this panel is fully displayed;
				// conversely, a null root pane implies the panel is still being constructed.
contextManager.enableAction(ActionConstants.NEW_O2DB_MAP_FILE,
false);
contextManager.enableAction(ActionConstants.OPEN_O2DB_MAP_FILE,
true);
}
}
// since the action depends on the panel instance,
// the old action instance should be removed
if (actionMap != null)
contextManager.removeClientMenuAction(MenuConstants.CSV_SPEC,
menu_name, "");
action = new gov.nih.nci.caadapter.ui.mapping.mms.actions.SaveObjectToDbMapAction(
this);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.FILE_MENU_NAME, ActionConstants.SAVE, action);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.TOOLBAR_MENU_NAME, ActionConstants.SAVE, action);
action.setEnabled(true);
action = new gov.nih.nci.caadapter.ui.mapping.mms.actions.SaveAsObjectToDbMapAction(
this);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.FILE_MENU_NAME, ActionConstants.SAVE_AS, action);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.TOOLBAR_MENU_NAME, ActionConstants.SAVE_AS,
action);
action.setEnabled(true);
action = new gov.nih.nci.caadapter.ui.mapping.mms.actions.AnotateAction(
this);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.FILE_MENU_NAME, ActionConstants.ANOTATE, action);
action.setEnabled(true);
action = new gov.nih.nci.caadapter.ui.mapping.mms.actions.ValidateObjectToDbMapAction(
this);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.FILE_MENU_NAME, ActionConstants.VALIDATE, action);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.TOOLBAR_MENU_NAME, ActionConstants.VALIDATE,
action);
action.setEnabled(true);
action = new gov.nih.nci.caadapter.ui.mapping.hl7.actions.CloseMapAction(
this);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.FILE_MENU_NAME, ActionConstants.CLOSE, action);
action.setEnabled(true);
action = new gov.nih.nci.caadapter.ui.mapping.mms.actions.ValidateObjectToDbMapAction(
this);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.REPORT_MENU_NAME,
ActionConstants.GENERATE_REPORT, action);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.TOOLBAR_MENU_NAME,
ActionConstants.GENERATE_REPORT, action);
action.setEnabled(true);
action = new RefreshMapAction(this);
contextManager.addClientMenuAction(MenuConstants.DB_TO_OBJECT,
MenuConstants.TOOLBAR_MENU_NAME, ActionConstants.REFRESH,
action);
action.setEnabled(true);
actionMap = contextManager.getClientMenuActions(
MenuConstants.DB_TO_OBJECT, menu_name);
// }
return actionMap;
}
/**
* return the open action inherited with this client.
*/
public Action getDefaultOpenAction() {
ContextManager contextManager = ContextManager.getContextManager();
return contextManager
.getDefinedAction(ActionConstants.OPEN_O2DB_MAP_FILE);
}
/**
* Explicitly reload information from the internal given file.
*
* @throws Exception
*/
public void reload() throws Exception {
processOpenMapFile(getSaveFile());
}
protected TreeDefaultDropTransferHandler getTargetTreeDropTransferHandler() {
return this.mmsTargetTreeDropTransferHandler;
}
/**
* Reload the file specified in the parameter.
*
* @param changedFileMap
*/
public void reload(
Map<MappingFileSynchronizer.FILE_TYPE, File> changedFileMap) {
/**
		 * Design rationale: 1) if the changedFileMap is null, simply return; 2)
		 * if getSaveFile() does not return null, the current panel is associated
		 * with a mapping file, so reload the whole mapping file to refresh the
		 * mapping relationships; 3) if getSaveFile() returns null, just reload the
		 * source and/or target file found in the changedFileMap, ignoring the
		 * MappingFileSynchronizer.FILE_TYPE.Mapping_File item in the map.
*/
if (changedFileMap == null) {
return;
}
File existMapFile = getSaveFile();
try {
if (existMapFile != null) {
if (existMapFile.exists()) {
processOpenMapFile(existMapFile);
				} else {// the existing map file no longer exists
JOptionPane.showMessageDialog(this, existMapFile
.getAbsolutePath()
+ " does not exist or is not accessible anymore",
"File Error", JOptionPane.ERROR_MESSAGE);
return;
}
			} else {// no map file is associated with this panel, so simply reload
					// the source and/or target file
Iterator it = changedFileMap.keySet().iterator();
while (it.hasNext()) {
MappingFileSynchronizer.FILE_TYPE key = (MappingFileSynchronizer.FILE_TYPE) it
.next();
File file = changedFileMap.get(key);
if (GeneralUtilities.areEqual(MappingFileSynchronizer.FILE_TYPE.Source_File, key)) {
processOpenSourceTree(file, true, true);
} else if (GeneralUtilities.areEqual(MappingFileSynchronizer.FILE_TYPE.Target_File, key)) {
processOpenTargetTree(file, true, true);
}
}// end of while
}// end of else
} catch (Exception e) {
DefaultSettings.reportThrowableToLogAndUI(this, e, "", this, false,
false);
}
}
/**
* Returns an ImageIcon, or null if the path was invalid.
*/
protected static ImageIcon createImageIcon(String path)
{
java.net.URL imgURL = DefaultSettings.class.getClassLoader().getResource("images/" + path);
if (imgURL != null)
{
return new ImageIcon(imgURL);
} else
{
System.err.println("Couldn't find file: " + imgURL.toString() + " & " + path);
return null;
}
}
}
/**
* HISTORY : $Log: not supported by cvs2svn $
* HISTORY : Revision 1.42 2009/09/29 17:39:28 wangeug
* HISTORY : exclude valueDomain from mapping panel view
* HISTORY :
* HISTORY : Revision 1.41 2009/07/30 17:37:31 wangeug
* HISTORY : clean codes: implement 4.1.1 requirements
* HISTORY :
* HISTORY : Revision 1.40 2009/07/14 16:36:48 wangeug
* HISTORY : clean codes
* HISTORY :
* HISTORY : Revision 1.39 2009/07/10 19:57:04 wangeug
* HISTORY : MMS re-engineering
* HISTORY :
* HISTORY : Revision 1.38 2009/06/12 15:53:49 wangeug
* HISTORY : clean code: caAdapter MMS 4.1.1
* HISTORY :
* HISTORY : Revision 1.37 2008/09/26 20:35:27 linc
* HISTORY : Updated according to code standard.
* HISTORY :
* HISTORY : Revision 1.36 2008/06/09 19:54:06 phadkes
* HISTORY : New license text replaced for all .java files.
* HISTORY :
* HISTORY : Revision 1.35 2008/06/03 20:12:03 wangeug
* HISTORY : use logger and preserve the primary key tag if mapped to an Object rather than an Object.Attribute
* HISTORY :
* HISTORY : Revision 1.34 2008/05/30 17:35:05 wangeug
* HISTORY : add list to keep preserved mapping information
* HISTORY :
* HISTORY : Revision 1.33 2008/05/29 14:35:16 wangeug
* HISTORY : use caCORE SDK 4.0 process "mapped-attributes" tagvalue
* HISTORY :
* HISTORY : Revision 1.32 2008/05/22 15:48:49 wangeug
* HISTORY : integrate with caCORE SDK to generate Hibernate mapping
* HISTORY :
* HISTORY : Revision 1.31 2007/12/13 21:09:33 wangeug
* HISTORY : resolve code dependence in compiling
* HISTORY :
* HISTORY : Revision 1.30 2007/11/16 17:18:36 wangeug
* HISTORY : clean codes: remove unused "import"
* HISTORY :
* HISTORY : Revision 1.29 2007/10/11 19:06:26 schroedn
* HISTORY : fixed HBMGenerator save error
* HISTORY :
* HISTORY : Revision 1.28 2007/09/21 04:41:08 wuye
* HISTORY : removed system.out
* HISTORY :
* HISTORY : Revision 1.27 2007/09/20 16:40:14 schroedn
* HISTORY : License text
* HISTORY :
* HISTORY : Revision 1.26 2007/09/17 15:08:14 wuye
* HISTORY : added modify discriminator value capability
* HISTORY :
* HISTORY : Revision 1.25 2007/09/14 22:40:08 wuye
* HISTORY : Fixed discriminator issue
* HISTORY :
* HISTORY : Revision 1.24 2007/09/14 15:06:25 wuye
* HISTORY : Added support for table per inheritence structure
* HISTORY :
* HISTORY : Revision 1.23 2007/09/13 21:39:32 wuye
* HISTORY : change arraylist to hashmap
* HISTORY :
* HISTORY : Revision 1.22 2007/09/13 20:48:31 schroedn
* HISTORY : CLob, Discriminator, Lazy/Eager
* HISTORY :
* HISTORY : Revision 1.21 2007/09/13 18:53:40 wuye
* HISTORY : Code re-org
* HISTORY :
* HISTORY : Revision 1.20 2007/09/13 14:20:15 schroedn
* HISTORY : Changes the graphics for Clob/Lazy/Eager/Discriminator
* HISTORY :
* HISTORY : Revision 1.19 2007/09/12 20:56:00 wuye
* HISTORY : Fixed the load from association "lazy-load"
* HISTORY :
* HISTORY : Revision 1.18 2007/09/12 17:57:55 schroedn
* HISTORY : CLob, Discriminator, Lazy/Eager
* HISTORY :
* HISTORY : Revision 1.17 2007/09/12 16:04:15 schroedn
* HISTORY : PreferenceManager -> CaadapterUtil
* HISTORY :
* HISTORY : Revision 1.16 2007/09/12 14:56:46 schroedn
* HISTORY : *** empty log message ***
* HISTORY :
* HISTORY : Revision 1.14 2007/09/11 20:38:40 schroedn
* HISTORY : CLob, Discriminator, Lazy/Eager
* HISTORY :
* HISTORY : Revision 1.12 2007/09/05 15:16:33 schroedn
* HISTORY : Added icons to PK and Lazy/Eager
* HISTORY :
* HISTORY : Revision 1.11 2007/08/30 19:32:08 schroedn
* HISTORY : fixed bug with loading without preferences set
* HISTORY :
* HISTORY : Revision 1.10 2007/08/28 18:36:08 schroedn
* HISTORY : Added a NULL check for preferences
* HISTORY :
* HISTORY : Revision 1.9 2007/08/10 15:57:39 schroedn
* HISTORY : New Feature - Preferences to change prefex in XMI
* HISTORY :
* HISTORY : Revision 1.8 2007/08/09 18:14:31 schroedn
* HISTORY : New Feature - Preferences to change prefex in XMI
* HISTORY :
* HISTORY : Revision 1.7 2007/08/09 16:24:40 schroedn
* HISTORY : New Feature - Preferences to change prefex in XMI
* HISTORY :
* HISTORY : Revision 1.6 2007/08/07 20:50:47 schroedn
* HISTORY : New Feature, Primary Key and Lazy/Eager functions added to MMS
* HISTORY :
* HISTORY : Revision 1.5 2007/08/07 15:54:47 schroedn
* HISTORY : New Feature, Primary Key and Lazy/Eager functions added to MMS
* HISTORY :
* HISTORY : Revision 1.4 2007/07/03 19:33:48 wangeug
* HISTORY : initila loading hl7 code without "clone"
* HISTORY :
* HISTORY : Revision 1.3 2007/06/13 17:22:08 schroedn
* HISTORY : removed functions
* HISTORY :
* HISTORY : Revision 1.2 2007/06/07 16:17:00 schroedn
* HISTORY : Edits to sync with new codebase and java webstart
 * HISTORY :
 * HISTORY : Revision 1.1 2007/04/03 16:17:57 wangeug
 * HISTORY : initial loading
 * HISTORY :
 * HISTORY : Revision 1.14 2006/12/20 16:25:39 wuye
 * HISTORY : Update HBM open file option
 * HISTORY :
 * HISTORY : Revision 1.13 2006/12/12 16:53:39 wuye
 * HISTORY : Comment out System.out
 * HISTORY :
 * HISTORY : Revision 1.12 2006/11/15 06:28:21 wuye
 * HISTORY : added checking whether the mapping file is valid during the open map file process
 * HISTORY :
 * HISTORY : Revision 1.11 2006/11/14 15:24:16 wuye
 * HISTORY : Added validation functionality
 * HISTORY :
 * HISTORY : Revision 1.10 2006/11/10 14:43:41 wuye
 * HISTORY : Disable the validate button on toolbar
 * HISTORY :
 * HISTORY : Revision 1.9 2006/10/30 19:51:38 wuye
 * HISTORY : Add a dialog for hbm generation
 * HISTORY :
 * HISTORY : Revision 1.8 2006/10/30 16:28:57 wuye
 * HISTORY : Modified the Menu structure
 * HISTORY :
 * HISTORY : Revision 1.7 2006/10/23 16:20:25 wuye
 * HISTORY : Made changes to ignore undraggable node
 * HISTORY :
 * HISTORY : Revision 1.6 2006/10/20 21:31:28 wuye
 * HISTORY : Added annotate and hbm file generation functionality
 * HISTORY :
 * HISTORY : Revision 1.5 2006/10/10 17:13:25 wuye
 * HISTORY : Added delete functionality
 * HISTORY :
 * HISTORY : Revision 1.2 2006/09/28 19:30:38 wuye
 * HISTORY : Removed classes that are not used
 * HISTORY :
 * HISTORY : Revision 1.1 2006/09/26 15:47:48 wuye
 * HISTORY : New object 2 database mapping panel
 * HISTORY :
 */
|
Process an object meta together with its attribute and association meta
SVN-Revision: 2572
|
software/caadapter/src/java/gov/nih/nci/caadapter/ui/mapping/mms/Object2DBMappingPanel.java
|
Process an object meta together with its attribute and association meta
|
|
Java
|
mit
|
fe38b9d888b45484a8a7f23c9a6b06bb5b5dd732
| 0
|
JCThePants/NucleusFramework,JCThePants/NucleusFramework
|
/*
* This file is part of NucleusFramework for Bukkit, licensed under the MIT License (MIT).
*
* Copyright (c) JCThePants (www.jcwhatever.com)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.jcwhatever.nucleus;
import com.jcwhatever.nucleus.managed.commands.ICommand;
import com.jcwhatever.nucleus.managed.commands.ICommandDispatcher;
import com.jcwhatever.nucleus.managed.language.ILanguageContext;
import com.jcwhatever.nucleus.managed.messaging.IChatPrefixed;
import com.jcwhatever.nucleus.managed.messaging.IMessenger;
import com.jcwhatever.nucleus.mixins.ILoadable;
import com.jcwhatever.nucleus.providers.storage.DataStorage;
import com.jcwhatever.nucleus.storage.DataPath;
import com.jcwhatever.nucleus.storage.IDataNode;
import com.jcwhatever.nucleus.storage.MemoryDataNode;
import org.bukkit.command.PluginCommand;
import org.bukkit.event.Listener;
import org.bukkit.plugin.PluginDescriptionFile;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.plugin.java.JavaPluginLoader;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* An abstract implementation of a Bukkit plugin with
* NucleusFramework specific features.
*/
public abstract class NucleusPlugin extends JavaPlugin
implements IChatPrefixed, ILoadable {
static List<NucleusPlugin> _enabled = new ArrayList<>(10);
private ILanguageContext _languageContext;
private ICommandDispatcher _commandDispatcher;
private IDataNode _dataNode;
private boolean _isDebugging;
private IMessenger _messenger;
private IMessenger _anonMessenger;
private boolean _isTesting;
boolean _isEnabled;
/**
* Constructor.
*/
public NucleusPlugin() {
super();
try {
onInit();
}
catch (Throwable e) {
e.printStackTrace();
throw new RuntimeException(e);
}
}
/**
* Constructor for testing.
*/
protected NucleusPlugin(JavaPluginLoader loader, PluginDescriptionFile description,
File dataFolder, File file) {
super(loader, description, dataFolder, file);
_isTesting = true;
try {
onInit();
}
catch (Throwable e) {
e.printStackTrace();
throw new RuntimeException(e);
}
}
/**
* Determine if the plugin is in debug mode.
*/
public final boolean isDebugging() {
return _isDebugging;
}
/**
* Determine if the plugin is being tested.
*/
public final boolean isTesting() {
return _isTesting;
}
/**
     * Set the plugin's debug mode.
*
* @param isDebugging True to turn debug on.
*/
public final void setDebugging(boolean isDebugging) {
_isDebugging = isDebugging;
}
/**
* Determine if the plugin is finished loading.
*/
@Override
public boolean isLoaded() {
return isEnabled() && _isEnabled;
}
@Override
public abstract String getChatPrefix();
@Override
public abstract String getConsolePrefix();
/**
     * Get the plugin's data node.
*/
public IDataNode getDataNode() {
return _dataNode;
}
/**
     * Get the plugin's language context.
*/
public ILanguageContext getLanguageContext() {
return _languageContext;
}
/**
     * Get the plugin's chat and console messenger.
*/
public IMessenger getMessenger() {
return _messenger;
}
/**
     * Get the plugin's anonymous chat messenger.
*
* <p>A messenger that has no chat prefix.</p>
*/
public IMessenger getAnonMessenger() {
return _anonMessenger;
}
/**
     * Get the plugin's command dispatcher.
*/
public ICommandDispatcher getCommandDispatcher() {
return _commandDispatcher;
}
@Override
public final void onEnable() {
onPreEnable();
_messenger = Nucleus.getMessengerFactory().get(this);
_anonMessenger = Nucleus.getMessengerFactory().getAnon(this);
loadDataNode();
_languageContext = Nucleus.getLanguageManager().createContext(this);
Nucleus.registerPlugin(this);
if (!(this instanceof BukkitPlugin))
_enabled.add(this);
onPostPreEnable();
_commandDispatcher = Nucleus.getCommandManager().createDispatcher(this);
Map<String, Map<String, Object>> commands = getDescription().getCommands();
if (commands != null) {
for (String cmd : commands.keySet()) {
PluginCommand command = getCommand(cmd);
command.setExecutor(_commandDispatcher);
command.setTabCompleter(_commandDispatcher);
}
}
}
@Override
public final void onDisable() {
Nucleus.unregisterPlugin(this);
try {
onDisablePlugin();
}
catch (Throwable e) {
e.printStackTrace();
}
}
/**
* Invoked when the plugin is instantiated.
*
* <p>Intended for optional override.</p>
*/
protected void onInit() {
// do nothing
}
/**
* Invoked before the plugin config is loaded.
*
* <p>Intended for optional override.</p>
*/
protected void onPreEnable() {
// do nothing
}
/**
* Invoked after the plugin data node is loaded but before the plugin is enabled.
*
* <p>Intended for optional override.</p>
*/
protected void onPostPreEnable() {
// do nothing
}
/**
* Invoked when the plugin is enabled.
*/
protected abstract void onEnablePlugin();
/**
* Invoked when the plugin is disabled.
*/
protected abstract void onDisablePlugin();
/**
* Register a command.
*/
protected void registerCommand(Class<? extends ICommand> commandClass) {
_commandDispatcher.registerCommand(commandClass);
}
/**
* Register event listeners.
*
* @param listeners The listeners to register.
*/
protected void registerEventListeners(Listener...listeners) {
PluginManager pm = getServer().getPluginManager();
for (Listener listener : listeners) {
pm.registerEvents(listener, this);
}
}
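    /*
     * Usage sketch (not part of the framework; all names below are illustrative): a
     * concrete plugin typically wires its commands and listeners from onEnablePlugin(),
     * e.g.
     *
     *     public class MyPlugin extends NucleusPlugin {
     *         @Override protected void onEnablePlugin() {
     *             registerCommand(MyCommand.class);          // hypothetical ICommand implementation
     *             registerEventListeners(new MyListener());  // hypothetical Bukkit Listener
     *         }
     *         @Override protected void onDisablePlugin() { }
     *         @Override public String getChatPrefix() { return "[My] "; }
     *         @Override public String getConsolePrefix() { return "MyPlugin: "; }
     *     }
     */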
/*
     * Load the plugin's data node.
*/
private void loadDataNode() {
File dir = getDataFolder();
if (!_isTesting && !dir.exists() && !dir.mkdirs()) {
throw new RuntimeException("Failed to create data folders.");
}
if (_isTesting) {
_dataNode = new MemoryDataNode(this);
}
else {
_dataNode = DataStorage.get(this, new DataPath("config"));
if (!_dataNode.load()) {
getServer().getPluginManager().disablePlugin(this);
throw new RuntimeException("The plugins data node (config) could not be loaded!");
}
}
_isDebugging = _dataNode.getBoolean("debug");
}
}
|
src/com/jcwhatever/nucleus/NucleusPlugin.java
|
/*
* This file is part of NucleusFramework for Bukkit, licensed under the MIT License (MIT).
*
* Copyright (c) JCThePants (www.jcwhatever.com)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.jcwhatever.nucleus;
import com.jcwhatever.nucleus.managed.commands.ICommand;
import com.jcwhatever.nucleus.managed.commands.ICommandDispatcher;
import com.jcwhatever.nucleus.managed.language.ILanguageContext;
import com.jcwhatever.nucleus.managed.messaging.IChatPrefixed;
import com.jcwhatever.nucleus.managed.messaging.IMessenger;
import com.jcwhatever.nucleus.mixins.ILoadable;
import com.jcwhatever.nucleus.providers.storage.DataStorage;
import com.jcwhatever.nucleus.storage.DataPath;
import com.jcwhatever.nucleus.storage.IDataNode;
import com.jcwhatever.nucleus.storage.MemoryDataNode;
import org.bukkit.command.PluginCommand;
import org.bukkit.event.Listener;
import org.bukkit.plugin.PluginDescriptionFile;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.plugin.java.JavaPluginLoader;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* An abstract implementation of a Bukkit plugin with
* NucleusFramework specific features.
*/
public abstract class NucleusPlugin extends JavaPlugin
implements IChatPrefixed, ILoadable {
static List<NucleusPlugin> _enabled = new ArrayList<>(10);
private ILanguageContext _languageContext;
private ICommandDispatcher _commandDispatcher;
private IDataNode _dataNode;
private boolean _isDebugging;
private IMessenger _messenger;
private IMessenger _anonMessenger;
private boolean _isTesting;
boolean _isEnabled;
/**
* Constructor.
*/
public NucleusPlugin() {
super();
onInit();
}
/**
* Constructor for testing.
*/
protected NucleusPlugin(JavaPluginLoader loader, PluginDescriptionFile description,
File dataFolder, File file) {
super(loader, description, dataFolder, file);
_isTesting = true;
onInit();
}
/**
* Determine if the plugin is in debug mode.
*/
public final boolean isDebugging() {
return _isDebugging;
}
/**
* Determine if the plugin is being tested.
*/
public final boolean isTesting() {
return _isTesting;
}
/**
* Set the plugins debug mode.
*
* @param isDebugging True to turn debug on.
*/
public final void setDebugging(boolean isDebugging) {
_isDebugging = isDebugging;
}
/**
* Determine if the plugin is finished loading.
*/
@Override
public boolean isLoaded() {
return isEnabled() && _isEnabled;
}
@Override
public abstract String getChatPrefix();
@Override
public abstract String getConsolePrefix();
/**
* Get the plugins data node.
*/
public IDataNode getDataNode() {
return _dataNode;
}
/**
* Get the plugins language context.
*/
public ILanguageContext getLanguageContext() {
return _languageContext;
}
/**
* Get the plugins chat and console messenger.
*/
public IMessenger getMessenger() {
return _messenger;
}
/**
* Get the plugins anonymous chat messenger.
*
* <p>A messenger that has no chat prefix.</p>
*/
public IMessenger getAnonMessenger() {
return _anonMessenger;
}
/**
* Get the plugins command dispatcher.
*/
public ICommandDispatcher getCommandDispatcher() {
return _commandDispatcher;
}
@Override
public final void onEnable() {
onPreEnable();
_messenger = Nucleus.getMessengerFactory().get(this);
_anonMessenger = Nucleus.getMessengerFactory().getAnon(this);
loadDataNode();
_languageContext = Nucleus.getLanguageManager().createContext(this);
Nucleus.registerPlugin(this);
if (!(this instanceof BukkitPlugin))
_enabled.add(this);
onPostPreEnable();
_commandDispatcher = Nucleus.getCommandManager().createDispatcher(this);
Map<String, Map<String, Object>> commands = getDescription().getCommands();
if (commands != null) {
for (String cmd : commands.keySet()) {
PluginCommand command = getCommand(cmd);
command.setExecutor(_commandDispatcher);
command.setTabCompleter(_commandDispatcher);
}
}
}
@Override
public final void onDisable() {
Nucleus.unregisterPlugin(this);
try {
onDisablePlugin();
}
catch (Throwable e) {
e.printStackTrace();
}
}
/**
* Invoked when the plugin is instantiated.
*
* <p>Intended for optional override.</p>
*/
protected void onInit() {
// do nothing
}
/**
* Invoked before the plugin config is loaded.
*
* <p>Intended for optional override.</p>
*/
protected void onPreEnable() {
// do nothing
}
/**
* Invoked after the plugin data node is loaded but before the plugin is enabled.
*
* <p>Intended for optional override.</p>
*/
protected void onPostPreEnable() {
// do nothing
}
/**
* Invoked when the plugin is enabled.
*/
protected abstract void onEnablePlugin();
/**
* Invoked when the plugin is disabled.
*/
protected abstract void onDisablePlugin();
/**
* Register a command.
*/
protected void registerCommand(Class<? extends ICommand> commandClass) {
_commandDispatcher.registerCommand(commandClass);
}
/**
* Register event listeners.
*
* @param listeners The listeners to register.
*/
protected void registerEventListeners(Listener...listeners) {
PluginManager pm = getServer().getPluginManager();
for (Listener listener : listeners) {
pm.registerEvents(listener, this);
}
}
/*
* Load the plugins data node.
*/
private void loadDataNode() {
File dir = getDataFolder();
if (!_isTesting && !dir.exists() && !dir.mkdirs()) {
throw new RuntimeException("Failed to create data folders.");
}
if (_isTesting) {
_dataNode = new MemoryDataNode(this);
}
else {
_dataNode = DataStorage.get(this, new DataPath("config"));
if (!_dataNode.load()) {
getServer().getPluginManager().disablePlugin(this);
throw new RuntimeException("The plugins data node (config) could not be loaded!");
}
}
_isDebugging = _dataNode.getBoolean("debug");
}
}
|
catch onInit exceptions to print message to console
|
src/com/jcwhatever/nucleus/NucleusPlugin.java
|
catch onInit exceptions to print message to console
|
|
Java
|
mit
|
0ba97fa1bd86dbbe8122f95dd37e3f8d2ac0b1cd
| 0
|
InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service
|
package org.innovateuk.ifs.project.grantofferletter.security;
import org.innovateuk.ifs.application.domain.Application;
import org.innovateuk.ifs.commons.ZeroDowntime;
import org.innovateuk.ifs.commons.security.PermissionRule;
import org.innovateuk.ifs.commons.security.PermissionRules;
import org.innovateuk.ifs.project.resource.ProjectCompositeId;
import org.innovateuk.ifs.project.resource.ProjectResource;
import org.innovateuk.ifs.security.BasePermissionRules;
import org.innovateuk.ifs.user.resource.UserResource;
import org.springframework.stereotype.Component;
import static org.innovateuk.ifs.util.SecurityRuleUtil.*;
@PermissionRules
@Component
public class GrantOfferLetterPermissionRules extends BasePermissionRules {
@PermissionRule(
value = "DOWNLOAD_GRANT_OFFER",
description = "Partners can download grant offer documents (Unsigned grant offer, signed grant offer, Additional contract)")
public boolean partnersCanDownloadGrantOfferLetter(ProjectResource project, UserResource user) {
return isPartner(project.getId(), user.getId());
}
@PermissionRule(
value = "DOWNLOAD_GRANT_OFFER",
description = "Competitions team & Project Finance can download grant offer documents (Unsigned grant offer, signed grant offer, Additional contract)")
public boolean internalUsersCanDownloadGrantOfferLetter(ProjectResource project, UserResource user) {
return isInternalAdmin(user);
}
@PermissionRule(
value = "DOWNLOAD_GRANT_OFFER",
description = "Support users can download grant offer documents (Unsigned grant offer, signed grant offer, Additional contract)")
public boolean supportUsersCanDownloadGrantOfferLetter(ProjectResource project, UserResource user) {
return isSupport(user);
}
@PermissionRule(
value = "DOWNLOAD_GRANT_OFFER",
description = "Innovation lead users can download grant offer documents (Unsigned grant offer, signed grant offer, Additional contract), of projects from competition assigned to them")
public boolean innovationLeadUsersCanDownloadGrantOfferLetter(ProjectResource project, UserResource user) {
Application application = applicationRepository.findOne(project.getApplication());
return userIsInnovationLeadOnCompetition(application.getCompetition().getId(), user.getId());
}
@PermissionRule(
value = "VIEW_GRANT_OFFER",
description = "Partners can view grant offer documents (Unsigned grant offer, signed grant offer, Additional contract)")
public boolean partnersCanViewGrantOfferLetter(ProjectResource project, UserResource user) {
return isPartner(project.getId(), user.getId());
}
@PermissionRule(
value = "VIEW_GRANT_OFFER",
description = "Competitions team & Project Finance can view grant offer documents (Unsigned grant offer, signed grant offer, Additional contract)")
public boolean internalUsersCanViewGrantOfferLetter(ProjectResource project, UserResource user) {
return isInternalAdmin(user);
}
@PermissionRule(
value = "VIEW_GRANT_OFFER",
description = "Support users can view grant offer documents (Unsigned grant offer, signed grant offer, Additional contract)")
public boolean supportUsersCanViewGrantOfferLetter(ProjectResource project, UserResource user) {
return isSupport(user);
}
@PermissionRule(
value = "VIEW_GRANT_OFFER",
description = "Innovation lead users can view grant offer documents (Unsigned grant offer, signed grant offer, Additional contract), of projects from competition assigned to them")
public boolean innovationLeadUsersCanViewGrantOfferLetter(ProjectResource project, UserResource user) {
Application application = applicationRepository.findOne(project.getApplication());
return userIsInnovationLeadOnCompetition(application.getCompetition().getId(), user.getId());
}
@PermissionRule(
value = "UPLOAD_SIGNED_GRANT_OFFER",
description = "Project manager or Lead partner can upload signed grant offer letter")
public boolean leadPartnerCanUploadGrantOfferLetter(ProjectResource project, UserResource user) {
return isLeadPartner(project.getId(), user.getId());
}
@PermissionRule(
value = "UPLOAD_SIGNED_GRANT_OFFER",
description = "Project manager or Lead partner can upload signed grant offer letter")
public boolean projectManagerCanUploadGrantOfferLetter(ProjectResource project, UserResource user) {
return isProjectManager(project.getId(), user.getId());
}
@PermissionRule(
value = "DELETE_SIGNED_GRANT_OFFER",
description = "Lead partner can delete signed grant offer letter")
public boolean leadPartnerCanDeleteSignedGrantOfferLetter(ProjectResource project, UserResource user) {
return isLeadPartner(project.getId(), user.getId());
}
@PermissionRule(
value = "SUBMIT_GRANT_OFFER_LETTER",
description = "Project manager can submit the grant offer letter")
public boolean projectManagerSubmitGrantOfferLetter(ProjectCompositeId projectCompositeId, UserResource user) {
return isProjectManager(projectCompositeId.id(), user.getId());
}
@PermissionRule(
value = "SEND_GRANT_OFFER_LETTER",
description = "Internal users can send the Grant Offer Letter notification")
public boolean internalUserCanSendGrantOfferLetter(ProjectResource project, UserResource user) {
return isInternal(user);
}
@PermissionRule(
value = "APPROVE_SIGNED_GRANT_OFFER_LETTER",
description = "Internal users can approve the signed Grant Offer Letter")
public boolean internalUsersCanApproveSignedGrantOfferLetter(ProjectResource project, UserResource user) {
return isInternal(user);
}
@ZeroDowntime(reference = "IFS-2579", description = "Remove in Sprint 19 - replaced with usage of getGrantOfferLetterState()")
@PermissionRule(
value = "VIEW_GRANT_OFFER_LETTER_SEND_STATUS",
description = "Internal users can view the send status of Grant Offer Letter for a project")
public boolean internalAdminUserCanViewSendGrantOfferLetterStatus(ProjectResource project, UserResource user) {
return isInternalAdmin(user);
}
@ZeroDowntime(reference = "IFS-2579", description = "Remove in Sprint 19 - replaced with usage of getGrantOfferLetterState()")
@PermissionRule(
value = "VIEW_GRANT_OFFER_LETTER_SEND_STATUS",
description = "Support users can view the send status of Grant Offer Letter for a project")
public boolean supportUserCanViewSendGrantOfferLetterStatus(ProjectResource project, UserResource user) {
return isSupport(user);
}
@ZeroDowntime(reference = "IFS-2579", description = "Remove in Sprint 19 - replaced with usage of getGrantOfferLetterState()")
@PermissionRule(
value = "VIEW_GRANT_OFFER_LETTER_SEND_STATUS",
description = "Innovation lead users can view the send status of Grant Offer Letter for a project from competition assigned to them")
public boolean innovationLeadUserCanViewSendGrantOfferLetterStatus(ProjectResource project, UserResource user) {
Application application = applicationRepository.findOne(project.getApplication());
return userIsInnovationLeadOnCompetition(application.getCompetition().getId(), user.getId());
}
@ZeroDowntime(reference = "IFS-2579", description = "Remove in Sprint 19 - replaced with usage of getGrantOfferLetterState()")
@PermissionRule(
value = "VIEW_GRANT_OFFER_LETTER_SEND_STATUS",
description = "Partners can view the send status of Grant Offer Letter for a project")
public boolean externalUserCanViewSendGrantOfferLetterStatus(ProjectResource project, UserResource user) {
return isPartner(project.getId(), user.getId());
}
@ZeroDowntime(reference = "IFS-2579", description = "Remove in Sprint 19 - replaced with usage of getGrantOfferLetterState()")
@PermissionRule(value = "VIEW_SIGNED_GRANT_OFFER_LETTER_APPROVED_STATUS", description = "A user can view signed grant offer letter approval status that they are partners on")
public boolean partnersOnProjectCanViewSignedGrantOfferLetterApprovedStatus(ProjectResource project, UserResource user) {
return project != null && isPartner(project.getId(), user.getId());
}
@ZeroDowntime(reference = "IFS-2579", description = "Remove in Sprint 19 - replaced with usage of getGrantOfferLetterState()")
@PermissionRule(value = "VIEW_SIGNED_GRANT_OFFER_LETTER_APPROVED_STATUS", description = "Internal users can view signed grant offer letter approval status")
public boolean internalUsersCanViewSignedGrantOfferLetterApprovedStatus(ProjectResource project, UserResource user) {
return isInternal(user);
}
@ZeroDowntime(reference = "IFS-2579", description = "Remove in Sprint 19 - replaced with usage of getGrantOfferLetterState()")
@PermissionRule(value = "VIEW_SIGNED_GRANT_OFFER_LETTER_REJECTED_STATUS", description = "Internal users can view signed grant offer letter rejection status")
public boolean internalUsersCanViewSignedGrantOfferLetterRejectedStatus(ProjectResource project, UserResource user) {
return isInternal(user);
}
@ZeroDowntime(reference = "IFS-2579", description = "Remove in Sprint 19 - replaced with usage of getGrantOfferLetterState()")
@PermissionRule(value = "VIEW_SIGNED_GRANT_OFFER_LETTER_REJECTED_STATUS", description = "Project manager can view signed grant offer letter rejection status")
public boolean projectManagerCanViewSignedGrantOfferLetterRejectedStatus(ProjectResource project, UserResource user) {
return project != null && isProjectManager(project.getId(), user.getId());
}
}
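/*
 * Editorial sketch of how these rules are consumed (assumption based on the
 * @PermissionRules/@PermissionRule pattern; the wiring is not shown in this file):
 * a service method guards itself with a Spring Security expression whose permission
 * name matches a rule's "value", and every matching rule method is evaluated until
 * one returns true, e.g.
 *
 *     @PreAuthorize("hasPermission(#projectId, 'org.innovateuk.ifs.project.resource.ProjectResource', 'DOWNLOAD_GRANT_OFFER')")
 *     ServiceResult<FileAndContents> getGrantOfferLetterFileAndContents(Long projectId);  // hypothetical signature
 */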
|
ifs-data-layer/ifs-data-service/src/main/java/org/innovateuk/ifs/project/grantofferletter/security/GrantOfferLetterPermissionRules.java
|
package org.innovateuk.ifs.project.grantofferletter.security;
import org.innovateuk.ifs.application.domain.Application;
import org.innovateuk.ifs.commons.security.PermissionRule;
import org.innovateuk.ifs.commons.security.PermissionRules;
import org.innovateuk.ifs.project.resource.ProjectCompositeId;
import org.innovateuk.ifs.project.resource.ProjectResource;
import org.innovateuk.ifs.security.BasePermissionRules;
import org.innovateuk.ifs.user.resource.UserResource;
import org.springframework.stereotype.Component;
import static org.innovateuk.ifs.util.SecurityRuleUtil.*;
@PermissionRules
@Component
public class GrantOfferLetterPermissionRules extends BasePermissionRules {
@PermissionRule(
value = "DOWNLOAD_GRANT_OFFER",
description = "Partners can download grant offer documents (Unsigned grant offer, signed grant offer, Additional contract)")
public boolean partnersCanDownloadGrantOfferLetter(ProjectResource project, UserResource user) {
return isPartner(project.getId(), user.getId());
}
@PermissionRule(
value = "DOWNLOAD_GRANT_OFFER",
description = "Competitions team & Project Finance can download grant offer documents (Unsigned grant offer, signed grant offer, Additional contract)")
public boolean internalUsersCanDownloadGrantOfferLetter(ProjectResource project, UserResource user) {
return isInternalAdmin(user);
}
@PermissionRule(
value = "DOWNLOAD_GRANT_OFFER",
description = "Support users can download grant offer documents (Unsigned grant offer, signed grant offer, Additional contract)")
public boolean supportUsersCanDownloadGrantOfferLetter(ProjectResource project, UserResource user) {
return isSupport(user);
}
@PermissionRule(
value = "DOWNLOAD_GRANT_OFFER",
description = "Innovation lead users can download grant offer documents (Unsigned grant offer, signed grant offer, Additional contract), of projects from competition assigned to them")
public boolean innovationLeadUsersCanDownloadGrantOfferLetter(ProjectResource project, UserResource user) {
Application application = applicationRepository.findOne(project.getApplication());
return userIsInnovationLeadOnCompetition(application.getCompetition().getId(), user.getId());
}
@PermissionRule(
value = "VIEW_GRANT_OFFER",
description = "Partners can view grant offer documents (Unsigned grant offer, signed grant offer, Additional contract)")
public boolean partnersCanViewGrantOfferLetter(ProjectResource project, UserResource user) {
return isPartner(project.getId(), user.getId());
}
@PermissionRule(
value = "VIEW_GRANT_OFFER",
description = "Competitions team & Project Finance can view grant offer documents (Unsigned grant offer, signed grant offer, Additional contract)")
public boolean internalUsersCanViewGrantOfferLetter(ProjectResource project, UserResource user) {
return isInternalAdmin(user);
}
@PermissionRule(
value = "VIEW_GRANT_OFFER",
description = "Support users can view grant offer documents (Unsigned grant offer, signed grant offer, Additional contract)")
public boolean supportUsersCanViewGrantOfferLetter(ProjectResource project, UserResource user) {
return isSupport(user);
}
@PermissionRule(
value = "VIEW_GRANT_OFFER",
description = "Innovation lead users can view grant offer documents (Unsigned grant offer, signed grant offer, Additional contract), of projects from competition assigned to them")
public boolean innovationLeadUsersCanViewGrantOfferLetter(ProjectResource project, UserResource user) {
Application application = applicationRepository.findOne(project.getApplication());
return userIsInnovationLeadOnCompetition(application.getCompetition().getId(), user.getId());
}
@PermissionRule(
value = "UPLOAD_SIGNED_GRANT_OFFER",
description = "Project manager or Lead partner can upload signed grant offer letter")
public boolean leadPartnerCanUploadGrantOfferLetter(ProjectResource project, UserResource user) {
return isLeadPartner(project.getId(), user.getId());
}
@PermissionRule(
value = "UPLOAD_SIGNED_GRANT_OFFER",
description = "Project manager or Lead partner can upload signed grant offer letter")
public boolean projectManagerCanUploadGrantOfferLetter(ProjectResource project, UserResource user) {
return isProjectManager(project.getId(), user.getId());
}
@PermissionRule(
value = "DELETE_SIGNED_GRANT_OFFER",
description = "Lead partner can delete signed grant offer letter")
public boolean leadPartnerCanDeleteSignedGrantOfferLetter(ProjectResource project, UserResource user) {
return isLeadPartner(project.getId(), user.getId());
}
@PermissionRule(
value = "SUBMIT_GRANT_OFFER_LETTER",
description = "Project manager can submit the grant offer letter")
public boolean projectManagerSubmitGrantOfferLetter(ProjectCompositeId projectCompositeId, UserResource user) {
return isProjectManager(projectCompositeId.id(), user.getId());
}
@PermissionRule(
value = "SEND_GRANT_OFFER_LETTER",
description = "Internal users can send the Grant Offer Letter notification")
public boolean internalUserCanSendGrantOfferLetter(ProjectResource project, UserResource user) {
return isInternal(user);
}
@PermissionRule(
value = "APPROVE_SIGNED_GRANT_OFFER_LETTER",
description = "Internal users can approve the signed Grant Offer Letter")
public boolean internalUsersCanApproveSignedGrantOfferLetter(ProjectResource project, UserResource user) {
return isInternal(user);
}
@PermissionRule(
value = "VIEW_GRANT_OFFER_LETTER_SEND_STATUS",
description = "Internal users can view the send status of Grant Offer Letter for a project")
public boolean internalAdminUserCanViewSendGrantOfferLetterStatus(ProjectResource project, UserResource user) {
return isInternalAdmin(user);
}
@PermissionRule(
value = "VIEW_GRANT_OFFER_LETTER_SEND_STATUS",
description = "Support users can view the send status of Grant Offer Letter for a project")
public boolean supportUserCanViewSendGrantOfferLetterStatus(ProjectResource project, UserResource user) {
return isSupport(user);
}
@PermissionRule(
value = "VIEW_GRANT_OFFER_LETTER_SEND_STATUS",
description = "Innovation lead users can view the send status of Grant Offer Letter for a project from competition assigned to them")
public boolean innovationLeadUserCanViewSendGrantOfferLetterStatus(ProjectResource project, UserResource user) {
Application application = applicationRepository.findOne(project.getApplication());
return userIsInnovationLeadOnCompetition(application.getCompetition().getId(), user.getId());
}
@PermissionRule(
value = "VIEW_GRANT_OFFER_LETTER_SEND_STATUS",
description = "Partners can view the send status of Grant Offer Letter for a project")
public boolean externalUserCanViewSendGrantOfferLetterStatus(ProjectResource project, UserResource user) {
return isPartner(project.getId(), user.getId());
}
@PermissionRule(value = "VIEW_SIGNED_GRANT_OFFER_LETTER_APPROVED_STATUS", description = "A user can view signed grant offer letter approval status that they are partners on")
public boolean partnersOnProjectCanViewSignedGrantOfferLetterApprovedStatus(ProjectResource project, UserResource user) {
return project != null && isPartner(project.getId(), user.getId());
}
@PermissionRule(value = "VIEW_SIGNED_GRANT_OFFER_LETTER_APPROVED_STATUS", description = "Internal users can view signed grant offer letter approval status")
public boolean internalUsersCanViewSignedGrantOfferLetterApprovedStatus(ProjectResource project, UserResource user) {
return isInternal(user);
}
@PermissionRule(value = "VIEW_SIGNED_GRANT_OFFER_LETTER_REJECTED_STATUS", description = "Internal users can view signed grant offer letter rejection status")
public boolean internalUsersCanViewSignedGrantOfferLetterRejectedStatus(ProjectResource project, UserResource user) {
return isInternal(user);
}
@PermissionRule(value = "VIEW_SIGNED_GRANT_OFFER_LETTER_REJECTED_STATUS", description = "Project manager can view signed grant offer letter rejection status")
public boolean projectManagerCanViewSignedGrantOfferLetterRejectedStatus(ProjectResource project, UserResource user) {
return project != null && isProjectManager(project.getId(), user.getId());
}
}
|
IFS-2579 - added ZDD remove notes to new permission methods
|
ifs-data-layer/ifs-data-service/src/main/java/org/innovateuk/ifs/project/grantofferletter/security/GrantOfferLetterPermissionRules.java
|
IFS-2579 - added ZDD remove notes to new permission methods
|
|
Java
|
mit
|
5705eba4e7a7c4d8f45ce388a7fd690655af5a22
| 0
|
joand/codingame,joand/codingame,joand/codingame,joand/codingame,joand/codingame,joand/codingame
|
package fr.joand;
import fr.joand.exception.ElementNotFoundException;
import fr.joand.model.Edge;
import fr.joand.model.Factory;
import fr.joand.model.Owner;
import fr.joand.model.Troop;
import java.util.*;
import java.util.stream.Collectors;
/**
* sources :
* http://www.vogella.com/tutorials/JavaAlgorithmsDijkstra/article.html
* https://mail-attachment.googleusercontent.com/attachment/u/0/?ui=2&ik=26879a90be&view=att&th=13a50271fc085591&attid=0.2&disp=inline&realattid=f_h85xz9341&safe=1&zw&saddbat=ANGjdJ80ygnCoNeDC9IW_BeMrbaHsPMOyCZVHVhJiRIb3Bx7zYzvotbkSu4Ccl7L0bQ1g5MU-3qVVpz8J1r1gDKNr8XmsVwUn2OFyRsKXsmPmN5ByoMPdpR6J6jnoAKCBl0NXk3N3z-z_JNVARkc-Hx5ES9qBL5FA-ZKaTmuzSeKpKxmNSdFFFnv2iKayFpJbUzhB5imQxy4e9jxrpdAxvt7ae91iAwECj5g2gu0ESl05x9Mrb2ydh-gwlMsZq8xyilMa4eAQnXVjdYsQSbQ3KhNgEozfBzXqxRo7TjHNK_ok2Ar0Qy9aPtoLFTdwh4T74kZ-yDJJiul1mPTobasPWv1oKBW76oaOybAqmnkbdbwkcgqMUgYs8r0iF3xsXfOavLfhJsnWi8ElxvrIMpgEsbMsyV9UXRGm3C-wG0QqNWfwVeLanr5ZcIIBlOs1LDY9xYQ-Vs0Xco1QfkCQydWI_vjGsbAAYOc2NtYkwHaayAx4bE12TuTOe1jX3mggpXKLhMoDixeomO4AUrTcX9XFP2UAUl96bBX-sbQmpZQ7Aqy9Q2f4MZZUc4LTksPgL9i5JtNsi6xSRH-GK9BC8RDjz8iih51LSkYfEj2aS_W9sOOdNiztWw7E3i0hyRld-f2bHxxIULepnkirx1aa0rzANR15JE7rLh5aopgb30-rQ
* http://algs4.cs.princeton.edu/code/
* https://mail-attachment.googleusercontent.com/attachment/u/0/?ui=2&ik=26879a90be&view=att&th=144181d1dbd12052&attid=0.1&disp=inline&realattid=f_hrgp4bvu0&safe=1&zw&saddbat=ANGjdJ_JIBSxUhjXUIqeJ0jzNlA-BHjIGbRoerYHZzzbM2NZW4qF3Gpz2UWsx-X0yr39Od8wJPHyPZ2VaL3zEdwzALacEgmHfOYsWz1cQqq2efevk65cT4oxD2xHMb9oyHkHy0IrCoBH_R18XRC08fj6Q-4_D0Fej7Scy1FwLL8l3UJYdTWMtG4cFghJIsFJGxF-FiZurJ7Yc_3ZEj5dA1-KxoJe2pEufyZoDR93C6-VjA-Iw6D7iJfLsh8r4MO1v-dlwQbut9TWVM-ccv5hjxkaT1N96mM90DUiUQV4849iKRbG2x_4mxuLAEGqMizeh7eaTrfZtL8MYqTI-dDQ53i9tbfCesyTExnZyUmDlD_Vxnkq8Sl5NXwxf-VBznUoC-248OsGl43SesmR8Ck5H49iLEG6_h4wrP9jZvPNOK2qYzmmwIVjfimdnPrNOY331mLhXPGvQ_UsTUJ5AC4L2z5IdVGnrwa-moX91ib6znpu7TNmf4yheAZeZiAixXKxmQYNKXlsuiDlYJFOlRGrgglk-y6hOqLIfXwYxGrY8zi7lxTS3Ggr-1nOUox14WwoCftRhiJ86XtwG1vNFC7ll6eO9qgIrDbkJQ7-tf1BTHsgxwf1NVXLw3-qUjJm4QA1RQerQkJnGhXw3mELyCvNrZ3V2QabCsHvEEuBlVAE8w
* https://www.khanacademy.org/computing/computer-science/algorithms
* http://fr.wikihow.com/gagner-une-partie-de-Risk
* StarCraft
*/
public class App {
/**
* @return the full Edge
* @throws ElementNotFoundException if asked Edge is not found
*/
public static Edge getEdge(List<Edge> edges, int factoryId_A, int factoryId_B) {
return edges.stream()
.filter(edge -> (edge.getFactoryId_A() == factoryId_A && edge.getFactoryId_B() == factoryId_B)
||
(edge.getFactoryId_A() == factoryId_B && edge.getFactoryId_B() == factoryId_A)
)
.findAny().orElseThrow(ElementNotFoundException::new);
}
/**
     * @return the requested Factory, or null if not found
*/
public static Factory getFactory(List<Factory> factories, int id) {
return factories.stream()
.filter(factory -> factory.getId() == id)
.findAny().orElse(null);
}
public static void main(String args[]) {
Scanner in = new Scanner(System.in);
int factoryCount = in.nextInt(); // the number of factories
List<Factory> factories = new ArrayList<>(factoryCount);
for (int id = 0; id < factoryCount; id++) {
Factory factory = new Factory(id);
factories.add(factory);
}
System.err.println("factoryCount : " + factoryCount);
int linkCount = in.nextInt(); // the number of links between factories
List<Edge> edges = new ArrayList<>(linkCount);
System.err.println("linkCount : " + linkCount);
for (int i = 0; i < linkCount; i++) {
int factory1 = in.nextInt();
int factory2 = in.nextInt();
int distance = in.nextInt();
Edge edge = new Edge(factory1, factory2, distance);
System.err.println("edge : " + edge.toString());
edges.add(edge);
}
// game loop
while (true) {
int entityCount = in.nextInt(); // the number of entities (e.g. factories and troops)
for (int i = 0; i < entityCount; i++) {
int entityId = in.nextInt();
String entityType = in.next();
int arg1 = in.nextInt();
Owner owner = Owner.get(arg1);
int arg2 = in.nextInt();
int arg3 = in.nextInt();
int arg4 = in.nextInt();
int arg5 = in.nextInt();
switch (entityType) {
case "FACTORY":
int nbOfCyborgsInFactory = arg2;
int production = arg3;
Factory factory = getFactory(factories, entityId);
if (factory != null) {
int previousProduction = factory.getProduction();
if (previousProduction != production) {
System.err.println("production changed !");
}
factory.setOwner(owner);
factory.setStockOfCyborgs(nbOfCyborgsInFactory);
factory.setProduction(production);
System.err.println("factory updated : " + factory.toString());
} else {
System.err.println("!!! FACTORY NOT FOUND !!!");
}
break;
case "TROOP":
int factoryIdSource = arg2;
int factoryIdDestination = arg3;
Edge edge = getEdge(edges, factoryIdSource, factoryIdDestination);
int nbOfCyborgsInTroop = arg4;
int remainingDistance = arg5;
Troop troop = new Troop(entityId, owner, nbOfCyborgsInTroop, remainingDistance, factoryIdDestination);
System.err.println("troop created : " + troop.toString());
edge.addTroop(troop);
break;
default:
break;
}
}
calculateOpportunityScore(factories, edges);
calculateDangerScore(factories, edges);
takeADecision(factories, edges);
clearTroopsFrom(edges);
}
}
private static void clearTroopsFrom(List<Edge> edges) {
edges.stream().forEach(edge -> edge.clearTroops());
}
/**
* based on production divided by (distance + the factory's stock of cyborgs) <br/>
* the bigger, the better!
*/
public static void calculateOpportunityScore(List<Factory> factories, List<Edge> edges) {
List<Factory> notOwnedFactories = factories.stream()
.filter(factory1 -> factory1.getOwner() != Owner.ally).collect(Collectors.toList());
for (Factory factory : notOwnedFactories) {
int maxScore = 0;
for (Factory neighbor : getAllyNeighbors(factories, edges, factory)) {
float score = (float) factory.getProduction() / (float) (getEdge(edges, factory.getId(), neighbor.getId()).getDistance() + factory.getStockOfCyborgs());
int intScore = Math.round(score * 10000);
maxScore = Math.max(maxScore, intScore);
}
factory.setOpportunityScore(maxScore);
}
}
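/*
 * Illustration (hypothetical numbers, not taken from a real game state): for a
 * neutral factory with production 3, an ally neighbor at distance 5 and a
 * current stock of 10 cyborgs, the score above is 3 / (5 + 10) = 0.2, i.e. 2000
 * after the x10000 scaling. A factory with the same production at distance 2
 * holding only 4 cyborgs scores 3 / (2 + 4) = 0.5, i.e. 5000, so it is ranked
 * as the better opportunity.
 */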
public static List<Factory> getAllyNeighbors(List<Factory> factories, List<Edge> edges, Factory factory) {
/*
* keep the edges that have this factory at one end,
* then collect, from those edges, the factories other than the reference factory
* */
List<Edge> edgeNeighbors = getConnectedEdges(factory, edges);
List<Factory> allNeighbors = new ArrayList<>();
for (Edge edge : edgeNeighbors) {
allNeighbors.add(
getFactory(
factories,
edge.getFactoryId_A() != factory.getId() ?
edge.getFactoryId_A() : edge.getFactoryId_B())
);
}
return allNeighbors.stream().filter(factory1 -> factory1.getOwner() == Owner.ally).collect(Collectors.toList());
}
/**
* calculate the number of cyborgs needed to defend (ally) or conquer (enemy) a factory <br/>
* based on incoming hostile troops vs incoming ally troops
*/
public static void calculateDangerScore(List<Factory> factories, List<Edge> edges) {
for (Factory factory : factories) {
int score = factory.getOwner() == Owner.ally ? factory.getStockOfCyborgs() * -1 : factory.getStockOfCyborgs();
List<Edge> connectedEdge = getConnectedEdges(factory, edges);
for (Edge edge : connectedEdge) {
List<Troop> hostileTroops = edge.getTroops().stream()
.filter(troop -> troop.getOwner() != Owner.ally).collect(Collectors.toList());
for (Troop troop : hostileTroops) {
score += troop.getNbOfCyborgs();
}
List<Troop> allyTroops = edge.getTroops().stream()
.filter(troop -> troop.getOwner() == Owner.ally).collect(Collectors.toList());
for (Troop troop : allyTroops) {
score -= troop.getNbOfCyborgs();
}
}
factory.setDangerScore(score);
}
}
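/*
 * Illustration (hypothetical numbers): an ally factory holding 10 cyborgs
 * starts at -10; two incoming hostile troops of 8 and 7 cyborgs add +15 and one
 * incoming ally troop of 2 subtracts 2, giving a danger score of +3, i.e. about
 * 3 extra cyborgs are still needed to hold it. A score that stays negative
 * means the factory is currently safe.
 */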
/**
* Any valid action, such as "WAIT" or "MOVE source destination cyborgs" <br/>
* based on production & distance
*/
public static void takeADecision(List<Factory> factories, List<Edge> edges) {
// sort the factories so that the highest opportunity scores come first
List<Factory> opportunitySorted = factories.stream().sorted((o1, o2) -> o2.getOpportunityScore() - o1.getOpportunityScore()).collect(Collectors.toList());
List<Factory> dangerSorted = factories.stream().sorted((o1, o2) -> o2.getDangerScore() - o1.getDangerScore()).collect(Collectors.toList());
// filter by Owner
List<Factory> toDefend = dangerSorted.stream().filter(factory -> factory.getOwner() == Owner.ally).collect(Collectors.toList());
List<Factory> toConquer = opportunitySorted.stream().filter(factory -> factory.getOwner() != Owner.ally).collect(Collectors.toList());
//if (opening(factories, allies)) {
// conquer as fast as possible
StringBuffer action = new StringBuffer();
// offensive
int weaponSize = 2;
for (Factory target : toConquer) {
// take the ally with the most cyborgs... it is the easiest to find
Factory source = getAllyNeighbors(factories, edges, target).stream()
.sorted((o1, o2) -> o2.getStockOfCyborgs() - o1.getStockOfCyborgs())
.findFirst().get();
action.append(move(source.getId(), target.getId(), Math.round((float) source.getStockOfCyborgs() / (float) weaponSize)));
}
// defensive
int shieldSize = 2;
for (Factory target : toDefend) {
Factory source = getAllyNeighbors(factories, edges, target).stream()
.sorted((o1, o2) -> o2.getStockOfCyborgs() - o1.getStockOfCyborgs())
.findFirst().orElse(null);
if (target.getDangerScore() > 0 && source != null) {
action.append(move(source.getId(), target.getId(), Math.round((float) source.getStockOfCyborgs() / (float) shieldSize)));
}
}
/*
} else if (midGame()) {
// build, defend and attack
} else if (endGame()) {
// finish off
}
//*/
System.out.println(action.toString() + "MSG end turn");
}
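/*
 * Illustration (hypothetical factory ids and stocks): if ally factory 1 holds 8
 * cyborgs and attacks factory 3, while ally factory 2 holds 6 cyborgs and
 * reinforces factory 0, the line printed above is
 * "MOVE 1 3 4;MOVE 2 0 3;MSG end turn", since each move sends about half of the
 * source's stock (weaponSize and shieldSize are both 2).
 */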
private static boolean opening(List<Factory> factories, List<Factory> allies) {
// floating-point division is needed here: integer division would always yield 0 or 1
return (double) allies.size() / factories.size() < 0.5;
}
private static boolean midGame() {
return false;
}
private static boolean endGame() {
return false;
}
private static List<Edge> getConnectedEdges(Factory factory, List<Edge> edges) {
return edges.stream()
.filter(edge -> edge.getFactoryId_A() == factory.getId() || edge.getFactoryId_B() == factory.getId())
.collect(Collectors.toList());
}
public static String move(int source, int destination, int cyborgs) {
return "MOVE " + source + " " + destination + " " + cyborgs + ";";
}
}
|
challenge/GhostInTheCell/src/main/java/fr/joand/App.java
|
package fr.joand;
import fr.joand.exception.ElementNotFoundException;
import fr.joand.model.Edge;
import fr.joand.model.Factory;
import fr.joand.model.Owner;
import fr.joand.model.Troop;
import java.util.*;
import java.util.stream.Collectors;
/**
* sources :
* http://www.vogella.com/tutorials/JavaAlgorithmsDijkstra/article.html
* https://mail-attachment.googleusercontent.com/attachment/u/0/?ui=2&ik=26879a90be&view=att&th=13a50271fc085591&attid=0.2&disp=inline&realattid=f_h85xz9341&safe=1&zw&saddbat=ANGjdJ80ygnCoNeDC9IW_BeMrbaHsPMOyCZVHVhJiRIb3Bx7zYzvotbkSu4Ccl7L0bQ1g5MU-3qVVpz8J1r1gDKNr8XmsVwUn2OFyRsKXsmPmN5ByoMPdpR6J6jnoAKCBl0NXk3N3z-z_JNVARkc-Hx5ES9qBL5FA-ZKaTmuzSeKpKxmNSdFFFnv2iKayFpJbUzhB5imQxy4e9jxrpdAxvt7ae91iAwECj5g2gu0ESl05x9Mrb2ydh-gwlMsZq8xyilMa4eAQnXVjdYsQSbQ3KhNgEozfBzXqxRo7TjHNK_ok2Ar0Qy9aPtoLFTdwh4T74kZ-yDJJiul1mPTobasPWv1oKBW76oaOybAqmnkbdbwkcgqMUgYs8r0iF3xsXfOavLfhJsnWi8ElxvrIMpgEsbMsyV9UXRGm3C-wG0QqNWfwVeLanr5ZcIIBlOs1LDY9xYQ-Vs0Xco1QfkCQydWI_vjGsbAAYOc2NtYkwHaayAx4bE12TuTOe1jX3mggpXKLhMoDixeomO4AUrTcX9XFP2UAUl96bBX-sbQmpZQ7Aqy9Q2f4MZZUc4LTksPgL9i5JtNsi6xSRH-GK9BC8RDjz8iih51LSkYfEj2aS_W9sOOdNiztWw7E3i0hyRld-f2bHxxIULepnkirx1aa0rzANR15JE7rLh5aopgb30-rQ
* http://algs4.cs.princeton.edu/code/
* https://mail-attachment.googleusercontent.com/attachment/u/0/?ui=2&ik=26879a90be&view=att&th=144181d1dbd12052&attid=0.1&disp=inline&realattid=f_hrgp4bvu0&safe=1&zw&saddbat=ANGjdJ_JIBSxUhjXUIqeJ0jzNlA-BHjIGbRoerYHZzzbM2NZW4qF3Gpz2UWsx-X0yr39Od8wJPHyPZ2VaL3zEdwzALacEgmHfOYsWz1cQqq2efevk65cT4oxD2xHMb9oyHkHy0IrCoBH_R18XRC08fj6Q-4_D0Fej7Scy1FwLL8l3UJYdTWMtG4cFghJIsFJGxF-FiZurJ7Yc_3ZEj5dA1-KxoJe2pEufyZoDR93C6-VjA-Iw6D7iJfLsh8r4MO1v-dlwQbut9TWVM-ccv5hjxkaT1N96mM90DUiUQV4849iKRbG2x_4mxuLAEGqMizeh7eaTrfZtL8MYqTI-dDQ53i9tbfCesyTExnZyUmDlD_Vxnkq8Sl5NXwxf-VBznUoC-248OsGl43SesmR8Ck5H49iLEG6_h4wrP9jZvPNOK2qYzmmwIVjfimdnPrNOY331mLhXPGvQ_UsTUJ5AC4L2z5IdVGnrwa-moX91ib6znpu7TNmf4yheAZeZiAixXKxmQYNKXlsuiDlYJFOlRGrgglk-y6hOqLIfXwYxGrY8zi7lxTS3Ggr-1nOUox14WwoCftRhiJ86XtwG1vNFC7ll6eO9qgIrDbkJQ7-tf1BTHsgxwf1NVXLw3-qUjJm4QA1RQerQkJnGhXw3mELyCvNrZ3V2QabCsHvEEuBlVAE8w
* https://www.khanacademy.org/computing/computer-science/algorithms
* http://fr.wikihow.com/gagner-une-partie-de-Risk
* StarCraft
*/
public class App {
/**
* @return the full Edge
* @throws ElementNotFoundException if the requested Edge is not found
*/
public static Edge getEdge(List<Edge> edges, int factoryId_A, int factoryId_B) {
return edges.stream()
.filter(edge -> (edge.getFactoryId_A() == factoryId_A && edge.getFactoryId_B() == factoryId_B)
||
(edge.getFactoryId_A() == factoryId_B && edge.getFactoryId_B() == factoryId_A)
)
.findAny().orElseThrow(ElementNotFoundException::new);
}
/**
* @return the requested Factory, or null if not found
*/
public static Factory getFactory(List<Factory> factories, int id) {
return factories.stream()
.filter(factory -> factory.getId() == id)
.findAny().orElse(null);
}
public static void main(String args[]) {
Scanner in = new Scanner(System.in);
int factoryCount = in.nextInt(); // the number of factories
List<Factory> factories = new ArrayList<>(factoryCount);
for (int id = 0; id < factoryCount; id++) {
Factory factory = new Factory(id);
factories.add(factory);
}
System.err.println("factoryCount : " + factoryCount);
int linkCount = in.nextInt(); // the number of links between factories
List<Edge> edges = new ArrayList<>(linkCount);
System.err.println("linkCount : " + linkCount);
for (int i = 0; i < linkCount; i++) {
int factory1 = in.nextInt();
int factory2 = in.nextInt();
int distance = in.nextInt();
Edge edge = new Edge(factory1, factory2, distance);
System.err.println("edge : " + edge.toString());
edges.add(edge);
}
// game loop
while (true) {
int entityCount = in.nextInt(); // the number of entities (e.g. factories and troops)
for (int i = 0; i < entityCount; i++) {
int entityId = in.nextInt();
String entityType = in.next();
int arg1 = in.nextInt();
Owner owner = Owner.get(arg1);
int arg2 = in.nextInt();
int arg3 = in.nextInt();
int arg4 = in.nextInt();
int arg5 = in.nextInt();
switch (entityType) {
case "FACTORY":
int nbOfCyborgsInFactory = arg2;
int production = arg3;
Factory factory = getFactory(factories, entityId);
if (factory != null) {
int previousProduction = factory.getProduction();
if (previousProduction != production) {
System.err.println("production changed !");
}
factory.setOwner(owner);
factory.setStockOfCyborgs(nbOfCyborgsInFactory);
factory.setProduction(production);
System.err.println("factory updated : " + factory.toString());
} else {
System.err.println("!!! FACTORY NOT FOUND !!!");
}
break;
case "TROOP":
int factoryIdSource = arg2;
int factoryIdDestination = arg3;
Edge edge = getEdge(edges, factoryIdSource, factoryIdDestination);
int nbOfCyborgsInTroop = arg4;
int remainingDistance = arg5;
Troop troop = new Troop(entityId, owner, nbOfCyborgsInTroop, remainingDistance, factoryIdDestination);
System.err.println("troop created : " + troop.toString());
edge.addTroop(troop);
break;
default:
break;
}
}
calculateOpportunityScore(factories, edges);
calculateDangerScore(factories, edges);
takeADecision(factories, edges);
clearTroopsFrom(edges);
}
}
private static void clearTroopsFrom(List<Edge> edges) {
edges.stream().forEach(edge -> edge.clearTroops());
}
/**
* based on production divided by (distance + the factory's stock of cyborgs) <br/>
* the bigger, the better!
*/
public static void calculateOpportunityScore(List<Factory> factories, List<Edge> edges) {
List<Factory> notOwnedFactories = factories.stream()
.filter(factory1 -> factory1.getOwner() != Owner.ally).collect(Collectors.toList());
for (Factory factory : notOwnedFactories) {
int maxScore = 0;
for (Factory neighbor : getAllyNeighbors(factories, edges, factory)) {
float score = (float) factory.getProduction() / (float) (getEdge(edges, factory.getId(), neighbor.getId()).getDistance() + factory.getStockOfCyborgs());
int intScore = Math.round(score * 10000);
maxScore = Math.max(maxScore, intScore);
}
factory.setOpportunityScore(maxScore);
}
}
public static List<Factory> getAllyNeighbors(List<Factory> factories, List<Edge> edges, Factory factory) {
/*
* keep the edges that have this factory at one end,
* then collect, from those edges, the factories other than the reference factory
* */
List<Edge> edgeNeighbors = getConnectedEdges(factory, edges);
List<Factory> allNeighbors = new ArrayList<>();
for (Edge edge : edgeNeighbors) {
allNeighbors.add(
getFactory(
factories,
edge.getFactoryId_A() != factory.getId() ?
edge.getFactoryId_A() : edge.getFactoryId_B())
);
}
return allNeighbors.stream().filter(factory1 -> factory1.getOwner() == Owner.ally).collect(Collectors.toList());
}
/**
* calculate the number of cyborgs needed to defend (ally) or conquer (enemy) a factory <br/>
* based on incoming hostile troops vs incoming ally troops
*/
public static void calculateDangerScore(List<Factory> factories, List<Edge> edges) {
for (Factory factory : factories) {
int score = factory.getOwner() == Owner.ally ? factory.getStockOfCyborgs() * -1 : factory.getStockOfCyborgs();
List<Edge> connectedEdge = getConnectedEdges(factory, edges);
for (Edge edge : connectedEdge) {
List<Troop> hostileTroops = edge.getTroops().stream()
.filter(troop -> troop.getOwner() != Owner.ally).collect(Collectors.toList());
for (Troop troop : hostileTroops) {
score += troop.getNbOfCyborgs();
}
List<Troop> allyTroops = edge.getTroops().stream()
.filter(troop -> troop.getOwner() == Owner.ally).collect(Collectors.toList());
for (Troop troop : allyTroops) {
score -= troop.getNbOfCyborgs();
}
}
factory.setDangerScore(score);
}
}
/**
* Any valid action, such as "WAIT" or "MOVE source destination cyborgs" <br/>
* based on production & distance
*/
public static void takeADecision(List<Factory> factories, List<Edge> edges) {
// sort the factories so that the highest opportunity scores come first
List<Factory> opportunitySorted = factories.stream().sorted((o1, o2) -> o2.getOpportunityScore() - o1.getOpportunityScore()).collect(Collectors.toList());
List<Factory> dangerSorted = factories.stream().sorted((o1, o2) -> o2.getDangerScore() - o1.getDangerScore()).collect(Collectors.toList());
// filter by Owner
List<Factory> allies = dangerSorted.stream().filter(factory -> factory.getOwner() == Owner.ally).collect(Collectors.toList());
List<Factory> others = opportunitySorted.stream().filter(factory -> factory.getOwner() != Owner.ally).collect(Collectors.toList());
//if (opening(factories, allies)) {
// conquer as fast as possible
StringBuffer action = new StringBuffer();
// offensive
int weaponSize = 2;
for (Factory target : others) {
// take the one with the most cyborgs...
Factory source = getAllyNeighbors(factories, edges, target).stream()
.sorted((o1, o2) -> o2.getStockOfCyborgs() - o1.getStockOfCyborgs())
.findFirst().get();
action.append(move(source.getId(), target.getId(), Math.round((float) source.getStockOfCyborgs() / (float) weaponSize)));
}
// defensive
int shieldSize = 2;
for (Factory target : allies) {
Factory source = getAllyNeighbors(factories, edges, target).stream()
.sorted((o1, o2) -> o2.getStockOfCyborgs() - o1.getStockOfCyborgs())
.findFirst().orElse(null);
if (target.getDangerScore() > 0 && source != null) {
action.append(move(source.getId(), target.getId(), Math.round((float) source.getStockOfCyborgs() / (float) shieldSize)));
}
}
/*
} else if (midGame()) {
// build, defend and attack
} else if (endGame()) {
// finish off
}
//*/
System.out.println(action.toString() + "MSG end turn");
}
private static boolean opening(List<Factory> factories, List<Factory> allies) {
// floating-point division is needed here: integer division would always yield 0 or 1
return (double) allies.size() / factories.size() < 0.5;
}
private static boolean midGame() {
return false;
}
private static boolean endGame() {
return false;
}
private static List<Edge> getConnectedEdges(Factory factory, List<Edge> edges) {
return edges.stream()
.filter(edge -> edge.getFactoryId_A() == factory.getId() || edge.getFactoryId_B() == factory.getId())
.collect(Collectors.toList());
}
public static String move(int source, int destination, int cyborgs) {
return "MOVE " + source + " " + destination + " " + cyborgs + ";";
}
}
|
rename variables
|
challenge/GhostInTheCell/src/main/java/fr/joand/App.java
|
rename variables
|
|
Java
|
mit
|
a74c66fe63b7546f0d5fa00aab6d4ed8436e2ce8
| 0
|
commercetools/commercetools-payone-integration,commercetools/commercetools-payone-integration
|
package com.commercetools.pspadapter.payone.domain.payone;
import com.commercetools.pspadapter.payone.domain.payone.exceptions.PayoneException;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.http.message.BasicNameValuePair;
import org.junit.Before;
import org.junit.Test;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.ThrowableAssert.catchThrowable;
public class PayonePostServiceImplTest {
public static final String PAYONE_SERVER_API_URL = "http://some.url.org/payone";
private PayonePostServiceImpl payonePostService;
@Before
public void setup() throws PayoneException {
payonePostService = PayonePostServiceImpl.of(PAYONE_SERVER_API_URL);
}
@Test
public void shouldThrowConfigurationExceptionIfUrlIsEmptyOnInitialization() {
final Throwable throwable = catchThrowable(() -> PayonePostServiceImpl.of(""));
assertThat(throwable)
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("The server api url must not be null or empty.");
}
@Test
public void shouldThrowConfigurationExceptionIfUrlIsNullOnInitialization() {
final Throwable throwable = catchThrowable(() -> PayonePostServiceImpl.of(null));
assertThat(throwable)
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("The server api url must not be null or empty.");
}
@Test
public void shouldInitServiceWithCorrectUrl() {
assertThat(payonePostService.getServerAPIURL()).isEqualTo(PAYONE_SERVER_API_URL);
}
@Test
public void shouldBuildMapFromServerResponse() throws UnsupportedEncodingException {
String serverResponse = "paramA=a\nredirecturl=https://www.redirect.de/xxx\nstatus=SUCCESSFUL";
Map<String, String> result = payonePostService.buildMapFromResultParams(serverResponse);
assertThat(result).isNotEmpty();
assertThat(result).hasSize(3);
assertThat(result).containsEntry("paramA", "a");
assertThat(result).containsEntry("redirecturl", "https://www.redirect.de/xxx");
assertThat(result).containsEntry("status", "SUCCESSFUL");
}
@Test
public void shouldReturnEmptyMap() throws UnsupportedEncodingException {
String serverResponse = "=x=";
Map<String, String> result = payonePostService.buildMapFromResultParams(serverResponse);
assertThat(result).isEmpty();
}
@Test
public void getObjectMapWithExpandedLists() {
assertThat(payonePostService.getNameValuePairsWithExpandedLists(ImmutableMap.of())).hasSize(0);
final List<BasicNameValuePair> simple = payonePostService.getNameValuePairsWithExpandedLists(ImmutableMap.of("foo", "bar"));
assertThat(simple).hasSize(1);
assertThat(simple).contains(new BasicNameValuePair("foo", "bar"));
// for now only string/numeric values are tested
final List<BasicNameValuePair> withExpandedLists = payonePostService.getNameValuePairsWithExpandedLists(
ImmutableMap.<String, Object>builder()
.put("foo", "bar")
.put("woot", "wootValue")
.put("list1", ImmutableList.of(1, 2, 3))
.put("a", 42)
.put("empty", "")
.put("boolTrue", true)
.put("boolFalse", false)
.put("listString", new LinkedList<>(ImmutableList.of("ein", "zwei", "drei")))
.put("listDoubles", asList(3.14, 2.71, 9.81))
.build());
assertThat(withExpandedLists).containsExactlyInAnyOrder(
new BasicNameValuePair("foo", "bar"),
new BasicNameValuePair("woot", "wootValue"),
new BasicNameValuePair("a", "42"),
new BasicNameValuePair("empty", ""),
new BasicNameValuePair("boolTrue", "true"),
new BasicNameValuePair("boolFalse", "false"),
new BasicNameValuePair("list1[1]", "1"),
new BasicNameValuePair("list1[2]", "2"),
new BasicNameValuePair("list1[3]", "3"),
new BasicNameValuePair("listString[1]", "ein"),
new BasicNameValuePair("listString[2]", "zwei"),
new BasicNameValuePair("listString[3]", "drei"),
new BasicNameValuePair("listDoubles[1]", "3.14"),
new BasicNameValuePair("listDoubles[2]", "2.71"),
new BasicNameValuePair("listDoubles[3]", "9.81"));
final List<BasicNameValuePair> withEmptyLists = payonePostService.getNameValuePairsWithExpandedLists(
ImmutableMap.of("foo", new ArrayList<>(),
"bar", new LinkedList<>()));
assertThat(withEmptyLists).containsExactlyInAnyOrder(
new BasicNameValuePair("foo[]", ""),
new BasicNameValuePair("bar[]", ""));
}
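/*
 * Reading of the expected behaviour, inferred from the assertions above rather
 * than from the service implementation: scalar values are stringified as-is,
 * list values are expanded into 1-based indexed keys ("list1[1]" -> "1",
 * "list1[2]" -> "2", ...), and an empty list collapses to a single "name[]"
 * pair with an empty value.
 */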
}
|
service/src/test/java/com/commercetools/pspadapter/payone/domain/payone/PayonePostServiceImplTest.java
|
package com.commercetools.pspadapter.payone.domain.payone;
import com.commercetools.pspadapter.payone.domain.payone.exceptions.PayoneException;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.http.message.BasicNameValuePair;
import org.junit.Before;
import org.junit.Test;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.ThrowableAssert.catchThrowable;
public class PayonePostServiceImplTest {
public static final String PAYONE_SERVER_API_URL = "http://some.url.org/payone";
private PayonePostServiceImpl payonePostService;
@Before
public void setup() throws PayoneException {
payonePostService = PayonePostServiceImpl.of(PAYONE_SERVER_API_URL);
}
@Test
public void shouldThrowConfigurationExceptionIfUrlIsEmptyOnInitialization() {
final Throwable throwable = catchThrowable(() -> PayonePostServiceImpl.of(""));
assertThat(throwable)
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("The server api url must not be null or empty.");
}
@Test
public void shouldThrowConfigurationExceptionIfUrlIsNullOnInitialization() {
final Throwable throwable = catchThrowable(() -> PayonePostServiceImpl.of(null));
assertThat(throwable)
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("The server api url must not be null or empty.");
}
@Test
public void shouldInitServiceWithCorrectUrl() {
assertThat(payonePostService.getServerAPIURL()).isEqualTo(PAYONE_SERVER_API_URL);
}
@Test
public void shouldBuildMapFromServerResponse() throws UnsupportedEncodingException {
String serverResponse = "paramA=a\nredirecturl=https://www.redirect.de/xxx\nstatus=SUCCESSFUL";
Map<String, String> result = payonePostService.buildMapFromResultParams(serverResponse);
assertThat(result).isNotEmpty();
assertThat(result).hasSize(3);
assertThat(result).containsEntry("paramA", "a");
assertThat(result).containsEntry("redirecturl", "https://www.redirect.de/xxx");
assertThat(result).containsEntry("status", "SUCCESSFUL");
}
@Test
public void shouldReturnEmptyMap() throws UnsupportedEncodingException {
String serverResponse = "=x=";
Map<String, String> result = payonePostService.buildMapFromResultParams(serverResponse);
assertThat(result).isEmpty();
}
@Test
public void getObjectMapWithExpandedLists() {
assertThat(payonePostService.getNameValuePairsWithExpandedLists(ImmutableMap.of())).hasSize(0);
final List<BasicNameValuePair> simple = payonePostService.getNameValuePairsWithExpandedLists(ImmutableMap.of("foo", "bar"));
assertThat(simple).hasSize(1);
assertThat(simple).contains(new BasicNameValuePair("foo", "bar"));
// for now only string/numeric values are tested
final List<BasicNameValuePair> withExpandedLists = payonePostService.getNameValuePairsWithExpandedLists(
ImmutableMap.<String, Object>builder()
.put("foo", "bar")
.put("woot", "wootValue")
.put("list1", ImmutableList.of(1, 2, 3))
.put("a", 42)
.put("empty", "")
.put("boolTrue", true)
.put("boolFalse", false)
.put("listString", new LinkedList<>(ImmutableList.of("ein", "zwei", "drei")))
.put("listDoubles", asList(3.14, 2.71, 9.81))
.build());
assertThat(withExpandedLists).hasSize(15);
assertThat(withExpandedLists).contains(new BasicNameValuePair("foo", "bar"));
assertThat(withExpandedLists).contains(new BasicNameValuePair("woot", "wootValue"));
assertThat(withExpandedLists).contains(new BasicNameValuePair("a", "42"));
assertThat(withExpandedLists).contains(new BasicNameValuePair("empty", ""));
assertThat(withExpandedLists).contains(new BasicNameValuePair("boolTrue", "true"));
assertThat(withExpandedLists).contains(new BasicNameValuePair("boolFalse", "false"));
assertThat(withExpandedLists).contains(new BasicNameValuePair("list1[1]", "1"));
assertThat(withExpandedLists).contains(new BasicNameValuePair("list1[2]", "2"));
assertThat(withExpandedLists).contains(new BasicNameValuePair("list1[3]", "3"));
assertThat(withExpandedLists).contains(new BasicNameValuePair("listString[1]", "ein"));
assertThat(withExpandedLists).contains(new BasicNameValuePair("listString[2]", "zwei"));
assertThat(withExpandedLists).contains(new BasicNameValuePair("listString[3]", "drei"));
assertThat(withExpandedLists).contains(new BasicNameValuePair("listDoubles[1]", "3.14"));
assertThat(withExpandedLists).contains(new BasicNameValuePair("listDoubles[2]", "2.71"));
assertThat(withExpandedLists).contains(new BasicNameValuePair("listDoubles[3]", "9.81"));
final List<BasicNameValuePair> withEmptyLists = payonePostService.getNameValuePairsWithExpandedLists(
ImmutableMap.of("foo", new ArrayList<>(),
"bar", new LinkedList<>()));
assertThat(withEmptyLists.size()).isEqualTo(2);
assertThat(withEmptyLists).contains(new BasicNameValuePair("foo[]", ""));
assertThat(withEmptyLists).contains(new BasicNameValuePair("bar[]", ""));
}
}
|
#185: minor test update
|
service/src/test/java/com/commercetools/pspadapter/payone/domain/payone/PayonePostServiceImplTest.java
|
#185: minor test update
|
|
Java
|
mit
|
b92c401a0705bdbbb4a1637b5b9adc559d85e67a
| 0
|
bcvsolutions/CzechIdMng,bcvsolutions/CzechIdMng,bcvsolutions/CzechIdMng,bcvsolutions/CzechIdMng
|
package eu.bcvsolutions.idm.vs.service.impl;
import java.io.Serializable;
import java.text.MessageFormat;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import eu.bcvsolutions.idm.InitDemoData;
import eu.bcvsolutions.idm.acc.dto.SysSchemaAttributeDto;
import eu.bcvsolutions.idm.acc.dto.SysSchemaObjectClassDto;
import eu.bcvsolutions.idm.acc.dto.SysSystemAttributeMappingDto;
import eu.bcvsolutions.idm.acc.dto.SysSystemDto;
import eu.bcvsolutions.idm.acc.dto.SysSystemMappingDto;
import eu.bcvsolutions.idm.acc.dto.filter.SysSchemaAttributeFilter;
import eu.bcvsolutions.idm.acc.dto.filter.SysSystemEntityFilter;
import eu.bcvsolutions.idm.acc.dto.filter.SysSystemMappingFilter;
import eu.bcvsolutions.idm.acc.service.api.SysSchemaAttributeService;
import eu.bcvsolutions.idm.acc.service.api.SysSystemAttributeMappingService;
import eu.bcvsolutions.idm.acc.service.api.SysSystemEntityService;
import eu.bcvsolutions.idm.acc.service.api.SysSystemMappingService;
import eu.bcvsolutions.idm.acc.service.api.SysSystemService;
import eu.bcvsolutions.idm.core.api.domain.IdentityState;
import eu.bcvsolutions.idm.core.api.dto.IdmIdentityDto;
import eu.bcvsolutions.idm.core.api.dto.IdmRoleDto;
import eu.bcvsolutions.idm.core.api.exception.ForbiddenEntityException;
import eu.bcvsolutions.idm.core.api.service.IdmIdentityService;
import eu.bcvsolutions.idm.core.api.service.IdmRoleService;
import eu.bcvsolutions.idm.core.eav.api.dto.IdmFormAttributeDto;
import eu.bcvsolutions.idm.core.eav.api.dto.IdmFormDefinitionDto;
import eu.bcvsolutions.idm.core.eav.api.service.FormService;
import eu.bcvsolutions.idm.core.eav.api.service.IdmFormAttributeService;
import eu.bcvsolutions.idm.core.security.api.domain.GuardedString;
import eu.bcvsolutions.idm.core.security.api.domain.IdmBasePermission;
import eu.bcvsolutions.idm.core.security.api.dto.LoginDto;
import eu.bcvsolutions.idm.core.security.api.service.LoginService;
import eu.bcvsolutions.idm.ic.api.IcConnectorConfiguration;
import eu.bcvsolutions.idm.ic.api.IcConnectorObject;
import eu.bcvsolutions.idm.ic.api.IcObjectClass;
import eu.bcvsolutions.idm.ic.filter.api.IcResultsHandler;
import eu.bcvsolutions.idm.ic.impl.IcObjectClassImpl;
import eu.bcvsolutions.idm.ic.service.api.IcConnectorFacade;
import eu.bcvsolutions.idm.test.api.AbstractIntegrationTest;
import eu.bcvsolutions.idm.vs.TestHelper;
import eu.bcvsolutions.idm.vs.connector.basic.BasicVirtualConfiguration;
import eu.bcvsolutions.idm.vs.domain.VirtualSystemGroupPermission;
import eu.bcvsolutions.idm.vs.domain.VsOperationType;
import eu.bcvsolutions.idm.vs.domain.VsRequestState;
import eu.bcvsolutions.idm.vs.domain.VsValueChangeType;
import eu.bcvsolutions.idm.vs.dto.VsAccountDto;
import eu.bcvsolutions.idm.vs.dto.VsAttributeDto;
import eu.bcvsolutions.idm.vs.dto.VsConnectorObjectDto;
import eu.bcvsolutions.idm.vs.dto.VsRequestDto;
import eu.bcvsolutions.idm.vs.dto.VsSystemDto;
import eu.bcvsolutions.idm.vs.dto.filter.VsRequestFilter;
import eu.bcvsolutions.idm.vs.entity.VsAccount;
import eu.bcvsolutions.idm.vs.entity.VsRequest;
import eu.bcvsolutions.idm.vs.evaluator.VsRequestByImplementerEvaluator;
import eu.bcvsolutions.idm.vs.service.api.VsAccountService;
import eu.bcvsolutions.idm.vs.service.api.VsRequestService;
/**
* Virtual system request test
* + request filters
*
* @author Svanda
* @author Patrik Stloukal
*/
@Component
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class DefaultVsRequestServiceIntegrationTest extends AbstractIntegrationTest {
private static final String USER_ONE_NAME = "vsUserOne";
private static final String USER_IMPLEMENTER_NAME = "vsUserImplementer";
private static final String ROLE_ONE_NAME = "vsRoleOne";
private static final String USER_ONE_CHANGED_NAME = "vsUserOneChanged";
@Autowired
private TestHelper helper;
@Autowired
private VsRequestService requestService;
@Autowired
private VsAccountService accountService;
@Autowired
private FormService formService;
@Autowired
private LoginService loginService;
@Autowired
private IdmIdentityService identityService;
@Autowired
private IdmRoleService roleService;
@Autowired
private SysSystemService systemService;
@Autowired
private SysSchemaAttributeService schemaAttributeService;
@Autowired
private SysSystemAttributeMappingService systemAttributeMappingService;
@Autowired
private SysSystemMappingService systemMappingService;
@Autowired
private IdmFormAttributeService formAttributeService;
@Autowired
private SysSystemEntityService systemEntityService;
@Autowired
private IcConnectorFacade connectorFacade;
@Before
public void init() {
loginAsAdmin();
}
@After
public void logout() {
this.deleteAll(USER_ONE_NAME, USER_ONE_CHANGED_NAME, USER_IMPLEMENTER_NAME, ROLE_ONE_NAME);
super.logout();
}
@Test
public void createAndRealizeRequestTest() {
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
this.assignRoleSystem(system, helper.createIdentity(USER_ONE_NAME), ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
// We try to realize the request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
request = requestService.realize(request);
Assert.assertEquals(VsRequestState.REALIZED, request.getState());
account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNotNull("Account cannot be null, because request was realized!", account);
}
@Test
public void disableRequestTest() {
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
IdmIdentityDto identity = helper.createIdentity(USER_ONE_NAME);
this.assignRoleSystem(system, identity, ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
// We try to realize the request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
request = requestService.realize(request);
Assert.assertEquals(VsRequestState.REALIZED, request.getState());
account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNotNull("Account cannot be null, because request was realized!", account);
Assert.assertEquals(Boolean.TRUE, account.isEnable());
super.logout();
loginAsAdmin();
// Disable the identity
identity.setState(IdentityState.DISABLED_MANUALLY);
identityService.save(identity);
// Find created requests
requests = requestService.find(requestFilter, null).getContent().stream()
.filter(r -> VsRequestState.IN_PROGRESS == r.getState()).collect(Collectors.toList());
Assert.assertEquals(1, requests.size());
request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.UPDATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
// We try to realize the request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
request = requestService.realize(request);
Assert.assertEquals(VsRequestState.REALIZED, request.getState());
account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNotNull("Account cannot be null, because request was realized!", account);
Assert.assertEquals(Boolean.FALSE, account.isEnable());
}
@Test
public void systemAccountFilterTest() {
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
this.assignRoleSystem(system, helper.createIdentity(USER_ONE_NAME), ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
// We try to realize the request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
request = requestService.realize(request);
Assert.assertEquals(VsRequestState.REALIZED, request.getState());
account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNotNull("Account cannot be null, because request was realized!", account);
IcConnectorConfiguration configuration = systemService.getConnectorConfiguration(system);
IcObjectClass objectClass = new IcObjectClassImpl("__ACCOUNT__");
List<String> uids = new ArrayList<>();
connectorFacade.search(system.getConnectorInstance(), configuration, objectClass, null, new IcResultsHandler() {
@Override
public boolean handle(IcConnectorObject connectorObject) {
uids.add(connectorObject.getUidValue());
return true;
}
});
Assert.assertEquals(1, uids.size());
Assert.assertEquals(USER_ONE_NAME, uids.get(0));
}
@Test
public void createAndCancelRequestTest() {
String reason = "cancel \"request\" reason!";
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
this.assignRoleSystem(system, helper.createIdentity(USER_ONE_NAME), ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
// We try to cancel the request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
request = requestService.cancel(request, reason);
Assert.assertEquals(VsRequestState.CANCELED, request.getState());
Assert.assertEquals(reason, request.getReason());
account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was canceled!", account);
}
@Test(expected = ForbiddenEntityException.class)
public void realizeRequestWithoutRightTest() {
String reason = "cancel \"request\" reason!";
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
this.assignRoleSystem(system, helper.createIdentity(USER_ONE_NAME), ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
// We try to cancel the request
super.logout();
loginService.login(new LoginDto(USER_ONE_NAME, new GuardedString("password")));
request = requestService.cancel(request, reason);
}
@Test
public void createMoreRequestsTest() {
String changed = "changed";
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
this.assignRoleSystem(system, helper.createIdentity(USER_ONE_NAME), ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
IdmIdentityDto userOne = identityService.getByUsername(USER_ONE_NAME);
userOne.setFirstName(changed);
userOne.setLastName(changed);
identityService.save(userOne);
// Duplicated save ... does not invoke provisioning
identityService.save(userOne);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(2, requests.size());
VsRequestDto changeRequest = requests.stream().filter(
req -> VsRequestState.IN_PROGRESS == req.getState() && VsOperationType.UPDATE == req.getOperationType())
.findFirst().orElse(null);
Assert.assertNotNull("Request with change not found!", changeRequest);
}
@Test
public void realizeUpdateAndDeleteRequestsTest() {
String changed = "changed";
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
this.assignRoleSystem(system, helper.createIdentity(USER_ONE_NAME), ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
IdmIdentityDto userOne = identityService.getByUsername(USER_ONE_NAME);
userOne.setFirstName(changed);
userOne.setLastName(changed);
identityService.save(userOne);
// Delete identity
identityService.delete(userOne);
// Test read rights (no requests can be returned for UserOne)
IdmIdentityDto userTwo = helper.createIdentity("vsUserTwo");
super.logout();
loginService.login(new LoginDto(userTwo.getUsername(), new GuardedString("password")));
requests = requestService.find(requestFilter, null, IdmBasePermission.READ).getContent();
Assert.assertEquals("We found request without correct rights!", 0, requests.size());
// Test read rights (3 requests must be returned for UserImplementer)
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
requests = requestService.find(requestFilter, null, IdmBasePermission.READ).getContent();
Assert.assertEquals(3, requests.size());
VsRequestDto changeRequest = requests.stream().filter(
req -> VsRequestState.IN_PROGRESS == req.getState() && VsOperationType.UPDATE == req.getOperationType())
.findFirst().orElse(null);
Assert.assertNotNull("Request with change not found!", changeRequest);
VsRequestDto deleteRequest = requests.stream().filter(
req -> VsRequestState.IN_PROGRESS == req.getState() && VsOperationType.DELETE == req.getOperationType())
.findFirst().orElse(null);
Assert.assertNotNull("Request with delete not found!", deleteRequest);
VsRequestDto createRequest = requests.stream().filter(
req -> VsRequestState.IN_PROGRESS == req.getState() && VsOperationType.CREATE == req.getOperationType())
.findFirst().orElse(null);
Assert.assertNotNull("Request with create not found!", createRequest);
// Realize create request
request = requestService.realize(createRequest);
// Realize update request
request = requestService.realize(changeRequest);
// Realize delete request
request = requestService.realize(deleteRequest);
// Find only archived
requestFilter.setOnlyArchived(Boolean.TRUE);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(3, requests.size());
boolean foundNotRealized = requests.stream().filter(req -> VsRequestState.REALIZED != req.getState())
.findFirst().isPresent();
Assert.assertTrue("Found not realized requests!", !foundNotRealized);
}
@Test
public void checkMultivalueInWishObjectTest() {
String ldapGroupsName = "ldapGroups";
String changed = "changed";
List<String> attributes = new ArrayList<>(Lists.newArrayList(BasicVirtualConfiguration.DEFAULT_ATTRIBUTES));
attributes.add(ldapGroupsName);
// Create virtual system with extra attribute (ldapGroups)
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, attributes);
// Find the attribute definition for ldapGroups and set it to multivalue
String virtualSystemKey = MessageFormat.format("{0}:systemId={1}", system.getConnectorKey().getFullName(),
system.getId().toString());
String type = VsAccount.class.getName();
IdmFormDefinitionDto definition = this.formService.getDefinition(type, virtualSystemKey);
IdmFormAttributeDto ldapGroupsFormAttr = formAttributeService.findAttribute(VsAccount.class.getName(),
definition.getCode(), ldapGroupsName);
Assert.assertNotNull("Ldap attribute muste exist!", ldapGroupsFormAttr);
ldapGroupsFormAttr.setMultiple(true);
// Change the name of this attribute. We want to check that the logic does not depend on the attribute name.
ldapGroupsFormAttr.setName(helper.createName());
formService.saveAttribute(ldapGroupsFormAttr);
// Generate the schema for the system (we need to propagate the multivalue setting)
SysSchemaObjectClassDto schema = systemService.generateSchema(system).get(0);
SysSchemaAttributeFilter schemaAttributeFilter = new SysSchemaAttributeFilter();
schemaAttributeFilter.setSystemId(system.getId());
List<SysSchemaAttributeDto> schemaAttributes = schemaAttributeService.find(schemaAttributeFilter, null)
.getContent();
SysSystemMappingFilter systemMappingFilter = new SysSystemMappingFilter();
systemMappingFilter.setSystemId(system.getId());
systemMappingFilter.setObjectClassId(schema.getId());
SysSystemMappingDto mapping = systemMappingService.find(systemMappingFilter, null).getContent().get(0);
for (SysSchemaAttributeDto schemaAttr : schemaAttributes) {
if (ldapGroupsName.equals(schemaAttr.getName())) {
SysSystemAttributeMappingDto attributeMapping = new SysSystemAttributeMappingDto();
attributeMapping.setUid(false);
attributeMapping.setEntityAttribute(false);
attributeMapping.setExtendedAttribute(true);
attributeMapping.setIdmPropertyName(ldapGroupsName);
attributeMapping.setName(schemaAttr.getName());
attributeMapping.setSchemaAttribute(schemaAttr.getId());
attributeMapping.setSystemMapping(mapping.getId());
systemAttributeMappingService.save(attributeMapping);
}
}
IdmIdentityDto userOne = helper.createIdentity(USER_ONE_NAME);
List<Serializable> initList = ImmutableList.of("TEST1", "TEST2", "TEST3");
formService.saveValues(userOne, ldapGroupsName, initList);
this.assignRoleSystem(system, userOne, ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto createRequest = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, createRequest.getUid());
Assert.assertEquals(VsOperationType.CREATE, createRequest.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, createRequest.getState());
VsConnectorObjectDto wish = requestService.getWishConnectorObject(createRequest);
boolean findAttributeWithouChange = wish.getAttributes().stream().filter(attribute -> !attribute.isChanged())
.findFirst().isPresent();
Assert.assertTrue(!findAttributeWithouChange);
// Check that the ldapGroups attribute exists with three values
VsAttributeDto ldapGroupAttribute = wish.getAttributes().stream()
.filter(attribute -> ldapGroupsName.equals(attribute.getName())).findFirst().get();
Assert.assertTrue(ldapGroupAttribute.isMultivalue());
Assert.assertEquals(3, ldapGroupAttribute.getValues().size());
// Change multivalue attribute
List<Serializable> changeList = ImmutableList.of("TEST1", changed, "TEST3");
formService.saveValues(userOne, ldapGroupsName, changeList);
// Invoke provisioning
identityService.save(userOne);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(2, requests.size());
VsRequestDto changeRequest = requests.stream().filter(req -> VsOperationType.UPDATE == req.getOperationType())
.findFirst().get();
wish = requestService.getWishConnectorObject(changeRequest);
ldapGroupAttribute = wish.getAttributes().stream()
.filter(attribute -> ldapGroupsName.equals(attribute.getName())).findFirst().get();
Assert.assertTrue(ldapGroupAttribute.isMultivalue());
// Wish must contain three values (all added) ... because the previous create
// request has not been realized yet. Wish shows changes versus the real state in
// VsAccount.
Assert.assertEquals(3, ldapGroupAttribute.getValues().size());
// We realize the create request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
requestService.realize(createRequest);
// Refresh wish
wish = requestService.getWishConnectorObject(changeRequest);
ldapGroupAttribute = wish.getAttributes().stream()
.filter(attribute -> ldapGroupsName.equals(attribute.getName())).findFirst().get();
Assert.assertTrue(ldapGroupAttribute.isMultivalue());
// Wish must contain four values ... two without change, one deleted and
// one added value
Assert.assertEquals(4, ldapGroupAttribute.getValues().size());
// Find unchanged value
boolean findCorrectTest1Value = ldapGroupAttribute
.getValues().stream().filter(value -> value.getValue().equals(initList.get(0))
&& value.getOldValue().equals(initList.get(0)) && value.getChange() == null)
.findFirst().isPresent();
Assert.assertTrue(findCorrectTest1Value);
// Find deleted value
boolean findCorrectDeletedTest2Value = ldapGroupAttribute.getValues().stream()
.filter(value -> value.getValue().equals(initList.get(1)) && value.getOldValue().equals(initList.get(1))
&& VsValueChangeType.REMOVED == value.getChange())
.findFirst().isPresent();
Assert.assertTrue(findCorrectDeletedTest2Value);
// Find added value
boolean findCorrectCreatedChangedValue = ldapGroupAttribute.getValues().stream()
.filter(value -> value.getValue().equals(changed) && value.getOldValue() == null
&& VsValueChangeType.ADDED == value.getChange())
.findFirst().isPresent();
Assert.assertTrue(findCorrectCreatedChangedValue);
}
@Test
public void checkSinglevalueInWishObjectTest() {
String changed = "changed";
String firstName = "firstName";
String lastName = "lastName";
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
IdmIdentityDto userOne = helper.createIdentity(USER_ONE_NAME);
userOne.setFirstName(firstName);
userOne.setLastName(lastName);
identityService.save(userOne);
this.assignRoleSystem(system, userOne, ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
requestFilter.setState(VsRequestState.IN_PROGRESS);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto createRequest = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, createRequest.getUid());
Assert.assertEquals(VsOperationType.CREATE, createRequest.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, createRequest.getState());
VsConnectorObjectDto wish = requestService.getWishConnectorObject(createRequest);
boolean findAttributeWithouChange = wish.getAttributes().stream().filter(attribute -> !attribute.isChanged())
.findFirst().isPresent();
Assert.assertTrue(!findAttributeWithouChange);
// Change singlevalue attributes
userOne.setFirstName(changed);
userOne.setLastName(changed);
// Invoke provisioning
identityService.save(userOne);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(2, requests.size());
// We realize the create request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
requestService.realize(createRequest);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
// get wish
wish = requestService.getWishConnectorObject(requests.get(0));
Assert.assertEquals(2, wish.getAttributes().stream().filter(attr -> attr.isChanged()).count());
// Find change for firstName value
boolean findCorrectChangedFirstName = wish.getAttributes().stream()
.filter(attr -> attr.getValue() != null && attr.getValue().getValue().equals(changed)
&& attr.getValue().getOldValue().equals(firstName)
&& VsValueChangeType.UPDATED == attr.getValue().getChange())
.findFirst().isPresent();
Assert.assertTrue(findCorrectChangedFirstName);
// Find change for lastName value
boolean findCorrectChangedLastName = wish.getAttributes().stream()
.filter(attr -> attr.getValue() != null && attr.getValue().getValue().equals(changed)
&& attr.getValue().getOldValue().equals(lastName)
&& VsValueChangeType.UPDATED == attr.getValue().getChange())
.findFirst().isPresent();
Assert.assertTrue(findCorrectChangedLastName);
}
@Test
public void changeUidTest() {
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
IdmIdentityDto userOne = helper.createIdentity(USER_ONE_NAME);
identityService.save(userOne);
this.assignRoleSystem(system, userOne, ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
requestFilter.setState(VsRequestState.IN_PROGRESS);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto createRequest = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, createRequest.getUid());
Assert.assertEquals(VsOperationType.CREATE, createRequest.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, createRequest.getState());
VsConnectorObjectDto wish = requestService.getWishConnectorObject(createRequest);
boolean findAttributeWithouChange = wish.getAttributes().stream().filter(attribute -> !attribute.isChanged())
.findFirst().isPresent();
Assert.assertTrue(!findAttributeWithouChange);
// Change username attributes
userOne.setUsername(USER_ONE_CHANGED_NAME);
// Invoke provisioning
identityService.save(userOne);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(2, requests.size());
// We realize the create request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
requestService.realize(createRequest);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
// get wish
wish = requestService.getWishConnectorObject(requests.get(0));
Assert.assertEquals(1, wish.getAttributes().stream().filter(attr -> attr.isChanged()).count());
// Find change for firstName value
boolean findCorrectChangedUserName = wish.getAttributes().stream()
.filter(attr -> attr.getValue().getValue().equals(USER_ONE_CHANGED_NAME)
&& attr.getValue().getOldValue().equals(USER_ONE_NAME)
&& VsValueChangeType.UPDATED == attr.getValue().getChange())
.findFirst().isPresent();
Assert.assertTrue(findCorrectChangedUserName);
SysSystemEntityFilter systemEntityFilter = new SysSystemEntityFilter();
systemEntityFilter.setSystemId(system.getId());
systemEntityFilter.setUid(USER_ONE_NAME);
boolean oldUserNameExist = !systemEntityService.find(systemEntityFilter, null).getContent().isEmpty();
Assert.assertTrue(oldUserNameExist);
// Realize change username
requestService.realize(requests.get(0));
// We expects change UID in SystemEntity.UID
oldUserNameExist = !systemEntityService.find(systemEntityFilter, null).getContent().isEmpty();
Assert.assertTrue(!oldUserNameExist);
systemEntityFilter.setUid(USER_ONE_CHANGED_NAME);
boolean changedUserNameExist = !systemEntityService.find(systemEntityFilter, null).getContent().isEmpty();
Assert.assertTrue(changedUserNameExist);
}
@Test
public void dateTest() {
SysSystemDto virtualSystem = helper.createVirtualSystem(helper.createName());
IdmRoleDto roleOne = helper.createRole();
IdmIdentityDto identity = helper.createIdentity((GuardedString) null);
// Assign system to role
helper.createRoleSystem(roleOne, virtualSystem);
helper.assignRoles(helper.getPrimeContract(identity.getId()), false, roleOne);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(virtualSystem.getId());
requestFilter.setUid(identity.getUsername());
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
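// A created-date window that covers now must match the single request; windows
// entirely in the future or entirely in the past must match nothing.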
requestFilter.setCreatedAfter(ZonedDateTime.now().minusSeconds(10));
requestFilter.setCreatedBefore(ZonedDateTime.now());
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
requestFilter.setCreatedAfter(ZonedDateTime.now().plusMinutes(10));
requestFilter.setCreatedBefore(ZonedDateTime.now().plusMinutes(11));
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(0, requests.size());
requestFilter.setCreatedAfter(ZonedDateTime.now().minusMinutes(10));
requestFilter.setCreatedBefore(ZonedDateTime.now().minusMinutes(9));
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(0, requests.size());
}
@Test
public void modifiedDateTest() {
SysSystemDto virtualSystem = helper.createVirtualSystem(helper.createName());
IdmRoleDto roleOne = helper.createRole();
IdmIdentityDto identity = helper.createIdentity((GuardedString) null);
// Assign system to role
helper.createRoleSystem(roleOne, virtualSystem);
helper.assignRoles(helper.getPrimeContract(identity.getId()), false, roleOne);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(virtualSystem.getId());
requestFilter.setUid(identity.getUsername());
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
requestFilter.setModifiedAfter(ZonedDateTime.now().minusSeconds(10));
requestFilter.setModifiedBefore(ZonedDateTime.now());
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
requestFilter.setModifiedAfter(ZonedDateTime.now().plusMinutes(10));
requestFilter.setModifiedBefore(ZonedDateTime.now().plusMinutes(11));
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(0, requests.size());
requestFilter.setModifiedAfter(ZonedDateTime.now().minusMinutes(10));
requestFilter.setModifiedBefore(ZonedDateTime.now().minusMinutes(9));
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(0, requests.size());
}
@Test
public void systemTest() {
SysSystemDto virtualSystem = helper.createVirtualSystem(helper.createName());
IdmRoleDto roleOne = helper.createRole(helper.createName());
IdmIdentityDto identity = helper.createIdentity((GuardedString) null);
IdmIdentityDto identity2 = helper.createIdentity((GuardedString) null);
IdmIdentityDto identity3 = helper.createIdentity((GuardedString) null);
IdmIdentityDto identity4 = helper.createIdentity((GuardedString) null);
// Assign system to role
helper.createRoleSystem(roleOne, virtualSystem);
helper.assignRoles(helper.getPrimeContract(identity.getId()), false, roleOne);
helper.assignRoles(helper.getPrimeContract(identity2.getId()), false, roleOne);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(virtualSystem.getId());
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(2, requests.size());
helper.assignRoles(helper.getPrimeContract(identity3.getId()), false, roleOne);
helper.assignRoles(helper.getPrimeContract(identity4.getId()), false, roleOne);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(4, requests.size());
requestFilter.setUid(identity.getUsername());
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());// identity uid filter test
}
@Test
public void filterTest() {
SysSystemDto virtualSystem = helper.createVirtualSystem(helper.createName());
IdmRoleDto roleOne = helper.createRole();
IdmIdentityDto identity = helper.createIdentity((GuardedString) null);
IdmIdentityDto identity2 = helper.createIdentity((GuardedString) null);
IdmIdentityDto identity3 = helper.createIdentity((GuardedString) null);
IdmIdentityDto identity4 = helper.createIdentity((GuardedString) null);
// Assign system to role
helper.createRoleSystem(roleOne, virtualSystem);
helper.assignRoles(helper.getPrimeContract(identity.getId()), false, roleOne);
helper.assignRoles(helper.getPrimeContract(identity2.getId()), false, roleOne);
helper.assignRoles(helper.getPrimeContract(identity3.getId()), false, roleOne);
helper.assignRoles(helper.getPrimeContract(identity4.getId()), false, roleOne);
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(virtualSystem.getId());
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(4, requests.size());
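// Realize one request: it becomes archived (REALIZED) while the other three stay IN_PROGRESS.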
VsRequestDto request = requests.get(0);
requestService.realize(request);
Assert.assertEquals(VsRequestState.REALIZED, request.getState());
requestFilter.setOnlyArchived(true);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
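// Drop the archive filter; the state and connector-key filters must both match the three in-progress requests.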
requestFilter.setOnlyArchived(null);
requestFilter.setState(VsRequestState.IN_PROGRESS);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(3, requests.size());
requestFilter.setConnectorKey(request.getConnectorKey());
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(3, requests.size());
}
@Test
public void createAndRealizeRequestWithNoteTest() {
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
this.assignRoleSystem(system, helper.createIdentity(USER_ONE_NAME), ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
// We try to realize the request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
String note = helper.createName();
request = requestService.realize(request, note);
Assert.assertEquals(VsRequestState.REALIZED, request.getState());
account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNotNull("Account cannot be null, because request was realized!", account);
request = requestService.get(request.getId());
Assert.assertEquals(note, request.getReason());
}
/**
* Creates a role, assigns the given system to it and assigns the role to the given user.
*
* @param system system to assign to the new role
* @param userOne user who receives the role
* @param roleOneName code of the role to create
* @return the given system
*/
public SysSystemDto assignRoleSystem(SysSystemDto system, IdmIdentityDto userOne, String roleOneName) {
IdmRoleDto roleOne = helper.createRole(roleOneName);
// Create policy for vs evaluator and user role
helper.createAuthorizationPolicy(this.createDefaultRole().getId(),
VirtualSystemGroupPermission.VSREQUEST, VsRequest.class, VsRequestByImplementerEvaluator.class,
IdmBasePermission.ADMIN);
// Assign system to role
helper.createRoleSystem(roleOne, system);
helper.assignRoles(helper.getPrimeContract(userOne.getId()), false, roleOne);
return system;
}
public SysSystemDto createVirtualSystem(String userImplementerName, List<String> attributes) {
IdmIdentityDto userImplementer = helper.createIdentity(userImplementerName);
VsSystemDto config = new VsSystemDto();
config.setName("vsSystemOne" + new Date().getTime());
config.setImplementers(ImmutableList.of(userImplementer.getId()));
if (attributes != null) {
config.setAttributes(attributes);
}
SysSystemDto system = helper.createVirtualSystem(config);
Assert.assertNotNull(system);
return system;
}
public void deleteAll(String userOneName, String userOneChangedName, String userImplementerName,
String roleOneName) {
if (identityService.getByUsername(userOneName) != null) {
identityService.delete(identityService.getByUsername(userOneName));
}
if (identityService.getByUsername(userOneChangedName) != null) {
identityService.delete(identityService.getByUsername(userOneChangedName));
}
if (identityService.getByUsername(userImplementerName) != null) {
identityService.delete(identityService.getByUsername(userImplementerName));
}
if (roleService.getByCode(roleOneName) != null) {
roleService.delete(roleService.getByCode(roleOneName));
}
}
/**
* Returns the role with the code defined in {@link InitDemoData#DEFAULT_ROLE_NAME}, creating it when it does not exist yet.
*
* @return the default role
*/
public IdmRoleDto createDefaultRole() {
IdmRoleDto defaultRole = roleService.getByCode(InitDemoData.DEFAULT_ROLE_NAME);
if (defaultRole != null) {
return defaultRole;
}
//
defaultRole = new IdmRoleDto();
defaultRole.setCode(InitDemoData.DEFAULT_ROLE_NAME);
return roleService.save(defaultRole);
}
}
|
Realization/backend/vs/src/test/java/eu/bcvsolutions/idm/vs/service/impl/DefaultVsRequestServiceIntegrationTest.java
|
package eu.bcvsolutions.idm.vs.service.impl;
import java.io.Serializable;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;
import java.time.ZonedDateTime;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import eu.bcvsolutions.idm.InitDemoData;
import eu.bcvsolutions.idm.acc.dto.SysSchemaAttributeDto;
import eu.bcvsolutions.idm.acc.dto.SysSchemaObjectClassDto;
import eu.bcvsolutions.idm.acc.dto.SysSystemAttributeMappingDto;
import eu.bcvsolutions.idm.acc.dto.SysSystemDto;
import eu.bcvsolutions.idm.acc.dto.SysSystemMappingDto;
import eu.bcvsolutions.idm.acc.dto.filter.SysSchemaAttributeFilter;
import eu.bcvsolutions.idm.acc.dto.filter.SysSystemEntityFilter;
import eu.bcvsolutions.idm.acc.dto.filter.SysSystemMappingFilter;
import eu.bcvsolutions.idm.acc.service.api.SysSchemaAttributeService;
import eu.bcvsolutions.idm.acc.service.api.SysSystemAttributeMappingService;
import eu.bcvsolutions.idm.acc.service.api.SysSystemEntityService;
import eu.bcvsolutions.idm.acc.service.api.SysSystemMappingService;
import eu.bcvsolutions.idm.acc.service.api.SysSystemService;
import eu.bcvsolutions.idm.core.api.domain.IdentityState;
import eu.bcvsolutions.idm.core.api.dto.IdmIdentityDto;
import eu.bcvsolutions.idm.core.api.dto.IdmRoleDto;
import eu.bcvsolutions.idm.core.api.exception.ForbiddenEntityException;
import eu.bcvsolutions.idm.core.api.service.IdmIdentityService;
import eu.bcvsolutions.idm.core.api.service.IdmRoleService;
import eu.bcvsolutions.idm.core.eav.api.dto.IdmFormAttributeDto;
import eu.bcvsolutions.idm.core.eav.api.dto.IdmFormDefinitionDto;
import eu.bcvsolutions.idm.core.eav.api.service.FormService;
import eu.bcvsolutions.idm.core.eav.api.service.IdmFormAttributeService;
import eu.bcvsolutions.idm.core.security.api.domain.GuardedString;
import eu.bcvsolutions.idm.core.security.api.domain.IdmBasePermission;
import eu.bcvsolutions.idm.core.security.api.dto.LoginDto;
import eu.bcvsolutions.idm.core.security.api.service.LoginService;
import eu.bcvsolutions.idm.ic.api.IcConnectorConfiguration;
import eu.bcvsolutions.idm.ic.api.IcConnectorObject;
import eu.bcvsolutions.idm.ic.api.IcObjectClass;
import eu.bcvsolutions.idm.ic.filter.api.IcResultsHandler;
import eu.bcvsolutions.idm.ic.impl.IcObjectClassImpl;
import eu.bcvsolutions.idm.ic.service.api.IcConnectorFacade;
import eu.bcvsolutions.idm.test.api.AbstractIntegrationTest;
import eu.bcvsolutions.idm.vs.TestHelper;
import eu.bcvsolutions.idm.vs.connector.basic.BasicVirtualConfiguration;
import eu.bcvsolutions.idm.vs.domain.VirtualSystemGroupPermission;
import eu.bcvsolutions.idm.vs.domain.VsOperationType;
import eu.bcvsolutions.idm.vs.domain.VsRequestState;
import eu.bcvsolutions.idm.vs.domain.VsValueChangeType;
import eu.bcvsolutions.idm.vs.dto.VsAccountDto;
import eu.bcvsolutions.idm.vs.dto.VsAttributeDto;
import eu.bcvsolutions.idm.vs.dto.VsConnectorObjectDto;
import eu.bcvsolutions.idm.vs.dto.VsRequestDto;
import eu.bcvsolutions.idm.vs.dto.VsSystemDto;
import eu.bcvsolutions.idm.vs.dto.filter.VsRequestFilter;
import eu.bcvsolutions.idm.vs.entity.VsAccount;
import eu.bcvsolutions.idm.vs.entity.VsRequest;
import eu.bcvsolutions.idm.vs.evaluator.VsRequestByImplementerEvaluator;
import eu.bcvsolutions.idm.vs.service.api.VsAccountService;
import eu.bcvsolutions.idm.vs.service.api.VsRequestService;
/**
* Virtual system request test
* + request filters
*
* @author Svanda
* @author Patrik Stloukal
*/
@Component
public class DefaultVsRequestServiceIntegrationTest extends AbstractIntegrationTest {
private static final String USER_ONE_NAME = "vsUserOne";
private static final String USER_IMPLEMENTER_NAME = "vsUserImplementer";
private static final String ROLE_ONE_NAME = "vsRoleOne";
private static final String USER_ONE_CHANGED_NAME = "vsUserOneChanged";
@Autowired
private TestHelper helper;
@Autowired
private VsRequestService requestService;
@Autowired
private VsAccountService accountService;
@Autowired
private FormService formService;
@Autowired
private LoginService loginService;
@Autowired
private IdmIdentityService identityService;
@Autowired
private IdmRoleService roleService;
@Autowired
private SysSystemService systemService;
@Autowired
private SysSchemaAttributeService schemaAttributeService;
@Autowired
private SysSystemAttributeMappingService systemAttributeMappingService;
@Autowired
private SysSystemMappingService systemMappingService;
@Autowired
private IdmFormAttributeService formAttributeService;
@Autowired
private SysSystemEntityService systemEntityService;
@Autowired
private IcConnectorFacade connectorFacade;
@Before
public void init() {
loginAsAdmin();
}
@After
public void logout() {
this.deleteAll(USER_ONE_NAME, USER_ONE_CHANGED_NAME, USER_IMPLEMENTER_NAME, ROLE_ONE_NAME);
super.logout();
}
@Test
public void createAndRealizeRequestTest() {
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
this.assignRoleSystem(system, helper.createIdentity(USER_ONE_NAME), ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
// We try to realize the request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
request = requestService.realize(request);
Assert.assertEquals(VsRequestState.REALIZED, request.getState());
account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNotNull("Account cannot be null, because request was realized!", account);
}
@Test
public void disableRequestTest() {
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
IdmIdentityDto identity = helper.createIdentity(USER_ONE_NAME);
this.assignRoleSystem(system, identity, ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
// We try to realize the request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
request = requestService.realize(request);
Assert.assertEquals(VsRequestState.REALIZED, request.getState());
account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNotNull("Account cannot be null, because request was realized!", account);
Assert.assertEquals(Boolean.TRUE, account.isEnable());
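// The realized account is enabled; disabling the identity must create an UPDATE request
// that, once realized, disables the virtual account.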
super.logout();
loginAsAdmin();
// Disable the identity
identity.setState(IdentityState.DISABLED_MANUALLY);
identityService.save(identity);
// Find created requests
requests = requestService.find(requestFilter, null).getContent().stream()
.filter(r -> VsRequestState.IN_PROGRESS == r.getState()).collect(Collectors.toList());
Assert.assertEquals(1, requests.size());
request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.UPDATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
// We try realize the request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
request = requestService.realize(request);
Assert.assertEquals(VsRequestState.REALIZED, request.getState());
account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNotNull("Account cannot be null, because request was realized!", account);
Assert.assertEquals(Boolean.FALSE, account.isEnable());
}
@Test
public void systemAccountFilterTest() {
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
this.assignRoleSystem(system, helper.createIdentity(USER_ONE_NAME), ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
// We try to realize the request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
request = requestService.realize(request);
Assert.assertEquals(VsRequestState.REALIZED, request.getState());
account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNotNull("Account cannot be null, because request was realized!", account);
IcConnectorConfiguration configuration = systemService.getConnectorConfiguration(system);
IcObjectClass objectClass = new IcObjectClassImpl("__ACCOUNT__");
List<String> uids = new ArrayList<>();
connectorFacade.search(system.getConnectorInstance(), configuration, objectClass, null, new IcResultsHandler() {
@Override
public boolean handle(IcConnectorObject connectorObject) {
uids.add(connectorObject.getUidValue());
return true;
}
});
Assert.assertEquals(1, uids.size());
Assert.assertEquals(USER_ONE_NAME, uids.get(0));
}
@Test
public void createAndCancelRequestTest() {
String reason = "cancel \"request\" reason!";
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
this.assignRoleSystem(system, helper.createIdentity(USER_ONE_NAME), ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
// We try to cancel the request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
request = requestService.cancel(request, reason);
Assert.assertEquals(VsRequestState.CANCELED, request.getState());
Assert.assertEquals(reason, request.getReason());
account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was canceled!", account);
}
@Test(expected = ForbiddenEntityException.class)
public void realizeRequestWithoutRightTest() {
String reason = "cancel \"request\" reason!";
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
this.assignRoleSystem(system, helper.createIdentity(USER_ONE_NAME), ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
// We try to cancel the request
super.logout();
loginService.login(new LoginDto(USER_ONE_NAME, new GuardedString("password")));
request = requestService.cancel(request, reason);
}
@Test
public void createMoreRequestsTest() {
String changed = "changed";
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
this.assignRoleSystem(system, helper.createIdentity(USER_ONE_NAME), ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
IdmIdentityDto userOne = identityService.getByUsername(USER_ONE_NAME);
userOne.setFirstName(changed);
userOne.setLastName(changed);
identityService.save(userOne);
// Duplicate save ... does not invoke provisioning
identityService.save(userOne);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(2, requests.size());
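// The second request must be an in-progress UPDATE created by the first name / last name change.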
VsRequestDto changeRequest = requests.stream().filter(
req -> VsRequestState.IN_PROGRESS == req.getState() && VsOperationType.UPDATE == req.getOperationType())
.findFirst().orElse(null);
Assert.assertNotNull("Request with change not found!", changeRequest);
}
@Test
public void realizeUpdateAndDeleteRequestsTest() {
String changed = "changed";
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
this.assignRoleSystem(system, helper.createIdentity(USER_ONE_NAME), ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
IdmIdentityDto userOne = identityService.getByUsername(USER_ONE_NAME);
userOne.setFirstName(changed);
userOne.setLastName(changed);
identityService.save(userOne);
// Delete identity
identityService.delete(userOne);
// Test read rights (no requests may be returned for UserOne)
IdmIdentityDto userTwo = helper.createIdentity("vsUserTwo");
super.logout();
loginService.login(new LoginDto(userTwo.getUsername(), new GuardedString("password")));
requests = requestService.find(requestFilter, null, IdmBasePermission.READ).getContent();
Assert.assertEquals("We found request without correct rights!", 0, requests.size());
// Test read rights (3 requests must be returned for UserImplementer)
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
requests = requestService.find(requestFilter, null, IdmBasePermission.READ).getContent();
Assert.assertEquals(3, requests.size());
VsRequestDto changeRequest = requests.stream().filter(
req -> VsRequestState.IN_PROGRESS == req.getState() && VsOperationType.UPDATE == req.getOperationType())
.findFirst().orElse(null);
Assert.assertNotNull("Request with change not found!", changeRequest);
VsRequestDto deleteRequest = requests.stream().filter(
req -> VsRequestState.IN_PROGRESS == req.getState() && VsOperationType.DELETE == req.getOperationType())
.findFirst().orElse(null);
Assert.assertNotNull("Request with delete not found!", deleteRequest);
VsRequestDto createRequest = requests.stream().filter(
req -> VsRequestState.IN_PROGRESS == req.getState() && VsOperationType.CREATE == req.getOperationType())
.findFirst().orElse(null);
Assert.assertNotNull("Request with create not found!", createRequest);
// Realize create request
request = requestService.realize(createRequest);
// Realize update request
request = requestService.realize(changeRequest);
// Realize delete request
request = requestService.realize(deleteRequest);
// Find only archived
requestFilter.setOnlyArchived(Boolean.TRUE);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(3, requests.size());
boolean foundNotRealized = requests.stream().filter(req -> VsRequestState.REALIZED != req.getState())
.findFirst().isPresent();
Assert.assertTrue("Found not realized requests!", !foundNotRealized);
}
@Test
public void checkMultivalueInWishObjectTest() {
String ldapGroupsName = "ldapGroups";
String changed = "changed";
List<String> attributes = new ArrayList<>(Lists.newArrayList(BasicVirtualConfiguration.DEFAULT_ATTRIBUTES));
attributes.add(ldapGroupsName);
// Create virtual system with extra attribute (ldapGroups)
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, attributes);
// Find the attribute definition for ldapGroups and set it to multivalue
String virtualSystemKey = MessageFormat.format("{0}:systemId={1}", system.getConnectorKey().getFullName(),
system.getId().toString());
String type = VsAccount.class.getName();
IdmFormDefinitionDto definition = this.formService.getDefinition(type, virtualSystemKey);
IdmFormAttributeDto ldapGroupsFormAttr = formAttributeService.findAttribute(VsAccount.class.getName(),
definition.getCode(), ldapGroupsName);
Assert.assertNotNull("Ldap attribute muste exist!", ldapGroupsFormAttr);
ldapGroupsFormAttr.setMultiple(true);
// Change the name of this attribute. We want to check that the logic does not depend on the attribute name.
ldapGroupsFormAttr.setName(helper.createName());
formService.saveAttribute(ldapGroupsFormAttr);
// Generate the schema for the system (we need to propagate the multivalue setting)
SysSchemaObjectClassDto schema = systemService.generateSchema(system).get(0);
SysSchemaAttributeFilter schemaAttributeFilter = new SysSchemaAttributeFilter();
schemaAttributeFilter.setSystemId(system.getId());
List<SysSchemaAttributeDto> schemaAttributes = schemaAttributeService.find(schemaAttributeFilter, null)
.getContent();
SysSystemMappingFilter systemMappingFilter = new SysSystemMappingFilter();
systemMappingFilter.setSystemId(system.getId());
systemMappingFilter.setObjectClassId(schema.getId());
SysSystemMappingDto mapping = systemMappingService.find(systemMappingFilter, null).getContent().get(0);
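// Map the ldapGroups schema attribute as an extended (EAV) attribute so its multivalued
// content is provisioned to the virtual system.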
for (SysSchemaAttributeDto schemaAttr : schemaAttributes) {
if (ldapGroupsName.equals(schemaAttr.getName())) {
SysSystemAttributeMappingDto attributeMapping = new SysSystemAttributeMappingDto();
attributeMapping.setUid(false);
attributeMapping.setEntityAttribute(false);
attributeMapping.setExtendedAttribute(true);
attributeMapping.setIdmPropertyName(ldapGroupsName);
attributeMapping.setName(schemaAttr.getName());
attributeMapping.setSchemaAttribute(schemaAttr.getId());
attributeMapping.setSystemMapping(mapping.getId());
systemAttributeMappingService.save(attributeMapping);
}
}
IdmIdentityDto userOne = helper.createIdentity(USER_ONE_NAME);
List<Serializable> initList = ImmutableList.of("TEST1", "TEST2", "TEST3");
formService.saveValues(userOne, ldapGroupsName, initList);
this.assignRoleSystem(system, userOne, ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto createRequest = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, createRequest.getUid());
Assert.assertEquals(VsOperationType.CREATE, createRequest.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, createRequest.getState());
VsConnectorObjectDto wish = requestService.getWishConnectorObject(createRequest);
boolean findAttributeWithoutChange = wish.getAttributes().stream().filter(attribute -> !attribute.isChanged())
.findFirst().isPresent();
Assert.assertFalse(findAttributeWithoutChange);
// Check that the ldapGroups attribute exists with three values
VsAttributeDto ldapGroupAttribute = wish.getAttributes().stream()
.filter(attribute -> ldapGroupsName.equals(attribute.getName())).findFirst().get();
Assert.assertTrue(ldapGroupAttribute.isMultivalue());
Assert.assertEquals(3, ldapGroupAttribute.getValues().size());
// Change multivalue attribute
List<Serializable> changeList = ImmutableList.of("TEST1", changed, "TEST3");
formService.saveValues(userOne, ldapGroupsName, changeList);
// Invoke provisioning
identityService.save(userOne);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(2, requests.size());
VsRequestDto changeRequest = requests.stream().filter(req -> VsOperationType.UPDATE == req.getOperationType())
.findFirst().get();
wish = requestService.getWishConnectorObject(changeRequest);
ldapGroupAttribute = wish.getAttributes().stream()
.filter(attribute -> ldapGroupsName.equals(attribute.getName())).findFirst().get();
Assert.assertTrue(ldapGroupAttribute.isMultivalue());
// The wish must contain three values (all added), because the previous create
// request has not been realized yet. The wish shows changes against the real
// state in VsAccount.
Assert.assertEquals(3, ldapGroupAttribute.getValues().size());
// We realize the create request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
requestService.realize(createRequest);
// Refresh wish
wish = requestService.getWishConnectorObject(changeRequest);
ldapGroupAttribute = wish.getAttributes().stream()
.filter(attribute -> ldapGroupsName.equals(attribute.getName())).findFirst().get();
Assert.assertTrue(ldapGroupAttribute.isMultivalue());
// The wish must contain four values: two without change, one deleted and
// one added value
Assert.assertEquals(4, ldapGroupAttribute.getValues().size());
// Find unchanged value
boolean findCorrectTest1Value = ldapGroupAttribute
.getValues().stream().filter(value -> value.getValue().equals(initList.get(0))
&& value.getOldValue().equals(initList.get(0)) && value.getChange() == null)
.findFirst().isPresent();
Assert.assertTrue(findCorrectTest1Value);
// Find deleted value
boolean findCorrectDeletedTest2Value = ldapGroupAttribute.getValues().stream()
.filter(value -> value.getValue().equals(initList.get(1)) && value.getOldValue().equals(initList.get(1))
&& VsValueChangeType.REMOVED == value.getChange())
.findFirst().isPresent();
Assert.assertTrue(findCorrectDeletedTest2Value);
// Find added value
boolean findCorrectCreatedChangedValue = ldapGroupAttribute.getValues().stream()
.filter(value -> value.getValue().equals(changed) && value.getOldValue() == null
&& VsValueChangeType.ADDED == value.getChange())
.findFirst().isPresent();
Assert.assertTrue(findCorrectCreatedChangedValue);
}
@Test
public void checkSinglevalueInWishObjectTest() {
String changed = "changed";
String firstName = "firstName";
String lastName = "lastName";
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
IdmIdentityDto userOne = helper.createIdentity(USER_ONE_NAME);
userOne.setFirstName(firstName);
userOne.setLastName(lastName);
identityService.save(userOne);
this.assignRoleSystem(system, userOne, ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
requestFilter.setState(VsRequestState.IN_PROGRESS);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto createRequest = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, createRequest.getUid());
Assert.assertEquals(VsOperationType.CREATE, createRequest.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, createRequest.getState());
VsConnectorObjectDto wish = requestService.getWishConnectorObject(createRequest);
boolean findAttributeWithoutChange = wish.getAttributes().stream().filter(attribute -> !attribute.isChanged())
.findFirst().isPresent();
Assert.assertFalse(findAttributeWithoutChange);
// Change singlevalue attributes
userOne.setFirstName(changed);
userOne.setLastName(changed);
// Invoke provisioning
identityService.save(userOne);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(2, requests.size());
// We realize the create request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
requestService.realize(createRequest);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
// get wish
wish = requestService.getWishConnectorObject(requests.get(0));
Assert.assertEquals(2, wish.getAttributes().stream().filter(attr -> attr.isChanged()).count());
// Find change for firstName value
boolean findCorrectChangedFirstName = wish.getAttributes().stream()
.filter(attr -> attr.getValue() != null && attr.getValue().getValue().equals(changed)
&& attr.getValue().getOldValue().equals(firstName)
&& VsValueChangeType.UPDATED == attr.getValue().getChange())
.findFirst().isPresent();
Assert.assertTrue(findCorrectChangedFirstName);
// Find change for lastName value
boolean findCorrectChangedLastName = wish.getAttributes().stream()
.filter(attr -> attr.getValue() != null && attr.getValue().getValue().equals(changed)
&& attr.getValue().getOldValue().equals(lastName)
&& VsValueChangeType.UPDATED == attr.getValue().getChange())
.findFirst().isPresent();
Assert.assertTrue(findCorrectChangedLastName);
}
@Test
public void changeUidTest() {
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
IdmIdentityDto userOne = helper.createIdentity(USER_ONE_NAME);
identityService.save(userOne);
this.assignRoleSystem(system, userOne, ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
requestFilter.setState(VsRequestState.IN_PROGRESS);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto createRequest = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, createRequest.getUid());
Assert.assertEquals(VsOperationType.CREATE, createRequest.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, createRequest.getState());
VsConnectorObjectDto wish = requestService.getWishConnectorObject(createRequest);
boolean findAttributeWithoutChange = wish.getAttributes().stream().filter(attribute -> !attribute.isChanged())
.findFirst().isPresent();
Assert.assertFalse(findAttributeWithoutChange);
// Change the username attribute
userOne.setUsername(USER_ONE_CHANGED_NAME);
// Invoke provisioning
identityService.save(userOne);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(2, requests.size());
// We realize the create request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
requestService.realize(createRequest);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
// get wish
wish = requestService.getWishConnectorObject(requests.get(0));
Assert.assertEquals(1, wish.getAttributes().stream().filter(attr -> attr.isChanged()).count());
// Find change for username value
boolean findCorrectChangedUserName = wish.getAttributes().stream()
.filter(attr -> attr.getValue().getValue().equals(USER_ONE_CHANGED_NAME)
&& attr.getValue().getOldValue().equals(USER_ONE_NAME)
&& VsValueChangeType.UPDATED == attr.getValue().getChange())
.findFirst().isPresent();
Assert.assertTrue(findCorrectChangedUserName);
SysSystemEntityFilter systemEntityFilter = new SysSystemEntityFilter();
systemEntityFilter.setSystemId(system.getId());
systemEntityFilter.setUid(USER_ONE_NAME);
boolean oldUserNameExist = !systemEntityService.find(systemEntityFilter, null).getContent().isEmpty();
Assert.assertTrue(oldUserNameExist);
// Realize the username change
requestService.realize(requests.get(0));
// We expect the UID in SystemEntity to change
oldUserNameExist = !systemEntityService.find(systemEntityFilter, null).getContent().isEmpty();
Assert.assertTrue(!oldUserNameExist);
systemEntityFilter.setUid(USER_ONE_CHANGED_NAME);
boolean changedUserNameExist = !systemEntityService.find(systemEntityFilter, null).getContent().isEmpty();
Assert.assertTrue(changedUserNameExist);
}
@Test
public void dateTest() {
SysSystemDto virtualSystem = helper.createVirtualSystem(helper.createName());
IdmRoleDto roleOne = helper.createRole();
IdmIdentityDto identity = helper.createIdentity((GuardedString) null);
// Assign system to role
helper.createRoleSystem(roleOne, virtualSystem);
helper.assignRoles(helper.getPrimeContract(identity.getId()), false, roleOne);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(virtualSystem.getId());
requestFilter.setUid(identity.getUsername());
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
requestFilter.setCreatedAfter(ZonedDateTime.now().minusSeconds(10));
requestFilter.setCreatedBefore(ZonedDateTime.now());
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
requestFilter.setCreatedAfter(ZonedDateTime.now().plusMinutes(10));
requestFilter.setCreatedBefore(ZonedDateTime.now().plusMinutes(11));
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(0, requests.size());
requestFilter.setCreatedAfter(ZonedDateTime.now().minusMinutes(10));
requestFilter.setCreatedBefore(ZonedDateTime.now().minusMinutes(9));
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(0, requests.size());
}
@Test
public void modifiedDateTest() {
SysSystemDto virtualSystem = helper.createVirtualSystem(helper.createName());
IdmRoleDto roleOne = helper.createRole();
IdmIdentityDto identity = helper.createIdentity((GuardedString) null);
// Assign system to role
helper.createRoleSystem(roleOne, virtualSystem);
helper.assignRoles(helper.getPrimeContract(identity.getId()), false, roleOne);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(virtualSystem.getId());
requestFilter.setUid(identity.getUsername());
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
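// A modified-date window that covers now must match the single request; windows
// entirely in the future or entirely in the past must match nothing.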
requestFilter.setModifiedAfter(ZonedDateTime.now().minusSeconds(10));
requestFilter.setModifiedBefore(ZonedDateTime.now());
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
requestFilter.setModifiedAfter(ZonedDateTime.now().plusMinutes(10));
requestFilter.setModifiedBefore(ZonedDateTime.now().plusMinutes(11));
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(0, requests.size());
requestFilter.setModifiedAfter(ZonedDateTime.now().minusMinutes(10));
requestFilter.setModifiedBefore(ZonedDateTime.now().minusMinutes(9));
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(0, requests.size());
}
@Test
public void systemTest() {
SysSystemDto virtualSystem = helper.createVirtualSystem(helper.createName());
IdmRoleDto roleOne = helper.createRole(helper.createName());
IdmIdentityDto identity = helper.createIdentity((GuardedString) null);
IdmIdentityDto identity2 = helper.createIdentity((GuardedString) null);
IdmIdentityDto identity3 = helper.createIdentity((GuardedString) null);
IdmIdentityDto identity4 = helper.createIdentity((GuardedString) null);
// Assign system to role
helper.createRoleSystem(roleOne, virtualSystem);
helper.assignRoles(helper.getPrimeContract(identity.getId()), false, roleOne);
helper.assignRoles(helper.getPrimeContract(identity2.getId()), false, roleOne);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(virtualSystem.getId());
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(2, requests.size());
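// Assigning the role to two more identities must raise the request count to four;
// the uid filter then narrows the result back to the single request for the first identity.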
helper.assignRoles(helper.getPrimeContract(identity3.getId()), false, roleOne);
helper.assignRoles(helper.getPrimeContract(identity4.getId()), false, roleOne);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(4, requests.size());
requestFilter.setUid(identity.getUsername());
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());// identity uid filter test
}
@Test
public void filterTest() {
SysSystemDto virtualSystem = helper.createVirtualSystem(helper.createName());
IdmRoleDto roleOne = helper.createRole();
IdmIdentityDto identity = helper.createIdentity((GuardedString) null);
IdmIdentityDto identity2 = helper.createIdentity((GuardedString) null);
IdmIdentityDto identity3 = helper.createIdentity((GuardedString) null);
IdmIdentityDto identity4 = helper.createIdentity((GuardedString) null);
// Assign system to role
helper.createRoleSystem(roleOne, virtualSystem);
helper.assignRoles(helper.getPrimeContract(identity.getId()), false, roleOne);
helper.assignRoles(helper.getPrimeContract(identity2.getId()), false, roleOne);
helper.assignRoles(helper.getPrimeContract(identity3.getId()), false, roleOne);
helper.assignRoles(helper.getPrimeContract(identity4.getId()), false, roleOne);
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(virtualSystem.getId());
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(4, requests.size());
VsRequestDto request = requests.get(0);
requestService.realize(request);
Assert.assertEquals(VsRequestState.REALIZED, request.getState());
requestFilter.setOnlyArchived(true);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
requestFilter.setOnlyArchived(null);
requestFilter.setState(VsRequestState.IN_PROGRESS);
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(3, requests.size());
requestFilter.setConnectorKey(request.getConnectorKey());
requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(3, requests.size());
}
@Test
public void createAndRealizeRequestWithNoteTest() {
SysSystemDto system = this.createVirtualSystem(USER_IMPLEMENTER_NAME, null);
this.assignRoleSystem(system, helper.createIdentity(USER_ONE_NAME), ROLE_ONE_NAME);
// Find created requests
VsRequestFilter requestFilter = new VsRequestFilter();
requestFilter.setSystemId(system.getId());
requestFilter.setUid(USER_ONE_NAME);
List<VsRequestDto> requests = requestService.find(requestFilter, null).getContent();
Assert.assertEquals(1, requests.size());
VsRequestDto request = requests.get(0);
Assert.assertEquals(USER_ONE_NAME, request.getUid());
Assert.assertEquals(VsOperationType.CREATE, request.getOperationType());
Assert.assertEquals(VsRequestState.IN_PROGRESS, request.getState());
VsAccountDto account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNull("Account must be null, because request was not realized yet!", account);
// We try to realize the request
super.logout();
loginService.login(new LoginDto(USER_IMPLEMENTER_NAME, new GuardedString("password")));
String note = helper.createName();
request = requestService.realize(request, note);
Assert.assertEquals(VsRequestState.REALIZED, request.getState());
account = accountService.findByUidSystem(USER_ONE_NAME, system.getId());
Assert.assertNotNull("Account cannot be null, because request was realized!", account);
request = requestService.get(request.getId());
Assert.assertEquals(note, request.getReason());
}
/**
* Creates a role, assigns the given system to it and assigns the role to the given user.
*
* @param system system to assign to the new role
* @param userOne user who receives the role
* @param roleOneName code of the role to create
* @return the given system
*/
public SysSystemDto assignRoleSystem(SysSystemDto system, IdmIdentityDto userOne, String roleOneName) {
IdmRoleDto roleOne = helper.createRole(roleOneName);
// Create policy for vs evaluator and user role
helper.createAuthorizationPolicy(this.createDefaultRole().getId(),
VirtualSystemGroupPermission.VSREQUEST, VsRequest.class, VsRequestByImplementerEvaluator.class,
IdmBasePermission.ADMIN);
// Assign system to role
helper.createRoleSystem(roleOne, system);
helper.assignRoles(helper.getPrimeContract(userOne.getId()), false, roleOne);
return system;
}
public SysSystemDto createVirtualSystem(String userImplementerName, List<String> attributes) {
IdmIdentityDto userImplementer = helper.createIdentity(userImplementerName);
VsSystemDto config = new VsSystemDto();
config.setName("vsSystemOne" + new Date().getTime());
config.setImplementers(ImmutableList.of(userImplementer.getId()));
if (attributes != null) {
config.setAttributes(attributes);
}
SysSystemDto system = helper.createVirtualSystem(config);
Assert.assertNotNull(system);
return system;
}
public void deleteAll(String userOneName, String userOneChangedName, String userImplementerName,
String roleOneName) {
if (identityService.getByUsername(userOneName) != null) {
identityService.delete(identityService.getByUsername(userOneName));
}
if (identityService.getByUsername(userOneChangedName) != null) {
identityService.delete(identityService.getByUsername(userOneChangedName));
}
if (identityService.getByUsername(userImplementerName) != null) {
identityService.delete(identityService.getByUsername(userImplementerName));
}
if (roleService.getByCode(roleOneName) != null) {
roleService.delete(roleService.getByCode(roleOneName));
}
}
/**
* Returns the role with the code defined in {@link InitDemoData#DEFAULT_ROLE_NAME}, creating it when it does not exist yet.
*
* @return the default role
*/
public IdmRoleDto createDefaultRole() {
IdmRoleDto defaultRole = roleService.getByCode(InitDemoData.DEFAULT_ROLE_NAME);
if (defaultRole != null) {
return defaultRole;
}
//
defaultRole = new IdmRoleDto();
defaultRole.setCode(InitDemoData.DEFAULT_ROLE_NAME);
return roleService.save(defaultRole);
}
}
|
Update DefaultVsRequestServiceIntegrationTest.java
|
Realization/backend/vs/src/test/java/eu/bcvsolutions/idm/vs/service/impl/DefaultVsRequestServiceIntegrationTest.java
|
Update DefaultVsRequestServiceIntegrationTest.java
|
|
Java
|
mit
|
9e17744c02ad2794a0c63b91ecf867d5aa262366
| 0
|
Techjar/LEDCubeManager,Techjar/LEDCubeManager
|
package com.techjar.ledcm;
import com.techjar.ledcm.gui.screen.ScreenMainControl;
import com.techjar.ledcm.hardware.ArduinoLEDManager;
import com.techjar.ledcm.hardware.CommThread;
import com.techjar.ledcm.hardware.LEDArray;
import com.techjar.ledcm.hardware.LEDManager;
import com.techjar.ledcm.hardware.LEDUtil;
import com.techjar.ledcm.hardware.SerialPortHandler;
import com.techjar.ledcm.hardware.SpectrumAnalyzer;
import com.techjar.ledcm.hardware.TLC5940LEDManager;
import com.techjar.ledcm.hardware.TestHugeLEDManager;
import com.techjar.ledcm.hardware.TestReallyHugeLEDManager;
import com.techjar.ledcm.hardware.animation.*;
import com.techjar.ledcm.util.Angle;
import com.techjar.ledcm.util.AxisAlignedBB;
import com.techjar.ledcm.util.Dimension3D;
import com.techjar.ledcm.util.Direction;
import com.techjar.ledcm.util.LEDCubeOctreeNode;
import com.techjar.ledcm.util.Model;
import com.techjar.ledcm.util.Quaternion;
import com.techjar.ledcm.util.Util;
import com.techjar.ledcm.util.Vector3;
import com.techjar.ledcm.util.input.InputBinding;
import com.techjar.ledcm.util.input.InputBindingManager;
import com.techjar.ledcm.util.input.InputInfo;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import lombok.Getter;
import lombok.Setter;
import org.lwjgl.input.Controller;
import org.lwjgl.input.Keyboard;
import org.lwjgl.input.Mouse;
import org.lwjgl.util.Color;
/**
*
* @author Techjar
*/
public class LEDCube {
private Map<String, Animation> animations = new HashMap<>();
private List<String> animationNames = new ArrayList<>();
private LEDManager ledManager;
private LEDCubeOctreeNode[] octrees;
private int ledSpaceMult = 8;
private boolean drawClick;
private boolean postInited;
private Vector3 cursorTrace;
@Getter private boolean trueColor;
@Getter private CommThread commThread;
@Getter private SpectrumAnalyzer spectrumAnalyzer;
@Getter private Color paintColor = new Color(255, 255, 255);
@Getter private boolean[] highlight;
@Getter private Vector3 paintSize = new Vector3(0, 0, 0);
@Getter @Setter private int layerIsolation = 0;
@Getter @Setter private int selectedLayer = 0;
@Getter private Model model;
public LEDCube() {
ledManager = new ArduinoLEDManager(4, false);
//ledManager = new TLC5940LEDManager(true);
//ledManager = new TestHugeLEDManager(true);
//ledManager = new TestReallyHugeLEDManager(true);
highlight = new boolean[ledManager.getLEDCount()];
model = LEDCubeManager.getModelManager().getModel("led.model");
initOctree();
initBindings();
/*for (int i = 0; i < 64; i++) {
double j = i;
LogHelper.info(Math.round(MathHelper.cie1931(j/63)*63));
}*/
}
public void postInit() throws IOException {
if (postInited) throw new IllegalStateException();
postInited = true;
spectrumAnalyzer = new SpectrumAnalyzer();
commThread = new CommThread(new SerialPortHandler(LEDCubeManager.getSerialPortName()));
commThread.start();
LEDCubeManager.getCamera().setPosition(new Vector3(-80, 85, 28));
LEDCubeManager.getCamera().setAngle(new Angle(-31, -90, 0));
}
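// Recomputes the highlight flags: clears them, then, while the mouse is not grabbed, marks
// every LED inside the paint-size box anchored at the cursor trace, clamped to the cube
// dimensions and respecting the current layer isolation.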
private void computeLEDHighlight() {
for (int i = 0; i < highlight.length; i++) {
highlight[i] = false;
}
if (cursorTrace != null && !Mouse.isGrabbed()) {
Dimension3D dim = ledManager.getDimensions();
for (int x = (int)cursorTrace.getX(); x <= Math.min((int)cursorTrace.getX() + (int)paintSize.getX(), dim.x - 1); x++) {
for (int y = (int)cursorTrace.getY(); y <= Math.min((int)cursorTrace.getY() + (int)paintSize.getY(), dim.y - 1); y++) {
for (int z = (int)cursorTrace.getZ(); z <= Math.min((int)cursorTrace.getZ() + (int)paintSize.getZ(), dim.z - 1); z++) {
if (isLEDWithinIsolation(x, y, z)) {
highlight[Util.encodeCubeVector(x, y, z)] = true;
}
}
}
}
}
}
private void paintLEDHighlight() {
Dimension3D dim = ledManager.getDimensions();
for (int x = 0; x < dim.x; x++) {
for (int y = 0; y < dim.y; y++) {
for (int z = 0; z < dim.z; z++) {
if (highlight[Util.encodeCubeVector(x, y, z)]) {
ledManager.setLEDColor(x, y, z, paintColor);
}
}
}
}
}
public void preProcess() {
cursorTrace = traceCursorToLED();
computeLEDHighlight();
}
public boolean processKeyboardEvent() {
if (Keyboard.getEventKeyState()) {
//if (Keyboard.getEventKey() == Keyboard.KEY_F11) setFullscreen(!fullscreen);
}
return true;
}
public boolean processMouseEvent() {
if (!Mouse.isGrabbed() && drawClick) {
paintLEDHighlight();
}
return !drawClick;
}
public boolean processControllerEvent(Controller controller) {
return true;
}
public void update(float delta) {
// we don't do anything here... yet...
}
public int render() {
int faceCount = 0;
float mult = ledSpaceMult;
Dimension3D dim = ledManager.getDimensions();
LEDArray ledArray = ledManager.getLEDArray();
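// First pass: draw each LED that passes layer isolation, scaling the real hardware color
// by the manager's factor when true-color display is enabled.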
for (int y = 0; y < dim.y; y++) {
for (int z = 0; z < dim.z; z++) {
for (int x = 0; x < dim.x; x++) {
if (isLEDWithinIsolation(x, y, z)) {
Vector3 pos = new Vector3(z * mult, y * mult, x * mult);
Color color;
if (trueColor) {
Color ledColor = ledArray.getLEDColorReal(x, y, z);
color = new Color(Math.round(ledColor.getRed() * ledManager.getFactor()), Math.round(ledColor.getGreen() * ledManager.getFactor()), Math.round(ledColor.getBlue() * ledManager.getFactor()));
} else color = ledArray.getLEDColor(x, y, z);
faceCount += model.render(pos, new Quaternion(), color);
}
}
}
}
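// Second pass: overlay highlighted LEDs with a translucent, slightly enlarged model in the current paint color.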
for (int y = 0; y < dim.y; y++) {
for (int x = 0; x < dim.x; x++) {
for (int z = 0; z < dim.z; z++) {
if (highlight[Util.encodeCubeVector(x, y, z)]) {
if (isLEDWithinIsolation(x, y, z)) {
Vector3 pos = new Vector3(z * mult, y * mult, x * mult);
faceCount += model.render(pos, new Quaternion(), new Color(paintColor.getRed(), paintColor.getGreen(), paintColor.getBlue(), 32), new Vector3(1.2F, 1.2F, 1.2F));
}
}
}
}
}
return faceCount;
}
public LEDManager getLEDManager() {
return ledManager;
}
public boolean isLEDWithinIsolation(Vector3 vector) {
return isLEDWithinIsolation((int)vector.getX(), (int)vector.getY(), (int)vector.getZ());
}
public void setPaintColor(Color color) {
paintColor.set(color.getRed(), color.getGreen(), color.getBlue());
LEDCubeManager.getInstance().getScreenMainControl().redColorSlider.setValue(color.getRed() / 255F);
LEDCubeManager.getInstance().getScreenMainControl().greenColorSlider.setValue(color.getGreen() / 255F);
LEDCubeManager.getInstance().getScreenMainControl().blueColorSlider.setValue(color.getBlue() / 255F);
}
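// Registers the input bindings: reload the current or all animations, reset the camera,
// toggle true-color display, clear the LEDs, freeze the animation, paint LEDs and flood fill.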
private void initBindings() {
InputBindingManager.addBinding(new InputBinding("reloadanimation", "Reload Current", true, new InputInfo(InputInfo.Type.KEYBOARD, Keyboard.KEY_R)) {
@Override
public boolean onPressed() {
if (commThread.getCurrentSequence() == null) {
Animation anim = commThread.getCurrentAnimation();
try {
animations.put(anim.getName(), anim.getClass().newInstance());
commThread.setCurrentAnimation(animations.get(anim.getName()));
} catch (Exception ex) {
ex.printStackTrace();
}
return false;
}
return true;
}
@Override
public boolean onReleased() {
return true;
}
});
InputBindingManager.addBinding(new InputBinding("reloadallanimations", "Reload All", true, new InputInfo(InputInfo.Type.KEYBOARD, Keyboard.KEY_Y)) {
@Override
public boolean onPressed() {
if (commThread.getCurrentSequence() == null) {
loadAnimations();
return false;
}
return true;
}
@Override
public boolean onReleased() {
return true;
}
});
InputBindingManager.addBinding(new InputBinding("resetcamera", "Reset Camera", true, new InputInfo(InputInfo.Type.KEYBOARD, Keyboard.KEY_F)) {
@Override
public boolean onPressed() {
LEDCubeManager.getCamera().setPosition(new Vector3(-80, 85, 28));
LEDCubeManager.getCamera().setAngle(new Angle(-31, -90, 0));
return false;
}
@Override
public boolean onReleased() {
return true;
}
});
InputBindingManager.addBinding(new InputBinding("togglecolor", "Toggle Color", true, new InputInfo(InputInfo.Type.KEYBOARD, Keyboard.KEY_H)) {
@Override
public boolean onPressed() {
trueColor = !trueColor;
float increment = trueColor ? 1F / ledManager.getResolution() : 1F / 255F;
LEDCubeManager.getInstance().getScreenMainControl().redColorSlider.setIncrement(increment);
LEDCubeManager.getInstance().getScreenMainControl().greenColorSlider.setIncrement(increment);
LEDCubeManager.getInstance().getScreenMainControl().blueColorSlider.setIncrement(increment);
return false;
}
@Override
public boolean onReleased() {
return true;
}
});
InputBindingManager.addBinding(new InputBinding("clearleds", "Clear LEDs", true, new InputInfo(InputInfo.Type.KEYBOARD, Keyboard.KEY_C)) {
@Override
public boolean onPressed() {
LEDUtil.clear(ledManager);
return false;
}
@Override
public boolean onReleased() {
return true;
}
});
InputBindingManager.addBinding(new InputBinding("freezeanimation", "Freeze", true, new InputInfo(InputInfo.Type.KEYBOARD, Keyboard.KEY_X)) {
@Override
public boolean onPressed() {
commThread.setFrozen(!commThread.isFrozen());
return false;
}
@Override
public boolean onReleased() {
return true;
}
});
InputBindingManager.addBinding(new InputBinding("paintleds", "Paint LEDs", true, new InputInfo(InputInfo.Type.MOUSE, 0)) {
@Override
public boolean onPressed() {
if (!Mouse.isGrabbed()) {
drawClick = true;
paintLEDHighlight();
return false;
}
return true;
}
@Override
public boolean onReleased() {
drawClick = false;
return true;
}
});
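// Right mouse button: iterative (stack-based) flood fill over the 6-connected neighbors,
// replacing the color under the cursor with the current paint color.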
InputBindingManager.addBinding(new InputBinding("floodfill", "Flood Fill", true, new InputInfo(InputInfo.Type.MOUSE, 1)) {
@Override
public boolean onPressed() {
if (!Mouse.isGrabbed()) {
if (cursorTrace != null) {
Dimension3D dim = ledManager.getDimensions();
LEDArray ledArray = ledManager.getLEDArray();
Color targetColor = ledArray.getLEDColor((int)cursorTrace.getX(), (int)cursorTrace.getY(), (int)cursorTrace.getZ());
if (!targetColor.equals(paintColor)) {
boolean[] processed = new boolean[ledManager.getLEDCount()];
LinkedList<Vector3> stack = new LinkedList<>();
stack.push(cursorTrace);
while (!stack.isEmpty()) {
Vector3 current = stack.pop();
Color color = ledArray.getLEDColor((int)current.getX(), (int)current.getY(), (int)current.getZ());
if (color.equals(targetColor) && isLEDWithinIsolation(current)) {
ledManager.setLEDColor((int)current.getX(), (int)current.getY(), (int)current.getZ(), paintColor);
processed[Util.encodeCubeVector(current)] = true;
for (int i = 0; i < 6; i++) {
Vector3 offset = Direction.values()[i].getVector();
Vector3 node = current.add(offset);
if (node.getX() >= 0 && node.getX() < dim.x && node.getY() >= 0 && node.getY() < dim.y && node.getZ() >= 0 && node.getZ() < dim.z && !processed[Util.encodeCubeVector(node)]) {
stack.push(node);
}
}
}
}
}
return false;
}
}
return true;
}
@Override
public boolean onReleased() {
return true;
}
});
}
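// Builds an octree acceleration structure for cursor-to-LED picking. Skipped for
// non-power-of-two dimensions, in which case traceCursorToLED() falls back to a brute-force scan.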
private void initOctree() {
Dimension3D dim = ledManager.getDimensions();
if (/*dim.x != dim.y || dim.x != dim.z || dim.y != dim.z ||*/ !Util.isPowerOfTwo(dim.x) || !Util.isPowerOfTwo(dim.y) || !Util.isPowerOfTwo(dim.z)) return; // Non-cubes and non-powers-of-two need special handling here
int minDim = Math.min(dim.x, Math.min(dim.y, dim.z));
float octreeSize = ledSpaceMult * minDim;
ArrayList<LEDCubeOctreeNode> list = new ArrayList<>();
for (int x = 0; x < dim.x; x += minDim) {
for (int y = 0; y < dim.y; y += minDim) {
for (int z = 0; z < dim.z; z += minDim) {
Vector3 offset = new Vector3(octreeSize * (x / minDim), octreeSize * (y / minDim), octreeSize * (z / minDim));
LEDCubeOctreeNode octree = new LEDCubeOctreeNode(new AxisAlignedBB(new Vector3(-ledSpaceMult / 2, -ledSpaceMult / 2, -ledSpaceMult / 2).add(offset), new Vector3(octreeSize + (ledSpaceMult / 2), octreeSize + (ledSpaceMult / 2), octreeSize + (ledSpaceMult / 2)).add(offset)));
recursiveFillOctree(octree, octreeSize / 2, minDim, new Vector3(x, y, z));
list.add(octree);
}
}
}
list.toArray(octrees = new LEDCubeOctreeNode[list.size()]);
}
private void recursiveFillOctree(LEDCubeOctreeNode node, float size, int count, Vector3 ledPos) {
AxisAlignedBB nodeAABB = node.getAABB();
for (int i = 0; i < 8; i++) {
int x = (i & 1);
int y = ((i >> 1) & 1);
int z = ((i >> 2) & 1);
float xOffset = x * size;
float yOffset = y * size;
float zOffset = z * size;
if (count > 1) {
LEDCubeOctreeNode newNode = new LEDCubeOctreeNode(new AxisAlignedBB(nodeAABB.getMinPoint().add(new Vector3(xOffset, yOffset, zOffset)), nodeAABB.getMinPoint().add(new Vector3(xOffset + size, yOffset + size, zOffset + size))));
node.setNode(i, newNode);
recursiveFillOctree(newNode, size / 2, count / 2, ledPos.add(new Vector3((count / 2) * x, (count / 2) * y, (count / 2) * z)));
} else {
AxisAlignedBB modelAABB = model.getAABB();
node.setNode(i, new LEDCubeOctreeNode(new AxisAlignedBB(modelAABB.getMinPoint().add(ledPos.multiply(ledSpaceMult)), modelAABB.getMaxPoint().add(ledPos.multiply(ledSpaceMult))), new Vector3(ledPos.getZ(), ledPos.getY(), ledPos.getX())));
}
}
}
private Vector3 recursiveIntersectOctree(LEDCubeOctreeNode node, Vector3 point) {
if (node.getNode(0) != null) {
for (int i = 0; i < 8; i++) {
LEDCubeOctreeNode nextNode = node.getNode(i);
if (nextNode.getAABB().containsPoint(point)) {
Vector3 ret = recursiveIntersectOctree(nextNode, point);
if (ret != null) return ret;
}
}
} else {
return node.getLEDPosition();
}
return null;
}
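// Marches along the cursor ray in small steps and returns the grid position of the first LED
// whose bounding box contains the sample point (octree lookup when available, otherwise brute force).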
public Vector3 traceCursorToLED() {
Vector3[] ray = LEDCubeManager.getInstance().getCursorRay();
Vector3 position = ray[0];
Vector3 direction = ray[1].multiply(0.5F);
float mult = ledSpaceMult;
Dimension3D dim = ledManager.getDimensions();
Model model = LEDCubeManager.getModelManager().getModel("led.model");
for (float step = 1; step < 5000; step += 2) {
Vector3 rayPos = position.add(direction.multiply(step));
if (octrees == null) {
for (int y = 0; y < dim.y; y++) {
for (int z = 0; z < dim.z; z++) {
for (int x = 0; x < dim.x; x++) {
float xx = z * mult;
float yy = y * mult;
float zz = x * mult;
Vector3 pos = new Vector3(xx, yy, zz);
if (model.getAABB().containsPoint(pos, rayPos) && isLEDWithinIsolation(x, y, z)) {
return new Vector3(x, y, z);
}
}
}
}
} else {
Vector3 ret = null;
for (int i = 0; i < octrees.length; i++) {
ret = recursiveIntersectOctree(octrees[i], rayPos);
if (ret != null) break;
}
if (ret != null) {
if (isLEDWithinIsolation((int)ret.getX(), (int)ret.getY(), (int)ret.getZ())) {
return ret;
}
}
}
}
return null;
}
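// Layer isolation: 0 = show all layers, 1/2/3 = only the selected X/Y/Z layer respectively.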
public boolean isLEDWithinIsolation(int x, int y, int z) {
switch (layerIsolation) {
case 1: return x == selectedLayer;
case 2: return y == selectedLayer;
case 3: return z == selectedLayer;
}
return true;
}
public void loadAnimations() {
animations.clear();
animationNames.clear();
addAnimation(new AnimationNone());
addAnimation(new AnimationSpectrumBars());
addAnimation(new AnimationSpectrumShooters());
//addAnimation(new AnimationIndividualTest());
addAnimation(new AnimationStaticFill());
addAnimation(new AnimationPulsate());
addAnimation(new AnimationPulsateHue());
addAnimation(new AnimationRandomize());
addAnimation(new AnimationRain());
addAnimation(new AnimationMatrix());
//addAnimation(new AnimationFolder());
addAnimation(new AnimationTwinkle());
addAnimation(new AnimationBlink());
addAnimation(new AnimationStrobe());
addAnimation(new AnimationSnake());
addAnimation(new AnimationSnakeBattle());
addAnimation(new AnimationSnakeInfinite());
addAnimation(new AnimationScrollers());
addAnimation(new AnimationProgressiveFill());
addAnimation(new AnimationSine());
addAnimation(new AnimationSineDouble());
addAnimation(new AnimationStacker());
addAnimation(new AnimationRainbowStacker());
addAnimation(new AnimationCandyCaneStacker());
addAnimation(new AnimationDrain());
addAnimation(new AnimationFaucet());
addAnimation(new AnimationMultiFaucet());
addAnimation(new AnimationFaucetFill());
addAnimation(new AnimationFaucetFillRainbow());
addAnimation(new AnimationSlidingBoxes());
for (Animation anim : animations.values()) {
anim.postLoadInitOptions();
}
if (LEDCubeManager.getInstance().getScreenMainControl() != null) {
LEDCubeManager.getInstance().getScreenMainControl().populateAnimationList();
}
}
private void addAnimation(Animation animation) {
animations.put(animation.getName(), animation);
if (!animation.isHidden()) animationNames.add(animation.getName());
}
public Map<String, Animation> getAnimations() {
return Collections.unmodifiableMap(animations);
}
public List<String> getAnimationNames() {
return Collections.unmodifiableList(animationNames);
}
public Animation getAnimationByClassName(String name) {
for (Animation animation : animations.values()) {
if (name.equals(animation.getClass().getSimpleName())) {
return animation;
}
}
return null;
}
}
|
src/com/techjar/ledcm/LEDCube.java
|
package com.techjar.ledcm;
import com.techjar.ledcm.gui.screen.ScreenMainControl;
import com.techjar.ledcm.hardware.ArduinoLEDManager;
import com.techjar.ledcm.hardware.CommThread;
import com.techjar.ledcm.hardware.LEDArray;
import com.techjar.ledcm.hardware.LEDManager;
import com.techjar.ledcm.hardware.LEDUtil;
import com.techjar.ledcm.hardware.SerialPortHandler;
import com.techjar.ledcm.hardware.SpectrumAnalyzer;
import com.techjar.ledcm.hardware.TLC5940LEDManager;
import com.techjar.ledcm.hardware.TestHugeLEDManager;
import com.techjar.ledcm.hardware.TestReallyHugeLEDManager;
import com.techjar.ledcm.hardware.animation.*;
import com.techjar.ledcm.util.Angle;
import com.techjar.ledcm.util.AxisAlignedBB;
import com.techjar.ledcm.util.Dimension3D;
import com.techjar.ledcm.util.Direction;
import com.techjar.ledcm.util.LEDCubeOctreeNode;
import com.techjar.ledcm.util.Model;
import com.techjar.ledcm.util.Quaternion;
import com.techjar.ledcm.util.Util;
import com.techjar.ledcm.util.Vector3;
import com.techjar.ledcm.util.input.InputBinding;
import com.techjar.ledcm.util.input.InputBindingManager;
import com.techjar.ledcm.util.input.InputInfo;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import lombok.Getter;
import lombok.Setter;
import org.lwjgl.input.Controller;
import org.lwjgl.input.Keyboard;
import org.lwjgl.input.Mouse;
import org.lwjgl.util.Color;
/**
*
* @author Techjar
*/
public class LEDCube {
private Map<String, Animation> animations = new HashMap<>();
private List<String> animationNames = new ArrayList<>();
private LEDManager ledManager;
private LEDCubeOctreeNode[] octrees;
private int ledSpaceMult = 8;
private boolean drawClick;
private boolean postInited;
private Vector3 cursorTrace;
@Getter private boolean trueColor;
@Getter private CommThread commThread;
@Getter private SpectrumAnalyzer spectrumAnalyzer;
@Getter private Color paintColor = new Color(255, 255, 255);
@Getter private boolean[] highlight;
@Getter private Vector3 paintSize = new Vector3(0, 0, 0);
@Getter @Setter private int layerIsolation = 0;
@Getter @Setter private int selectedLayer = 0;
@Getter private Model model;
public LEDCube() {
ledManager = new ArduinoLEDManager(4, false);
//ledManager = new TLC5940LEDManager(true);
//ledManager = new TestHugeLEDManager(true);
//ledManager = new TestReallyHugeLEDManager(true);
highlight = new boolean[ledManager.getLEDCount()];
model = LEDCubeManager.getModelManager().getModel("led.model");
initOctree();
initBindings();
/*for (int i = 0; i < 64; i++) {
double j = i;
LogHelper.info(Math.round(MathHelper.cie1931(j/63)*63));
}*/
}
public void postInit() throws IOException {
if (postInited) throw new IllegalStateException();
postInited = true;
spectrumAnalyzer = new SpectrumAnalyzer();
commThread = new CommThread(new SerialPortHandler(LEDCubeManager.getSerialPortName()));
commThread.start();
LEDCubeManager.getCamera().setPosition(new Vector3(-80, 85, 28));
LEDCubeManager.getCamera().setAngle(new Angle(-31, -90, 0));
}
private void computeLEDHighlight() {
for (int i = 0; i < highlight.length; i++) {
highlight[i] = false;
}
if (cursorTrace != null) {
Dimension3D dim = ledManager.getDimensions();
for (int x = (int)cursorTrace.getX(); x <= Math.min((int)cursorTrace.getX() + (int)paintSize.getX(), dim.x - 1); x++) {
for (int y = (int)cursorTrace.getY(); y <= Math.min((int)cursorTrace.getY() + (int)paintSize.getY(), dim.y - 1); y++) {
for (int z = (int)cursorTrace.getZ(); z <= Math.min((int)cursorTrace.getZ() + (int)paintSize.getZ(), dim.z - 1); z++) {
if (isLEDWithinIsolation(x, y, z)) {
highlight[Util.encodeCubeVector(x, y, z)] = true;
}
}
}
}
}
}
private void paintLEDHighlight() {
Dimension3D dim = ledManager.getDimensions();
for (int x = 0; x < dim.x; x++) {
for (int y = 0; y < dim.y; y++) {
for (int z = 0; z < dim.z; z++) {
if (highlight[Util.encodeCubeVector(x, y, z)]) {
ledManager.setLEDColor(x, y, z, paintColor);
}
}
}
}
}
public void preProcess() {
cursorTrace = traceCursorToLED();
computeLEDHighlight();
}
public boolean processKeyboardEvent() {
if (Keyboard.getEventKeyState()) {
//if (Keyboard.getEventKey() == Keyboard.KEY_F11) setFullscreen(!fullscreen);
}
return true;
}
public boolean processMouseEvent() {
if (!Mouse.isGrabbed() && drawClick) {
paintLEDHighlight();
}
return !drawClick;
}
public boolean processControllerEvent(Controller controller) {
return true;
}
public void update(float delta) {
// we don't do anything here... yet...
}
public int render() {
int faceCount = 0;
float mult = ledSpaceMult;
Dimension3D dim = ledManager.getDimensions();
LEDArray ledArray = ledManager.getLEDArray();
for (int y = 0; y < dim.y; y++) {
for (int z = 0; z < dim.z; z++) {
for (int x = 0; x < dim.x; x++) {
if (isLEDWithinIsolation(x, y, z)) {
Vector3 pos = new Vector3(z * mult, y * mult, x * mult);
Color color;
if (trueColor) {
Color ledColor = ledArray.getLEDColorReal(x, y, z);
color = new Color(Math.round(ledColor.getRed() * ledManager.getFactor()), Math.round(ledColor.getGreen() * ledManager.getFactor()), Math.round(ledColor.getBlue() * ledManager.getFactor()));
} else color = ledArray.getLEDColor(x, y, z);
faceCount += model.render(pos, new Quaternion(), color);
}
}
}
}
for (int y = 0; y < dim.y; y++) {
for (int x = 0; x < dim.x; x++) {
for (int z = 0; z < dim.z; z++) {
if (highlight[Util.encodeCubeVector(x, y, z)]) {
if (isLEDWithinIsolation(x, y, z)) {
Vector3 pos = new Vector3(z * mult, y * mult, x * mult);
faceCount += model.render(pos, new Quaternion(), new Color(paintColor.getRed(), paintColor.getGreen(), paintColor.getBlue(), 32), new Vector3(1.2F, 1.2F, 1.2F));
}
}
}
}
}
return faceCount;
}
public LEDManager getLEDManager() {
return ledManager;
}
public boolean isLEDWithinIsolation(Vector3 vector) {
return isLEDWithinIsolation((int)vector.getX(), (int)vector.getY(), (int)vector.getZ());
}
public void setPaintColor(Color color) {
paintColor.set(color.getRed(), color.getGreen(), color.getBlue());
LEDCubeManager.getInstance().getScreenMainControl().redColorSlider.setValue(color.getRed() / 255F);
LEDCubeManager.getInstance().getScreenMainControl().greenColorSlider.setValue(color.getGreen() / 255F);
LEDCubeManager.getInstance().getScreenMainControl().blueColorSlider.setValue(color.getBlue() / 255F);
}
private void initBindings() {
InputBindingManager.addBinding(new InputBinding("reloadanimation", "Reload Current", true, new InputInfo(InputInfo.Type.KEYBOARD, Keyboard.KEY_R)) {
@Override
public boolean onPressed() {
if (commThread.getCurrentSequence() == null) {
Animation anim = commThread.getCurrentAnimation();
try {
animations.put(anim.getName(), anim.getClass().newInstance());
commThread.setCurrentAnimation(animations.get(anim.getName()));
} catch (Exception ex) {
ex.printStackTrace();
}
return false;
}
return true;
}
@Override
public boolean onReleased() {
return true;
}
});
InputBindingManager.addBinding(new InputBinding("reloadallanimations", "Reload All", true, new InputInfo(InputInfo.Type.KEYBOARD, Keyboard.KEY_Y)) {
@Override
public boolean onPressed() {
if (commThread.getCurrentSequence() == null) {
loadAnimations();
return false;
}
return true;
}
@Override
public boolean onReleased() {
return true;
}
});
InputBindingManager.addBinding(new InputBinding("resetcamera", "Reset Camera", true, new InputInfo(InputInfo.Type.KEYBOARD, Keyboard.KEY_F)) {
@Override
public boolean onPressed() {
LEDCubeManager.getCamera().setPosition(new Vector3(-80, 85, 28));
LEDCubeManager.getCamera().setAngle(new Angle(-31, -90, 0));
return false;
}
@Override
public boolean onReleased() {
return true;
}
});
InputBindingManager.addBinding(new InputBinding("togglecolor", "Toggle Color", true, new InputInfo(InputInfo.Type.KEYBOARD, Keyboard.KEY_H)) {
@Override
public boolean onPressed() {
trueColor = !trueColor;
float increment = trueColor ? 1F / ledManager.getResolution() : 1F / 255F;
LEDCubeManager.getInstance().getScreenMainControl().redColorSlider.setIncrement(increment);
LEDCubeManager.getInstance().getScreenMainControl().greenColorSlider.setIncrement(increment);
LEDCubeManager.getInstance().getScreenMainControl().blueColorSlider.setIncrement(increment);
return false;
}
@Override
public boolean onReleased() {
return true;
}
});
InputBindingManager.addBinding(new InputBinding("clearleds", "Clear LEDs", true, new InputInfo(InputInfo.Type.KEYBOARD, Keyboard.KEY_C)) {
@Override
public boolean onPressed() {
LEDUtil.clear(ledManager);
return false;
}
@Override
public boolean onReleased() {
return true;
}
});
InputBindingManager.addBinding(new InputBinding("freezeanimation", "Freeze", true, new InputInfo(InputInfo.Type.KEYBOARD, Keyboard.KEY_X)) {
@Override
public boolean onPressed() {
commThread.setFrozen(!commThread.isFrozen());
return false;
}
@Override
public boolean onReleased() {
return true;
}
});
InputBindingManager.addBinding(new InputBinding("paintleds", "Paint LEDs", true, new InputInfo(InputInfo.Type.MOUSE, 0)) {
@Override
public boolean onPressed() {
if (!Mouse.isGrabbed()) {
drawClick = true;
paintLEDHighlight();
return false;
}
return true;
}
@Override
public boolean onReleased() {
drawClick = false;
return true;
}
});
InputBindingManager.addBinding(new InputBinding("floodfill", "Flood Fill", true, new InputInfo(InputInfo.Type.MOUSE, 1)) {
@Override
public boolean onPressed() {
if (!Mouse.isGrabbed()) {
if (cursorTrace != null) {
Dimension3D dim = ledManager.getDimensions();
LEDArray ledArray = ledManager.getLEDArray();
Color targetColor = ledArray.getLEDColor((int)cursorTrace.getX(), (int)cursorTrace.getY(), (int)cursorTrace.getZ());
if (!targetColor.equals(paintColor)) {
boolean[] processed = new boolean[ledManager.getLEDCount()];
LinkedList<Vector3> stack = new LinkedList<>();
stack.push(cursorTrace);
while (!stack.isEmpty()) {
Vector3 current = stack.pop();
Color color = ledArray.getLEDColor((int)current.getX(), (int)current.getY(), (int)current.getZ());
if (color.equals(targetColor) && isLEDWithinIsolation(current)) {
ledManager.setLEDColor((int)current.getX(), (int)current.getY(), (int)current.getZ(), paintColor);
processed[Util.encodeCubeVector(current)] = true;
for (int i = 0; i < 6; i++) {
Vector3 offset = Direction.values()[i].getVector();
Vector3 node = current.add(offset);
if (node.getX() >= 0 && node.getX() < dim.x && node.getY() >= 0 && node.getY() < dim.y && node.getZ() >= 0 && node.getZ() < dim.z && !processed[Util.encodeCubeVector(node)]) {
stack.push(node);
}
}
}
}
}
return false;
}
}
return true;
}
@Override
public boolean onReleased() {
return true;
}
});
}
private void initOctree() {
Dimension3D dim = ledManager.getDimensions();
if (/*dim.x != dim.y || dim.x != dim.z || dim.y != dim.z ||*/ !Util.isPowerOfTwo(dim.x) || !Util.isPowerOfTwo(dim.y) || !Util.isPowerOfTwo(dim.z)) return; // Non-cubes and non-powers-of-two need special handling here
int minDim = Math.min(dim.x, Math.min(dim.y, dim.z));
float octreeSize = ledSpaceMult * minDim;
ArrayList<LEDCubeOctreeNode> list = new ArrayList<>();
for (int x = 0; x < dim.x; x += minDim) {
for (int y = 0; y < dim.y; y += minDim) {
for (int z = 0; z < dim.z; z += minDim) {
Vector3 offset = new Vector3(octreeSize * (x / minDim), octreeSize * (y / minDim), octreeSize * (z / minDim));
LEDCubeOctreeNode octree = new LEDCubeOctreeNode(new AxisAlignedBB(new Vector3(-ledSpaceMult / 2, -ledSpaceMult / 2, -ledSpaceMult / 2).add(offset), new Vector3(octreeSize + (ledSpaceMult / 2), octreeSize + (ledSpaceMult / 2), octreeSize + (ledSpaceMult / 2)).add(offset)));
recursiveFillOctree(octree, octreeSize / 2, minDim, new Vector3(x, y, z));
list.add(octree);
}
}
}
list.toArray(octrees = new LEDCubeOctreeNode[list.size()]);
}
private void recursiveFillOctree(LEDCubeOctreeNode node, float size, int count, Vector3 ledPos) {
AxisAlignedBB nodeAABB = node.getAABB();
for (int i = 0; i < 8; i++) {
int x = (i & 1);
int y = ((i >> 1) & 1);
int z = ((i >> 2) & 1);
float xOffset = x * size;
float yOffset = y * size;
float zOffset = z * size;
if (count > 1) {
LEDCubeOctreeNode newNode = new LEDCubeOctreeNode(new AxisAlignedBB(nodeAABB.getMinPoint().add(new Vector3(xOffset, yOffset, zOffset)), nodeAABB.getMinPoint().add(new Vector3(xOffset + size, yOffset + size, zOffset + size))));
node.setNode(i, newNode);
recursiveFillOctree(newNode, size / 2, count / 2, ledPos.add(new Vector3((count / 2) * x, (count / 2) * y, (count / 2) * z)));
} else {
AxisAlignedBB modelAABB = model.getAABB();
node.setNode(i, new LEDCubeOctreeNode(new AxisAlignedBB(modelAABB.getMinPoint().add(ledPos.multiply(ledSpaceMult)), modelAABB.getMaxPoint().add(ledPos.multiply(ledSpaceMult))), new Vector3(ledPos.getZ(), ledPos.getY(), ledPos.getX())));
}
}
}
private Vector3 recursiveIntersectOctree(LEDCubeOctreeNode node, Vector3 point) {
if (node.getNode(0) != null) {
for (int i = 0; i < 8; i++) {
LEDCubeOctreeNode nextNode = node.getNode(i);
if (nextNode.getAABB().containsPoint(point)) {
Vector3 ret = recursiveIntersectOctree(nextNode, point);
if (ret != null) return ret;
}
}
} else {
return node.getLEDPosition();
}
return null;
}
public Vector3 traceCursorToLED() {
Vector3[] ray = LEDCubeManager.getInstance().getCursorRay();
Vector3 position = ray[0];
Vector3 direction = ray[1].multiply(0.5F);
float mult = ledSpaceMult;
Dimension3D dim = ledManager.getDimensions();
Model model = LEDCubeManager.getModelManager().getModel("led.model");
for (float step = 1; step < 5000; step += 2) {
Vector3 rayPos = position.add(direction.multiply(step));
if (octrees == null) {
for (int y = 0; y < dim.y; y++) {
for (int z = 0; z < dim.z; z++) {
for (int x = 0; x < dim.x; x++) {
float xx = z * mult;
float yy = y * mult;
float zz = x * mult;
Vector3 pos = new Vector3(xx, yy, zz);
if (model.getAABB().containsPoint(pos, rayPos) && isLEDWithinIsolation(x, y, z)) {
return new Vector3(x, y, z);
}
}
}
}
} else {
Vector3 ret = null;
for (int i = 0; i < octrees.length; i++) {
ret = recursiveIntersectOctree(octrees[i], rayPos);
if (ret != null) break;
}
if (ret != null) {
if (isLEDWithinIsolation((int)ret.getX(), (int)ret.getY(), (int)ret.getZ())) {
return ret;
}
}
}
}
return null;
}
public boolean isLEDWithinIsolation(int x, int y, int z) {
switch (layerIsolation) {
case 1: return x == selectedLayer;
case 2: return y == selectedLayer;
case 3: return z == selectedLayer;
}
return true;
}
public void loadAnimations() {
animations.clear();
animationNames.clear();
addAnimation(new AnimationNone());
addAnimation(new AnimationSpectrumBars());
addAnimation(new AnimationSpectrumShooters());
//addAnimation(new AnimationIndividualTest());
addAnimation(new AnimationStaticFill());
addAnimation(new AnimationPulsate());
addAnimation(new AnimationPulsateHue());
addAnimation(new AnimationRandomize());
addAnimation(new AnimationRain());
addAnimation(new AnimationMatrix());
//addAnimation(new AnimationFolder());
addAnimation(new AnimationTwinkle());
addAnimation(new AnimationBlink());
addAnimation(new AnimationStrobe());
addAnimation(new AnimationSnake());
addAnimation(new AnimationSnakeBattle());
addAnimation(new AnimationSnakeInfinite());
addAnimation(new AnimationScrollers());
addAnimation(new AnimationProgressiveFill());
addAnimation(new AnimationSine());
addAnimation(new AnimationSineDouble());
addAnimation(new AnimationStacker());
addAnimation(new AnimationRainbowStacker());
addAnimation(new AnimationCandyCaneStacker());
addAnimation(new AnimationDrain());
addAnimation(new AnimationFaucet());
addAnimation(new AnimationMultiFaucet());
addAnimation(new AnimationFaucetFill());
addAnimation(new AnimationFaucetFillRainbow());
addAnimation(new AnimationSlidingBoxes());
for (Animation anim : animations.values()) {
anim.postLoadInitOptions();
}
if (LEDCubeManager.getInstance().getScreenMainControl() != null) {
LEDCubeManager.getInstance().getScreenMainControl().populateAnimationList();
}
}
private void addAnimation(Animation animation) {
animations.put(animation.getName(), animation);
if (!animation.isHidden()) animationNames.add(animation.getName());
}
public Map<String, Animation> getAnimations() {
return Collections.unmodifiableMap(animations);
}
public List<String> getAnimationNames() {
return Collections.unmodifiableList(animationNames);
}
public Animation getAnimationByClassName(String name) {
for (Animation animation : animations.values()) {
if (name.equals(animation.getClass().getSimpleName())) {
return animation;
}
}
return null;
}
}
|
Fix LEDs highlighting while in camera movement mode
|
src/com/techjar/ledcm/LEDCube.java
|
Fix LEDs highlighting while in camera movement mode
|
|
Java
|
epl-1.0
|
b813f1402526ef8b5d436314a8025936b0d96541
| 0
|
IanMayo/january,SmithRWORNL/january,yannick-mayeur/january,IanMayo/january,IanMayo/january,yannick-mayeur/january,SmithRWORNL/january,SmithRWORNL/january,yannick-mayeur/january,IanMayo/january,yannick-mayeur/january
|
/*-
* Copyright (c) 2012 Diamond Light Source Ltd.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.dawnsci.analysis.dataset;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import org.apache.commons.math3.complex.Complex;
import org.eclipse.dawnsci.analysis.api.dataset.IDataset;
import org.eclipse.dawnsci.analysis.api.dataset.Slice;
import org.eclipse.dawnsci.analysis.dataset.impl.AbstractDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.BooleanDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.Comparisons;
import org.eclipse.dawnsci.analysis.dataset.impl.ComplexDoubleDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.CompoundDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.Dataset;
import org.eclipse.dawnsci.analysis.dataset.impl.DatasetFactory;
import org.eclipse.dawnsci.analysis.dataset.impl.DatasetUtils;
import org.eclipse.dawnsci.analysis.dataset.impl.DoubleDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.IndexIterator;
import org.eclipse.dawnsci.analysis.dataset.impl.IntegerDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.LongDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.Maths;
import org.eclipse.dawnsci.analysis.dataset.impl.ObjectDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.Random;
import org.eclipse.dawnsci.analysis.dataset.impl.StringDataset;
import org.junit.Assert;
import org.junit.Test;
public class AbstractDatasetTest {
@Test
public void testBestDType() {
assertEquals(Dataset.FLOAT32, AbstractDataset.getBestDType(Dataset.INT16, Dataset.FLOAT32));
assertEquals(Dataset.FLOAT64, AbstractDataset.getBestDType(Dataset.INT32, Dataset.FLOAT32));
assertEquals(Dataset.COMPLEX64, AbstractDataset.getBestDType(Dataset.FLOAT32, Dataset.COMPLEX64));
assertEquals(Dataset.COMPLEX128, AbstractDataset.getBestDType(Dataset.INT32, Dataset.COMPLEX64));
}
@Test
public void testCompatibleShapes() {
assertTrue("[] and []", AbstractDataset.areShapesCompatible(new int[] {}, new int[] {}));
assertTrue("[1] and []", AbstractDataset.areShapesCompatible(new int[] {1}, new int[] {}));
assertFalse("[2] and []", AbstractDataset.areShapesCompatible(new int[] {2}, new int[] {}));
assertTrue("[2] and [2]", AbstractDataset.areShapesCompatible(new int[] {2}, new int[] {2}));
assertTrue("[3] and [3]", AbstractDataset.areShapesCompatible(new int[] {3}, new int[] {3}));
assertTrue("[1,2] and [2]", AbstractDataset.areShapesCompatible(new int[] {1,2}, new int[] {2}));
assertTrue("[2] and [1,2]", AbstractDataset.areShapesCompatible(new int[] {2}, new int[] {1,2}));
assertFalse("[10,10] and [10,10,10]", AbstractDataset.areShapesCompatible(new int[] {10,10}, new int[] {10,10,10}));
assertFalse("[10,10,10] and [10,10]", AbstractDataset.areShapesCompatible(new int[] {10,10,10}, new int[] {10,10}));
assertTrue("[2] and [2,1,1,1]", AbstractDataset.areShapesCompatible(new int[] {2}, new int[] {2,1,1,1}));
assertTrue("[2,1] and [2,1,1,1]", AbstractDataset.areShapesCompatible(new int[] {2,1}, new int[] {2,1,1,1}));
assertFalse("[2,1] and [3,1,1,2]", AbstractDataset.areShapesCompatible(new int[] {2,1}, new int[] {3,1,1,2}));
assertFalse("[2,1] and [3,1,1,1]", AbstractDataset.areShapesCompatible(new int[] {2,1}, new int[] {3,1,1,1}));
assertTrue("[1,2,1] and [2,1,1,1]", AbstractDataset.areShapesCompatible(new int[] {1,2,1}, new int[] {2,1,1,1}));
assertTrue("[1,2,1,3] and [2,1,1,1,3]", AbstractDataset.areShapesCompatible(new int[] {1,2,1,3}, new int[] {2,1,1,1,3}));
assertTrue("[2,1,1] and [1,1,2]", AbstractDataset.areShapesCompatible(new int[] {2,1,1}, new int[] {1,1,2}));
assertFalse("[2,1,1] and [1,1,3]", AbstractDataset.areShapesCompatible(new int[] {2,1,1}, new int[] {1,1,3}));
assertFalse("[2,1,4] and [2,1,1,3]", AbstractDataset.areShapesCompatible(new int[] {2,1,4}, new int[] {2,1,1,3}));
assertFalse("[2,1,4] and [2,1,3]", AbstractDataset.areShapesCompatible(new int[] {2,1,4}, new int[] {2,1,3}));
assertFalse("[2,4] and [2,3]", AbstractDataset.areShapesCompatible(new int[] {2,4}, new int[] {2,3}));
assertTrue("[2,1,4] and [2,1,4]", AbstractDataset.areShapesCompatible(new int[] {2,1,4}, new int[] {2,1,4}));
assertTrue("[2,1,4] and [2,1,1,4]", AbstractDataset.areShapesCompatible(new int[] {2,1,4}, new int[] {2,1,1,4}));
assertFalse("[2,4] and [2,4,3]", AbstractDataset.areShapesCompatible(new int[] {2,4}, new int[] {2,4,3}));
assertFalse("[2,1,4] and [2,4,3]", AbstractDataset.areShapesCompatible(new int[] {2,1,4}, new int[] {2,4,3}));
assertTrue(AbstractDataset.areShapesCompatible(new int[] {}, new int[] {}, 0));
assertTrue(AbstractDataset.areShapesCompatible(new int[] {2}, new int[] {3}, 0));
assertFalse(AbstractDataset.areShapesCompatible(new int[] {2,4}, new int[] {3,4}, 1));
assertTrue(AbstractDataset.areShapesCompatible(new int[] {2,4}, new int[] {3,4}, 0));
// assertTrue(AbstractDataset.areShapesCompatible(new int[] {}, new int[] {}));
}
@Test
public void testBroadcastCompatibleShapes() {
assertTrue("[] and []", AbstractDataset.areShapesBroadcastCompatible(new int[] {}, new int[] {}));
assertTrue("[1] and []", AbstractDataset.areShapesBroadcastCompatible(new int[] {1}, new int[] {}));
assertTrue("[2] and []", AbstractDataset.areShapesBroadcastCompatible(new int[] {2}, new int[] {}));
assertTrue("[2] and [2]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2}, new int[] {2}));
assertTrue("[3] and [3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {3}, new int[] {3}));
assertTrue("[1,2] and [2]", AbstractDataset.areShapesBroadcastCompatible(new int[] {1,2}, new int[] {2}));
assertTrue("[2] and [1,2]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2}, new int[] {1,2}));
assertTrue("[4,2] and [1,2]", AbstractDataset.areShapesBroadcastCompatible(new int[] {4,2}, new int[] {1,2}));
assertTrue("[10,10] and [10,10,10]", AbstractDataset.areShapesBroadcastCompatible(new int[] {10,10}, new int[] {10,10,10}));
assertTrue("[10,10,10] and [10,10]", AbstractDataset.areShapesBroadcastCompatible(new int[] {10,10,10}, new int[] {10,10}));
assertTrue("[2] and [2,1,1,1]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2}, new int[] {2,1,1,1}));
assertTrue("[2,1] and [2,1,1,1]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1}, new int[] {2,1,1,1}));
assertTrue("[2,1] and [3,1,1,2]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1}, new int[] {3,1,1,2}));
assertTrue("[2,1] and [3,1,1,1]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1}, new int[] {3,1,1,1}));
assertTrue("[1,2,1] and [2,1,1,1]", AbstractDataset.areShapesBroadcastCompatible(new int[] {1,2,1}, new int[] {2,1,1,1}));
assertTrue("[1,2,1,3] and [2,1,1,1,3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {1,2,1,3}, new int[] {2,1,1,1,3}));
assertTrue("[2,1,1] and [1,1,2]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1,1}, new int[] {1,1,2}));
assertTrue("[2,1,1] and [1,1,3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1,1}, new int[] {1,1,3}));
assertFalse("[2,1,4] and [2,1,1,3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1,4}, new int[] {2,1,1,3}));
assertFalse("[2,1,4] and [2,1,3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1,4}, new int[] {2,1,3}));
assertFalse("[2,4] and [2,3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,4}, new int[] {2,3}));
assertTrue("[2,1,4] and [2,1,4]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1,4}, new int[] {2,1,4}));
assertTrue("[2,1,4] and [2,1,1,4]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1,4}, new int[] {2,1,1,4}));
assertFalse("[2,4] and [2,4,3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,4}, new int[] {2,4,3}));
assertFalse("[2,1,4] and [2,4,3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1,4}, new int[] {2,4,3}));
}
@Test
public void testMaxMin() {
Dataset a = DatasetFactory.createRange(12, Dataset.FLOAT64);
a.setShape(3,4);
assertEquals("Max", 11, a.max().doubleValue(), 1e-6);
assertEquals("Max 0", DatasetFactory.createFromObject(new double[] {8,9,10,11}), a.max(0));
assertEquals("Max 1", DatasetFactory.createFromObject(new double[] {3,7,11}), a.max(1));
assertEquals("Max arg", 11, a.argMax());
assertEquals("Max arg 0 ", DatasetFactory.createFromObject(new int[] {2,2,2,2}), a.argMax(0));
assertEquals("Max arg 1 ", DatasetFactory.createFromObject(new int[] {3,3,3}), a.argMax(1));
a.set(Double.NaN, 1, 0);
System.out.println(a.toString(true));
assertTrue("Max", Double.isNaN(a.max().doubleValue()));
assertTrue("Max", !Double.isNaN(a.max(true).doubleValue()));
assertTrue("Max 0", equalsWithNaNs(DatasetFactory.createFromObject(new double[] {Double.NaN,9,10,11}), a.max(0)));
assertTrue("Max 1", equalsWithNaNs(DatasetFactory.createFromObject(new double[] {3,Double.NaN,11}), a.max(1)));
assertEquals("Max arg", 4, a.argMax());
assertEquals("Max arg 0 ", DatasetFactory.createFromObject(new int[] {1,2,2,2}), a.argMax(0));
assertEquals("Max arg 1 ", DatasetFactory.createFromObject(new int[] {3,0,3}), a.argMax(1));
assertEquals("Max", 11, a.max(true).doubleValue(), 1e-6);
assertEquals("Max 0", DatasetFactory.createFromObject(new double[] {8,9,10,11}), a.max(true,0));
assertEquals("Max 1", DatasetFactory.createFromObject(new double[] {3,7,11}), a.max(true,1));
assertEquals("Max arg", 11, a.argMax(true));
assertEquals("Max arg 0 ", DatasetFactory.createFromObject(new int[] {2,2,2,2}), a.argMax(true, 0));
assertEquals("Max arg 1 ", DatasetFactory.createFromObject(new int[] {3,3,3}), a.argMax(true, 1));
a.set(Double.NEGATIVE_INFINITY, 1, 1);
System.out.println(a.toString(true));
assertTrue("Max", Double.isNaN(a.max().doubleValue()));
assertTrue("Max", !Double.isNaN(a.max(true).doubleValue()));
assertTrue("Max", Double.isNaN(a.max(false, true).doubleValue()));
assertTrue("Max", !Double.isNaN(a.max(true, false).doubleValue()));
assertEquals("Max", 11, a.max(true).doubleValue(), 1e-6);
assertTrue("+Max", Double.isNaN(a.positiveMax(false).doubleValue()));
assertTrue("+Max", Double.isNaN(a.positiveMax(false, true).doubleValue()));
assertTrue("+Max", !Double.isNaN(a.positiveMax(true, false).doubleValue()));
assertEquals("+Max", 11, a.positiveMax(true).doubleValue(), 1e-6);
assertTrue("Min", Double.isNaN(a.min().doubleValue()));
assertTrue("Min", !Double.isNaN(a.min(true).doubleValue()));
assertTrue("Min", Double.isNaN(a.min(false, true).doubleValue()));
assertTrue("Min", !Double.isNaN(a.min(true, false).doubleValue()));
assertTrue("Min", !Double.isInfinite(a.min().doubleValue()));
assertTrue("Min", !Double.isInfinite(a.min(true).doubleValue()));
assertTrue("Min", !Double.isInfinite(a.min(false, true).doubleValue()));
assertTrue("Min", Double.isInfinite(a.min(true, false).doubleValue()));
assertEquals("Min", 0, a.min(true).doubleValue(), 1e-6);
assertTrue("+Min", !Double.isNaN(a.positiveMin(true).doubleValue()));
assertTrue("+Min", Double.isNaN(a.positiveMin(false).doubleValue()));
assertTrue("+Min", Double.isNaN(a.positiveMin(false, true).doubleValue()));
assertTrue("+Min", !Double.isNaN(a.positiveMin(true, false).doubleValue()));
assertTrue("+Min", !Double.isInfinite(a.positiveMin(true).doubleValue()));
assertTrue("+Min", !Double.isInfinite(a.positiveMin(false).doubleValue()));
assertTrue("+Min", !Double.isInfinite(a.positiveMin(true, false).doubleValue()));
assertTrue("+Min", !Double.isInfinite(a.positiveMin(false, true).doubleValue()));
assertEquals("+Min", 1, a.positiveMin(true).doubleValue(), 1e-6);
// test other code path
Dataset b = DatasetFactory.createRange(12, Dataset.FLOAT64);
b.setShape(3,4);
b.mean(); // trigger summary stats calculation
assertEquals("Max", 11, b.max().doubleValue(), 1e-6);
assertEquals("Max arg", 11, b.argMax());
b.set(Double.NaN, 1, 0);
b.mean(); // trigger summary stats calculation
assertTrue("Max", Double.isNaN(b.max().doubleValue()));
assertEquals("Max arg", 4, b.argMax());
b.mean(true);
assertEquals("Max", 11, b.max(true).doubleValue(), 1e-6);
assertEquals("Max arg", 11, b.argMax(true));
// check strided datasets give same max/min positions
a = DatasetFactory.createRange(12, Dataset.FLOAT64).reshape(3,4);
b = a.getSliceView(new Slice(1, null), new Slice(0, null, 2));
Dataset c = a.getSlice(new Slice(1, null), new Slice(0, null, 2));
Assert.assertEquals(c.argMax(), b.argMax());
Assert.assertEquals(c.argMin(), b.argMin());
}
@Test
public void testGetSpeed() {
final int ITERATIONS = 1000;
Dataset a = DatasetFactory.createRange(1000000, Dataset.FLOAT64);
long start, startN, startP;
start = -System.nanoTime();
for (int i = 0; i < 10; i++) {
a.getDouble(i);
}
start += System.nanoTime();
start = -System.nanoTime();
for (int i = 0; i < ITERATIONS; i++) {
a.getDouble(i);
}
start += System.nanoTime();
startN = -System.nanoTime();
for (int i = 0; i < ITERATIONS; i++) {
a.getDouble(new int[] {i});
}
startN += System.nanoTime();
startP = -System.nanoTime();
int[] pos = new int[1];
for (int i = 0; i < ITERATIONS; i++) {
pos[0] = i;
a.getDouble(pos);
}
startP += System.nanoTime();
System.out.printf("Get 1D double took %gus (cf %gus and %gus)\n", start*1e-3/ITERATIONS, startN*1e-3/ITERATIONS, startP*1e-3/ITERATIONS);
a.setShape(1000, 1000);
start = -System.nanoTime();
for (int i = 0; i < 10; i++) {
a.getDouble(i, i);
}
start += System.nanoTime();
start = -System.nanoTime();
for (int i = 0; i < ITERATIONS; i++) {
a.getDouble(i, i);
}
start += System.nanoTime();
startN = -System.nanoTime();
for (int i = 0; i < ITERATIONS; i++) {
a.getDouble(new int[] {i, i});
}
startN += System.nanoTime();
startP = -System.nanoTime();
pos = new int[2];
for (int i = 0; i < ITERATIONS; i++) {
pos[0] = i;
pos[1] = i;
a.getDouble(pos);
}
startP += System.nanoTime();
System.out.printf("Get 2D double took %gus (cf %gus and %gus)\n", start*1e-3/ITERATIONS, startN*1e-3/ITERATIONS, startP*1e-3/ITERATIONS);
}
@Test
public void testHash() {
Dataset a = DatasetFactory.createRange(12, Dataset.FLOAT64);
a.setShape(3,4);
Dataset b = DatasetFactory.createRange(12, Dataset.FLOAT64);
b.setShape(3,4);
b.mean(); // trigger other code path
assertEquals("Hash code", a.hashCode(), b.hashCode());
a.set(Double.POSITIVE_INFINITY, 1, 0);
b.set(Double.POSITIVE_INFINITY, 1, 0);
b.mean(); // trigger other code path
assertEquals("Hash code", a.hashCode(), b.hashCode());
a.set(Double.NaN, 0, 1);
b.set(Double.NaN, 0, 1);
b.mean(); // trigger other code path
assertEquals("Hash code", a.hashCode(), b.hashCode());
}
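// Element-wise comparison that treats two NaNs at the same position as equal, unlike Dataset.equals().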
private static boolean equalsWithNaNs(Dataset a, Dataset b) {
if (a.equals(b))
return true;
IndexIterator ita = a.getIterator();
IndexIterator itb = b.getIterator();
while (ita.hasNext() && itb.hasNext()) {
final double av = a.getElementDoubleAbs(ita.index);
final double bv = b.getElementDoubleAbs(itb.index);
if (Double.isNaN(av)) {
if (!Double.isNaN(bv))
return false;
} else {
if (av != bv)
return false;
}
}
return true;
}
@Test
public void testMaxSpeed() {
long start;
long elapsed;
final int ITERATIONS = 200;
Dataset a = DatasetFactory.createRange(1000000, Dataset.FLOAT64);
for (int i = 0; i < 10; i++) {
a.set(1, 0);
start = -System.nanoTime();
a.max();
start += System.nanoTime();
}
elapsed = 0;
for (int i = 0; i < ITERATIONS; i++) {
a.set(1, 0);
start = -System.nanoTime();
a.max();
start += System.nanoTime();
elapsed += start;
}
System.out.printf("Max double calculation took %g ms\n", elapsed*1e-6/ITERATIONS);
a = DatasetFactory.createRange(1000000, Dataset.INT16);
elapsed = 0;
for (int i = 0; i < ITERATIONS; i++) {
a.set(1, 0);
start = -System.nanoTime();
a.max();
start += System.nanoTime();
elapsed += start;
}
System.out.printf("Max short calculation took %g ms\n", elapsed*1e-6/ITERATIONS);
}
@Test
public void testSort() {
Dataset a = DatasetFactory.createRange(12, Dataset.FLOAT64);
a.set(Double.NaN, 0);
a.setShape(3, 4);
a.sort(-1);
TestUtils.verbosePrintf("%s\n", a.toString());
assertEquals("First element", 1, a.getDouble(0,0), 1e-6);
assertTrue("0,3 element", Double.isNaN(a.getDouble(0,3)));
assertEquals("Final element", 11, a.getDouble(2,3), 1e-6);
a.sort(0);
TestUtils.verbosePrintf("%s\n", a.toString());
assertEquals("First element", 1, a.getDouble(0,0), 1e-6);
assertEquals("0,3 element", 7, a.getDouble(0,3), 1e-6);
assertTrue("Final element", Double.isNaN(a.getDouble(2,3)));
a.set(12, 0);
a.sort(null);
TestUtils.verbosePrintf("%s\n", a.toString());
assertEquals("First element", 2, a.getDouble(0), 1e-6);
assertEquals("2,2 element", 12, a.getDouble(2,2), 1e-6);
assertTrue("Final element", Double.isNaN(a.getDouble(2,3)));
}
@Test
public void testPut() {
Dataset d1 = DatasetFactory.createRange(6, Dataset.FLOAT64);
DatasetUtils.put(d1, new int[] {2, 5}, DatasetFactory.createFromObject(new double[] {-2, -5.5}));
checkDatasets(d1, DatasetFactory.createFromObject(new double[] {0, 1, -2, 3, 4, -5.5}));
DatasetUtils.put(d1, DatasetFactory.createFromObject(new int[] {0, 4}), DatasetFactory.createFromObject(new double[] {-2, -5.5}));
checkDatasets(d1, DatasetFactory.createFromObject(new double[] {-2, 1, -2, 3, -5.5, -5.5}));
d1 = DatasetFactory.createRange(6, Dataset.FLOAT64).reshape(2, 3);
DatasetUtils.put(d1, new int[] {2, 5}, DatasetFactory.createFromObject(new double[] {-2, -5.5}));
checkDatasets(d1, DatasetFactory.createFromObject(new double[] {0, 1, -2, 3, 4, -5.5}).reshape(2, 3));
DatasetUtils.put(d1, DatasetFactory.createFromObject(new int[] {0, 4}), DatasetFactory.createFromObject(new double[] {-2, -5.5}));
checkDatasets(d1, DatasetFactory.createFromObject(new double[] {-2, 1, -2, 3, -5.5, -5.5}).reshape(2, 3));
}
@Test
public void testTake() {
Dataset a = DatasetFactory.createRange(12, Dataset.FLOAT64);
Dataset t;
t = DatasetUtils.take(a, new int[] {0, 2, 4}, null);
TestUtils.verbosePrintf("%s\n", t.toString());
t = DatasetUtils.take(a, new int[] {0, 2, 4}, 0);
TestUtils.verbosePrintf("%s\n", t.toString());
a.setShape(new int[] {3,4});
TestUtils.verbosePrintf("%s\n", a.toString());
t = DatasetUtils.take(a, new int[] {0}, 0);
TestUtils.verbosePrintf("%s\n", t.toString());
t = DatasetUtils.take(a, new int[] {1}, 0);
TestUtils.verbosePrintf("%s\n", t.toString());
t = DatasetUtils.take(a, new int[] {2}, 0);
TestUtils.verbosePrintf("%s\n", t.toString());
t = DatasetUtils.take(a, new int[] {0}, 1);
TestUtils.verbosePrintf("%s\n", t.toString());
t = DatasetUtils.take(a, new int[] {1}, 1);
TestUtils.verbosePrintf("%s\n", t.toString());
t = DatasetUtils.take(a, new int[] {2}, 1);
TestUtils.verbosePrintf("%s\n", t.toString());
t = DatasetUtils.take(a, new int[] {3}, 1);
TestUtils.verbosePrintf("%s\n", t.toString());
}
/**
* Tests for squeeze method
*/
@Test
public void testSqueeze() {
Dataset ds = DatasetFactory.createRange(10, Dataset.FLOAT64);
ds.setShape(2,1,5);
ds.squeeze();
assertEquals(2, ds.getShape().length);
assertEquals(2, ds.getShape()[0]);
assertEquals(5, ds.getShape()[1]);
int[] os, ns;
os = new int[] { 1, 1 };
ns = AbstractDataset.squeezeShape(os, false);
assertEquals(0, ns.length);
ns = AbstractDataset.squeezeShape(os, true);
assertEquals(0, ns.length);
os = new int[] { 2, 1, 5 };
ns = AbstractDataset.squeezeShape(os, false);
assertEquals(2, ns.length);
assertEquals(2, ns[0]);
assertEquals(5, ns[1]);
ns = AbstractDataset.squeezeShape(os, true);
assertEquals(3, ns.length);
assertEquals(2, ns[0]);
assertEquals(1, ns[1]);
assertEquals(5, ns[2]);
os = new int[] { 2, 1, 5, 1 };
ns = AbstractDataset.squeezeShape(os, false);
assertEquals(2, ns.length);
assertEquals(2, ns[0]);
assertEquals(5, ns[1]);
ns = AbstractDataset.squeezeShape(os, true);
assertEquals(3, ns.length);
assertEquals(2, ns[0]);
assertEquals(1, ns[1]);
assertEquals(5, ns[2]);
os = new int[] { 1, 2, 1, 5 };
ns = AbstractDataset.squeezeShape(os, false);
assertEquals(2, ns.length);
assertEquals(2, ns[0]);
assertEquals(5, ns[1]);
ns = AbstractDataset.squeezeShape(os, true);
assertEquals(3, ns.length);
assertEquals(2, ns[0]);
assertEquals(1, ns[1]);
assertEquals(5, ns[2]);
os = new int[] { 1, 2, 1, 5, 1 };
ns = AbstractDataset.squeezeShape(os, false);
assertEquals(2, ns.length);
assertEquals(2, ns[0]);
assertEquals(5, ns[1]);
ns = AbstractDataset.squeezeShape(os, true);
assertEquals(3, ns.length);
assertEquals(2, ns[0]);
assertEquals(1, ns[1]);
assertEquals(5, ns[2]);
}
/**
* Tests for tile method
*/
@Test
public void testTile() {
// 1D
Dataset ds = DatasetFactory.createRange(3, Dataset.FLOAT64);
Dataset ta = DatasetUtils.tile(ds, 2);
double[] xa = { 0., 1., 2., 0., 1., 2. };
assertEquals(1, ta.getShape().length);
assertEquals(6, ta.getShape()[0]);
for (int i = 0; i < xa.length; i++) {
assertEquals(xa[i], ((DoubleDataset) ta).getData()[i], 1e-6);
}
Dataset tb = DatasetUtils.tile(ds, 1, 2);
assertEquals(2, tb.getShape().length);
assertEquals(1, tb.getShape()[0]);
assertEquals(6, tb.getShape()[1]);
for (int i = 0; i < xa.length; i++) {
assertEquals(xa[i], ((DoubleDataset) tb).getData()[i], 1e-6);
}
Dataset tc = DatasetUtils.tile(ds, 2, 1);
assertEquals(2, tc.getShape().length);
assertEquals(2, tc.getShape()[0]);
assertEquals(3, tc.getShape()[1]);
for (int i = 0; i < xa.length; i++) {
assertEquals(xa[i], ((DoubleDataset) tc).getData()[i], 1e-6);
}
// 2D
ds = DatasetFactory.createRange(6, Dataset.FLOAT64);
ds.setShape(2,3);
Dataset td = DatasetUtils.tile(ds, 2);
double[] xd = { 0., 1., 2., 0., 1., 2., 3., 4., 5., 3., 4., 5. };
assertEquals(2, td.getShape().length);
assertEquals(2, td.getShape()[0]);
assertEquals(6, td.getShape()[1]);
for (int i = 0; i < xd.length; i++) {
assertEquals(xd[i], ((DoubleDataset) td).getData()[i], 1e-6);
}
Dataset te = DatasetUtils.tile(ds, 1, 2);
assertEquals(2, te.getShape().length);
assertEquals(2, te.getShape()[0]);
assertEquals(6, te.getShape()[1]);
for (int i = 0; i < xd.length; i++) {
assertEquals(xd[i], ((DoubleDataset) te).getData()[i], 1e-6);
}
Dataset tf = DatasetUtils.tile(ds, 2, 1);
double[] xf = { 0., 1., 2., 3., 4., 5., 0., 1., 2., 3., 4., 5. };
assertEquals(2, tf.getShape().length);
assertEquals(4, tf.getShape()[0]);
assertEquals(3, tf.getShape()[1]);
for (int i = 0; i < xf.length; i++) {
assertEquals(xf[i], ((DoubleDataset) tf).getData()[i], 1e-6);
}
}
@Test
public void testTileSpeed() {
int[][] blocks = {{1024,1}, {256,4}, {64,16}, {32,32}, {16,64}, {4, 256}, {1,1024}};
// int[][] blocks = {{1024,64}, {256,64}, {64,64}, {32,64}, {16,64}, {4, 64}, {1,64}};
int[][] shapes = { {1024, 2048}, {2048, 2048}, {2048, 1024} };
for (int b = 0; b < blocks.length; b++) {
for (int s = 0; s < shapes.length; s++) {
for (int n = 0; n < 3; n++)
runTile(blocks[b][0], blocks[b][1], shapes[s][0], shapes[s][1]);
}
}
}
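// Tiles a srows x scols block up to rows x cols both by hand and via DatasetUtils.tile(),
// checks the results match, and asserts the library call is not dramatically slower than the manual copy.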
private void runTile(final int srows, final int scols, final int rows, final int cols) {
Dataset a = DatasetFactory.createRange(srows*scols, Dataset.FLOAT64).reshape(srows, scols);
long start, end;
TestUtils.verbosePrintf("Tile %sx%d Block %dx%d: ", rows, cols, srows, scols);
final int nrows = rows/srows;
final int ncols = cols/scols;
start = System.currentTimeMillis();
DoubleDataset b = new DoubleDataset(rows, cols);
final double[] da = (double[]) a.getBuffer();
final double[] db = b.getData();
if (scols == 1) {
for (int i = 0; i < db.length; i++) {
db[i] = da[(i / cols) % srows];
}
} else if (srows == 1) {
for (int i = 0; i < db.length; i++) {
db[i] = da[i % scols];
}
} else {
for (int i = 0; i < db.length; i++) {
db[i] = da[((i / cols) % srows) * scols + i % scols];
}
}
end = System.currentTimeMillis();
long diff1 = end - start;
TestUtils.verbosePrintf("array = %d ms, ", diff1);
start = System.currentTimeMillis();
final Dataset tiled = DatasetUtils.tile(a, nrows, ncols);
end = System.currentTimeMillis();
long diff2 = end - start;
TestUtils.verbosePrintf("tile = %d ms\n", diff2);
assertEquals(rows, tiled.getShape()[0]);
assertEquals(cols, tiled.getShape()[1]);
assertEquals("Datasets not equal", tiled, b);
assertTrue("Creation of tile took more than 25x as long as array creation of same size! (It took "
+ diff2 + ")", diff2 <= (diff1 * 25));
}
/**
* Tests for transpose method
*/
@Test
public void testTranspose() {
// 2D
Dataset ds = DatasetFactory.createRange(6, Dataset.FLOAT64);
ds.setShape(2,3);
Dataset ta = DatasetUtils.transpose(ds, 1, 0);
double[][] xa = { { 0., 1., 2. }, { 3., 4., 5. } };
assertEquals(2, ta.getShape().length);
assertEquals(3, ta.getShape()[0]);
assertEquals(2, ta.getShape()[1]);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 3; j++) {
assertEquals(xa[i][j], ta.getDouble(j, i), 1e-6);
}
}
ds.set(-2, 1, 2);
assertEquals(-2., ds.getDouble(1, 2), 1e-6);
assertEquals(5., ta.getDouble(2, 1), 1e-6);
ta = ds.getTransposedView(); // test view has changed
assertEquals(-2., ta.getDouble(2, 1), 1e-6);
// 3D
ds = DatasetFactory.createRange(24, Dataset.FLOAT64);
ds.setShape(2, 3, 4);
double[][][] xb = { {{ 0., 1., 2., 3.}, {4., 5., 6., 7.}, {8., 9., 10., 11. }},
{{12., 13., 14., 15.}, {16., 17., 18., 19.}, {20., 21., 22., 23.}} };
Dataset tb;
try {
tb = DatasetUtils.transpose(ds, 0);
fail("expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
// this is correct.
} catch (Exception e) {
fail("wrong exception type passed from incorrect arguments being passed to the method");
}
try {
tb = DatasetUtils.transpose(ds, 0, -1, 0);
fail("expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
// this is correct.
} catch (Exception e) {
fail("wrong exception type passed from incorrect arguments being passed to the method");
}
try {
tb = DatasetUtils.transpose(ds, 0, 1, 1);
fail("expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
// this is correct.
} catch (Exception e) {
fail("wrong exception type passed from incorrect arguments being passed to the method");
}
tb = DatasetUtils.transpose(ds, 0, 1, 2);
assertEquals(3, tb.getShape().length);
assertEquals(2, tb.getShape()[0]);
assertEquals(3, tb.getShape()[1]);
assertEquals(4, tb.getShape()[2]);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 3; j++) {
for (int k = 0; k < 4; k++) {
assertEquals(xb[i][j][k], tb.getDouble(i, j, k), 1e-6);
}
}
}
tb = DatasetUtils.transpose(ds, 1, 0, 2);
assertEquals(3, tb.getShape().length);
assertEquals(3, tb.getShape()[0]);
assertEquals(2, tb.getShape()[1]);
assertEquals(4, tb.getShape()[2]);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 3; j++) {
for (int k = 0; k < 4; k++) {
assertEquals(xb[i][j][k], tb.getDouble(j, i, k), 1e-6);
}
}
}
tb = DatasetUtils.transpose(ds, 2, 0, 1);
assertEquals(3, tb.getShape().length);
assertEquals(4, tb.getShape()[0]);
assertEquals(2, tb.getShape()[1]);
assertEquals(3, tb.getShape()[2]);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 3; j++) {
for (int k = 0; k < 4; k++) {
assertEquals(xb[i][j][k], tb.getDouble(k, i, j), 1e-6);
}
}
}
}
/**
* Tests for repeat method
*/
@Test
public void testRepeat() {
// 2D
Dataset ds = DatasetFactory.createRange(6, Dataset.FLOAT64);
ds.setShape(2,3);
double[] xa = { 0., 0., 1., 1., 2., 2., 3., 3., 4., 4., 5., 5. };
DoubleDataset ta = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {2}, -1);
assertEquals(1, ta.getShape().length);
assertEquals(12, ta.getShape()[0]);
for (int i = 0; i < 12; i++) {
assertEquals(xa[i], ta.get(i), 1e-6);
}
double[][] xb = { { 0., 0., 1., 1., 2., 2. }, { 3., 3., 4., 4., 5., 5. } };
DoubleDataset tb = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {2}, 1);
assertEquals(2, tb.getShape().length);
assertEquals(2, tb.getShape()[0]);
assertEquals(6, tb.getShape()[1]);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 6; j++) {
assertEquals(xb[i][j], tb.get(i, j), 1e-6);
}
}
double[][] xc = { { 0., 1., 2. }, { 0., 1., 2. }, { 3., 4., 5. }, { 3., 4., 5. } };
DoubleDataset tc = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {2}, 0);
assertEquals(2, tc.getShape().length);
assertEquals(4, tc.getShape()[0]);
assertEquals(3, tc.getShape()[1]);
for (int i = 0; i < 4; i++) {
for (int j = 0; j < 3; j++) {
assertEquals(xc[i][j], tc.get(i, j), 1e-6);
}
}
double[][] xd = { { 0., 1., 2. }, { 0., 1., 2. }, { 3., 4., 5. } };
DoubleDataset td = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {2, 1}, 0);
assertEquals(2, td.getShape().length);
assertEquals(3, td.getShape()[0]);
assertEquals(3, td.getShape()[1]);
for (int i = 0; i < 3; i++) {
for (int j = 0; j < 3; j++) {
assertEquals(xd[i][j], td.get(i, j), 1e-6);
}
}
double[][] xe = { { 0., 1., 1., 2., 2., 2.}, { 3., 4., 4., 5., 5., 5. } };
DoubleDataset te = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {1, 2, 3}, 1);
assertEquals(2, te.getShape().length);
assertEquals(2, te.getShape()[0]);
assertEquals(6, te.getShape()[1]);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 6; j++) {
assertEquals(xe[i][j], te.get(i, j), 1e-6);
}
}
double[] xf = { 0., 1., 2., 2., 5., 5., 5. };
DoubleDataset tf = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {1, 1, 2, 0, 0, 3}, -1);
assertEquals(1, tf.getShape().length);
assertEquals(7, tf.getShape()[0]);
for (int i = 0; i < 7; i++) {
assertEquals(xf[i], tf.get(i), 1e-6);
}
try {
tf = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {0}, 3);
fail("expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
// this is correct.
} catch (Exception e) {
fail("wrong exception type passed from incorrect arguments being passed to the method");
}
try {
tf = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {2, 1}, -1);
fail("expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
// this is correct.
} catch (Exception e) {
fail("wrong exception type passed from incorrect arguments being passed to the method");
}
try {
tf = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {-1}, -1);
fail("expected IllegalArgumentException");
} catch (IllegalArgumentException e) {
// this is correct.
} catch (Exception e) {
fail("wrong exception type passed from incorrect arguments being passed to the method");
}
}
/**
* Tests for resize method
*/
@Test
public void testResize() {
int size = 6;
Dataset ds = DatasetFactory.createRange(size, Dataset.FLOAT64);
DoubleDataset tf;
IndexIterator it;
tf = (DoubleDataset) DatasetUtils.resize(ds, 3);
assertArrayEquals(new int[] {3}, tf.getShape());
it = tf.getIterator();
while (it.hasNext()) {
assertEquals(it.index % size, tf.getElementDoubleAbs(it.index), 1e-6);
}
tf = (DoubleDataset) DatasetUtils.resize(ds, 8);
assertArrayEquals(new int[] {8}, tf.getShape());
it = tf.getIterator();
while (it.hasNext()) {
assertEquals(it.index % size, tf.getElementDoubleAbs(it.index), 1e-6);
}
tf = (DoubleDataset) DatasetUtils.resize(ds, 3, 4);
assertArrayEquals(new int[] {3, 4}, tf.getShape());
it = tf.getIterator();
while (it.hasNext()) {
assertEquals(it.index % size, tf.getElementDoubleAbs(it.index), 1e-6);
}
ds.setShape(2,3);
tf = (DoubleDataset) DatasetUtils.resize(ds, 3);
assertArrayEquals(new int[] {3}, tf.getShape());
it = tf.getIterator();
while (it.hasNext()) {
assertEquals(it.index % size, tf.getElementDoubleAbs(it.index), 1e-6);
}
tf = (DoubleDataset) DatasetUtils.resize(ds, 8);
assertArrayEquals(new int[] {8}, tf.getShape());
it = tf.getIterator();
while (it.hasNext()) {
assertEquals(it.index % size, tf.getElementDoubleAbs(it.index), 1e-6);
}
tf = (DoubleDataset) DatasetUtils.resize(ds, 3, 4);
assertArrayEquals(new int[] {3, 4}, tf.getShape());
it = tf.getIterator();
while (it.hasNext()) {
assertEquals(it.index % size, tf.getElementDoubleAbs(it.index), 1e-6);
}
}
/**
* Test contents functions
*/
@Test
public void testContents() {
double[] x = { 0, 2., -12.3 };
double[] y = { 2.3, Double.NaN, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY };
double[] z = { 1e14, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY };
DoubleDataset ta = new DoubleDataset(x);
		assertFalse(ta.containsNans());
		assertFalse(ta.containsInfs());
		DoubleDataset tb = new DoubleDataset(y);
		assertTrue(tb.containsNans());
		assertTrue(tb.containsInfs());
		assertTrue(Double.isNaN(tb.min().doubleValue()));
		assertFalse(Double.isInfinite(tb.min().doubleValue()));
		assertTrue(Double.isNaN(tb.max().doubleValue()));
		assertFalse(Double.isInfinite(tb.max().doubleValue()));
		Dataset f = tb.cast(Dataset.FLOAT32);
		assertTrue(f.containsNans());
		assertTrue(f.containsInfs());
		assertTrue(Double.isNaN(f.min().doubleValue()));
		assertFalse(Double.isInfinite(f.min().doubleValue()));
		assertTrue(Double.isNaN(f.max().doubleValue()));
		assertFalse(Double.isInfinite(f.max().doubleValue()));
		DoubleDataset tc = new DoubleDataset(z);
		assertTrue(Double.isInfinite(tc.min().doubleValue()));
		assertTrue(Double.isInfinite(tc.max().doubleValue()));
}
@Test
public void testView() {
Dataset a = DatasetFactory.createRange(20, Dataset.FLOAT64);
Dataset b = a.getView();
		assertTrue(a.equals(b));
}
/**
* Test equals and hashCode
*/
@Test
public void testEquals() {
Dataset a, b, c, d, e;
a = DatasetFactory.createRange(20, Dataset.FLOAT64);
b = DatasetFactory.createRange(20, Dataset.FLOAT64);
c = a.clone();
d = Maths.add(a, 0.5);
e = DatasetFactory.createRange(20, Dataset.FLOAT32);
assertTrue(a.equals(b));
assertFalse(a == b);
assertTrue(a.equals(c));
assertFalse(a == c);
assertFalse(a.equals(d));
assertFalse(a.equals(e));
HashSet<Dataset> set = new HashSet<Dataset>();
set.add(a);
assertTrue(set.contains(a));
assertTrue(set.contains(b));
assertTrue(set.contains(c));
assertFalse(set.contains(d));
assertFalse(set.contains(e));
set.add(b);
assertEquals(1, set.size());
set.add(d);
set.add(e);
assertEquals(3, set.size());
assertTrue(set.contains(d));
assertTrue(set.contains(e));
assertTrue(set.contains(Maths.subtract(d, 0.5)));
assertFalse(set.contains(Maths.subtract(d, 0.5001)));
assertTrue(set.contains(e.cast(Dataset.FLOAT64)));
assertTrue(set.contains(b.cast(Dataset.FLOAT32)));
}
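
	// Added illustrative sketch (not part of the original suite): equal-content datasets must
	// also share a hash code for the HashSet behaviour exercised in testEquals to hold.
	@Test
	public void testHashConsistentWithEqualsSketch() {
		Dataset a = DatasetFactory.createRange(5, Dataset.FLOAT64);
		Dataset b = DatasetFactory.createRange(5, Dataset.FLOAT64);
		assertTrue(a.equals(b));
		assertEquals(a.hashCode(), b.hashCode());
	}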
@Test
public void testPrint() {
Dataset a = DatasetFactory.createRange(1000000, Dataset.INT32);
System.out.println(a);
System.out.println(a.reshape(1000, 1000));
System.out.println(a.reshape(100, 100, 100));
// System.out.println(a.reshape(10, 10, 100, 100));
Dataset b = DatasetFactory.createRange(12, Dataset.INT32);
System.out.println(b);
System.out.println(b.reshape(1,12));
System.out.println(b.reshape(4,1,3));
}
@Test
public void testSlicing() {
Dataset a = DatasetFactory.createRange(1000, Dataset.INT32);
Dataset s, t;
IndexIterator is, it;
s = a.getSlice(null, new int[] {10}, null);
assertEquals(1, s.getShape().length);
assertEquals(10, s.getShape()[0]);
is = s.getIterator();
for (int i = 0; is.hasNext(); i++) {
assertEquals(i, s.getElementLongAbs(is.index));
}
t = a.getSlice(new Slice(10));
assertEquals(1, t.getShape().length);
assertEquals(10, t.getShape()[0]);
it = t.getIterator();
for (int i = 0; it.hasNext(); i++) {
assertEquals(i, t.getElementLongAbs(it.index));
}
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
s = a.getSlice(new int[]{0}, new int[] {10}, null);
assertEquals(1, s.getShape().length);
assertEquals(10, s.getShape()[0]);
s = a.getSlice(new int[]{-1000}, new int[] {10}, null);
assertEquals(1, s.getShape().length);
assertEquals(10, s.getShape()[0]);
s = a.getSlice(new int[] {9}, null, new int[] {-1});
assertEquals(1, s.getShape().length);
assertEquals(10, s.getShape()[0]);
is = s.getIterator();
for (int i = 9; is.hasNext(); i--) {
assertEquals(i, s.getElementLongAbs(is.index));
}
t = a.getSlice(new Slice(9, null, -1));
assertEquals(1, t.getShape().length);
assertEquals(10, t.getShape()[0]);
it = t.getIterator();
for (int i = 9; it.hasNext(); i--) {
assertEquals(i, t.getElementLongAbs(it.index));
}
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
s = a.getSlice(new int[] {2}, new int[] {10}, null);
t = a.getSlice(new Slice(2, 10));
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
s = a.getSlice(new int[] {2}, new int[] {10}, new int[] {3});
t = a.getSlice(new Slice(2, 10, 3));
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
t = a.getSlice(new Slice(2000));
assertArrayEquals(a.getShapeRef(), t.getShapeRef());
t = a.getSlice(new Slice(12, 10, 3));
assertArrayEquals(new int[] {0}, t.getShapeRef());
t = a.getSlice(new Slice(2, 10, -3));
assertArrayEquals(new int[] {0}, t.getShapeRef());
a.setShape(10, 10, 10);
s = a.getSlice(null, null, null);
t = a.getSlice();
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
s = a.getSlice(null, null, null);
Slice[] slice = null;
t = a.getSlice(slice);
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
s = a.getSlice(null, new int[] {8, 10, 10}, null);
t = a.getSlice(new Slice(8));
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
s = a.getSlice(null, new int[] {8, 3, 10}, null);
t = a.getSlice(new Slice(8), new Slice(3));
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
s = a.getSlice(null, new int[] {8, 10, 3}, null);
t = a.getSlice(new Slice(8), null, new Slice(3));
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
}
@Test
public void testSlicingViews() {
DoubleDataset a, b, c;
a = (DoubleDataset) DatasetFactory.createRange(32, Dataset.FLOAT64).reshape(4, 8);
checkSliceView(a, new int[] {0, 1}, new int[] {3, 5}, new int[] {1, 2});
checkSliceView(a, new int[] {1, -1}, new int[] {-1, 3}, new int[] {1, -2});
a = (DoubleDataset) DatasetFactory.createRange(60, Dataset.FLOAT64).reshape(6, 10);
b = checkSliceView(a, new int[] {0, 1}, new int[] {6, 8}, new int[] {1, 2}); // 6x4
c = (DoubleDataset) b.getSliceView(new int[] {0, 1}, new int[] {1, 4}, null);
c.setShape(3);
checkSliceView(b, new int[] {1, 0}, new int[] {5, 3}, new int[] {2, 1});
checkSliceView(b, new int[] {1, -1}, new int[] {5, 2}, new int[] {2, -1});
c = (DoubleDataset) a.getSlice(new int[] {0, 1}, new int[] {6, 8}, new int[] {1, 2});
b.setShape(2,3,4);
c.setShape(2,3,4);
assertEquals(c, b);
b.setShape(6,4);
b.setShape(6,2,2);
c.setShape(6,2,2);
assertEquals(c, b);
b.setShape(6,4);
try {
b.setShape(2,12);
fail("Should have raised an exception");
} catch (IllegalArgumentException iae) {
// expected
} catch (Exception e) {
fail("Unexpected exception: " + e);
}
b = checkSliceView(a, new int[] {1, -1}, new int[] {-1, 2}, new int[] {1, -2}); // 4x4
checkSliceView(b, new int[] {1, 0}, new int[] {4, 3}, new int[] {2, 1});
checkSliceView(b, new int[] {1, -1}, new int[] {4, 2}, new int[] {2, -1});
b = checkSliceView(a, new int[] {0, 1}, new int[] {1, 8}, new int[] {1, 2}); // 1x4
b = checkSliceView(a, new int[] {0, 1}, new int[] {6, 2}, new int[] {1, 2}); // 6x1
// test special case of zero-rank dataset
a = (DoubleDataset) DatasetFactory.createFromObject(1., Dataset.FLOAT64);
b = (DoubleDataset) a.getSliceView();
b.setShape(1);
assertTrue(b.getIterator().hasNext());
}
private DoubleDataset checkSliceView(DoubleDataset a, int[] start, int[] stop, int[] step) {
DoubleDataset s = (DoubleDataset) a.getSliceView(start, stop, step).squeeze();
DoubleDataset t = (DoubleDataset) a.getSlice(start, stop, step).squeeze();
assertArrayEquals(t.getShape(), s.getShape());
assertEquals(t.toString(true), t, s);
IndexIterator iter = s.getIterator(true);
int[] pos = iter.getPos();
while (iter.hasNext()) {
assertEquals(iter.index, ((AbstractDataset) s).get1DIndex(pos));
int[] p = s.getNDPosition(iter.index);
assertArrayEquals(Arrays.toString(pos) + " : " + Arrays.toString(p), pos, p);
}
// test for correct copying of non-contiguous datasets
assertArrayEquals(((DoubleDataset) t.flatten()).getData(), ((DoubleDataset) s.flatten()).getData(), 1e-15);
TestUtils.assertEquals("Max", t.max().doubleValue(), s.max().doubleValue());
TestUtils.assertEquals("Min", t.min().doubleValue(), s.min().doubleValue());
return s;
}
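
	// Added illustrative sketch (assumption: getSliceView shares storage with its parent while
	// getSlice returns a copy, which is the distinction the view tests above rely on).
	@Test
	public void testSliceViewSharesDataSketch() {
		Dataset a = DatasetFactory.createRange(6, Dataset.FLOAT64);
		Dataset view = a.getSliceView(new int[] {0}, new int[] {6}, new int[] {2});
		Dataset copy = a.getSlice(new int[] {0}, new int[] {6}, new int[] {2});
		a.set(-1, 2); // modify the parent dataset
		assertEquals(-1, view.getDouble(1), 1e-12); // the view reflects the change
		assertEquals(2, copy.getDouble(1), 1e-12);  // the copy does not
	}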
@Test
public void testSliceStrings() {
String s;
s = Slice.createString(new int[] {3}, null, null, null);
assertEquals(":", s);
s = Slice.createString(new int[] {3}, null, null, new int[] {1});
assertEquals(":", s);
s = Slice.createString(new int[] {3}, null, new int[] {2}, new int[] {1});
assertEquals(":2", s);
s = Slice.createString(new int[] {4}, new int[] {1}, new int[] {3}, new int[] {1});
assertEquals("1:3", s);
s = Slice.createString(new int[] {4}, new int[] {1}, new int[] {2}, new int[] {1});
assertEquals("1", s);
s = Slice.createString(new int[] {4}, new int[] {1}, new int[] {3}, new int[] {2});
assertEquals("1", s);
s = Slice.createString(new int[] {5}, null, null, new int[] {2});
assertEquals("::2", s);
s = Slice.createString(new int[] {5}, new int[] {1}, new int[] {4}, new int[] {2});
assertEquals("1:4:2", s);
s = Slice.createString(new int[] {5}, new int[] {1}, new int[] {5}, new int[] {2});
assertEquals("1::2", s);
s = Slice.createString(new int[] {5}, new int[] {1}, new int[] {3}, new int[] {2});
assertEquals("1", s);
s = Slice.createString(new int[] {3}, null, null, new int[] {-1});
assertEquals("::-1", s);
s = Slice.createString(new int[] {5}, new int[] {3}, new int[] {1}, new int[] {-1});
assertEquals("3:1:-1", s);
s = Slice.createString(new int[] {5}, new int[] {4}, new int[] {1}, new int[] {-1});
assertEquals(":1:-1", s);
s = Slice.createString(new int[] {5}, new int[] {3}, new int[] {0}, new int[] {-1});
assertEquals("3:0:-1", s);
s = Slice.createString(new int[] {5}, new int[] {3}, new int[] {-1}, new int[] {-1});
assertEquals("3::-1", s);
s = Slice.createString(new int[] {5}, new int[] {3}, new int[] {2}, new int[] {-1});
assertEquals("3", s);
s = Slice.createString(new int[] {5}, new int[] {3}, new int[] {1}, new int[] {-2});
assertEquals("3", s);
s = Slice.createString(new int[] {3, 2}, null, null, null);
assertEquals(":,:", s);
}
@Test
public void testSetSlice() {
Dataset a = DatasetFactory.createRange(100, Dataset.FLOAT64).reshape(20, 5);
a.setSlice(-2, null, new Slice(null, null, 2));
assertEquals(-2, a.getDouble(0, 0), 1e-15);
assertEquals(1, a.getDouble(0, 1), 1e-15);
assertEquals(-2, a.getDouble(0, 2), 1e-15);
assertEquals(3, a.getDouble(0, 3), 1e-15);
assertEquals(-2, a.getDouble(0, 4), 1e-15);
// with broadcasting
a = DatasetFactory.createRange(100, Dataset.FLOAT64).reshape(20, 5);
a.setSlice(DatasetFactory.createRange(3, Dataset.INT16), new Slice(2, 10), new Slice(null, null, 2));
assertEquals(0, a.getDouble(0, 0), 1e-15);
assertEquals(1, a.getDouble(0, 1), 1e-15);
assertEquals(2, a.getDouble(0, 2), 1e-15);
assertEquals(3, a.getDouble(0, 3), 1e-15);
assertEquals(4, a.getDouble(0, 4), 1e-15);
assertEquals(5, a.getDouble(1, 0), 1e-15);
assertEquals(6, a.getDouble(1, 1), 1e-15);
assertEquals(7, a.getDouble(1, 2), 1e-15);
assertEquals(8, a.getDouble(1, 3), 1e-15);
assertEquals(9, a.getDouble(1, 4), 1e-15);
assertEquals(0, a.getDouble(2, 0), 1e-15);
assertEquals(11, a.getDouble(2, 1), 1e-15);
assertEquals(1, a.getDouble(2, 2), 1e-15);
assertEquals(13, a.getDouble(2, 3), 1e-15);
assertEquals(2, a.getDouble(2, 4), 1e-15);
// compound
CompoundDataset c = DatasetFactory.createRange(3, 100, Dataset.ARRAYFLOAT64).reshape(20, 5);
c.setSlice(DatasetFactory.createRange(3, Dataset.INT16), new Slice(2, 10), new Slice(null, null, 2));
}
@Test
public void test1DErrors() {
// test 1D errors for single value
Dataset a = DatasetFactory.createRange(100, Dataset.INT32);
a.setError(5);
assertEquals(5.0, a.getError(0), 0.001);
assertEquals(5.0, a.getError(50), 0.001);
assertEquals(5.0, a.getError(99), 0.001);
assertTrue(a.hasErrors());
		// now pull out the full error array
Dataset error = a.getError();
// check compatibility
try {
AbstractDataset.checkCompatibility(a, error);
} catch (Exception e) {
fail("Error shape is not the same as input datasets");
}
assertEquals(5.0, error.getDouble(0), 0.001);
assertEquals(5.0, error.getDouble(50), 0.001);
assertEquals(5.0, error.getDouble(99), 0.001);
// Now set the error as a whole array
a.setError(Maths.multiply(error, 2));
assertEquals(10.0, a.getError(0), 0.001);
assertEquals(10.0, a.getError(50), 0.001);
assertEquals(10.0, a.getError(99), 0.001);
		// pull the error out again to make sure it is correct
Dataset error2 = a.getError();
// check compatibility
try {
AbstractDataset.checkCompatibility(a, error2);
} catch (Exception e) {
fail("Error shape is not the same as input datasets");
}
assertEquals(10.0, error2.getDouble(0), 0.001);
assertEquals(10.0, error2.getDouble(50), 0.001);
assertEquals(10.0, error2.getDouble(99), 0.001);
}
@Test
public void test2DErrors() {
		// test 2D errors for single value
Dataset a = DatasetFactory.zeros(new int[] {100,100}, Dataset.INT32);
a.setError(5);
assertEquals(5.0, a.getError(0,0), 0.001);
assertEquals(5.0, a.getError(50,50), 0.001);
assertEquals(5.0, a.getError(99,99), 0.001);
assertTrue(a.hasErrors());
		// now pull out the full error array
Dataset error = a.getError();
// check compatibility
try {
AbstractDataset.checkCompatibility(a, error);
} catch (Exception e) {
fail("Error shape is not the same as input datasets");
}
assertEquals(5.0, error.getDouble(0,0), 0.001);
assertEquals(5.0, error.getDouble(50,50), 0.001);
assertEquals(5.0, error.getDouble(99,99), 0.001);
// Now set the error as a whole array
a.setError(Maths.multiply(error, 2));
assertEquals(10.0, a.getError(0,0), 0.001);
assertEquals(10.0, a.getError(50,50), 0.001);
assertEquals(10.0, a.getError(99,99), 0.001);
		// pull the error out again to make sure it is correct
Dataset error2 = a.getError();
// check compatibility
try {
AbstractDataset.checkCompatibility(a, error2);
} catch (Exception e) {
fail("Error shape is not the same as input datasets");
}
assertEquals(10.0, error2.getDouble(0,0), 0.001);
assertEquals(10.0, error2.getDouble(50,50), 0.001);
assertEquals(10.0, error2.getDouble(99,99), 0.001);
}
@Test
public void testSetErrorBuffer() {
Dataset a = DatasetFactory.zeros(new int[] {100,100}, Dataset.INT32);
Dataset err = DatasetFactory.createLinearSpace(0, a.getSize() - 1, a.getSize(), Dataset.FLOAT64);
err.setShape(a.getShape());
a.setErrorBuffer(null);
assertFalse(a.hasErrors());
		a.setErrorBuffer(25.0); // the buffer holds the variance, so the error is sqrt(25) = 5
assertEquals(5.0, a.getError(0,0), 0.001);
assertEquals(5.0, a.getError(50,50), 0.001);
assertEquals(5.0, a.getError(99,99), 0.001);
assertTrue(a.hasErrors());
		// now pull out the full error array and check compatibility
Dataset error = a.getError();
try {
AbstractDataset.checkCompatibility(a, error);
} catch (Exception e) {
fail("Error shape is not the same as input datasets");
}
		a.setErrorBuffer(err); // buffer values are variances, so getError returns their square roots
assertEquals(0.0, a.getError(0,0), 0.001);
assertEquals(Math.sqrt(50.0 + 100*50.0), a.getError(50,50), 0.001);
assertEquals(Math.sqrt(99.0 + 100*99.0), a.getError(99,99), 0.001);
assertTrue(a.hasErrors());
		// now pull out the full error array and check compatibility
error = a.getError();
try {
AbstractDataset.checkCompatibility(a, error);
} catch (Exception e) {
fail("Error shape is not the same as input datasets");
}
a.setErrorBuffer(err.getBuffer());
assertEquals(0.0, a.getError(0,0), 0.001);
assertEquals(Math.sqrt(35.0 + 100*25.0), a.getError(25,35), 0.001);
assertEquals(Math.sqrt(99.0 + 100*99.0), a.getError(99,99), 0.001);
assertTrue(a.hasErrors());
		// now pull out the full error array and check compatibility
error = a.getError();
try {
AbstractDataset.checkCompatibility(a, error);
} catch (Exception e) {
fail("Error shape is not the same as input datasets");
}
}
@Test
public void testInternalErrors() {
// test 1D errors for single value
Dataset a = DatasetFactory.createRange(100, Dataset.INT32);
a.setError(5);
		// the error buffer stores the variance, i.e. the error squared
Number ne = (Number) a.getErrorBuffer().getObjectAbs(0);
assertEquals(25.0, ne.doubleValue(), 0.001);
// now for pulling out the full error array
Dataset error = a.getError();
a.setError(Maths.multiply(error, 2));
		// the buffer values should also be the squares of the new errors
Dataset ae = a.getErrorBuffer();
assertEquals(100.0, ae.getDouble(0), 0.001);
assertEquals(100.0, ae.getDouble(50), 0.001);
assertEquals(100.0, ae.getDouble(99), 0.001);
}
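
	// Added illustrative sketch (not part of the original suite): the round trip between
	// setError, the internal buffer and getError, assuming the buffer stores variances
	// (squared errors) as the assertions above indicate.
	@Test
	public void testErrorBufferRoundTripSketch() {
		Dataset a = DatasetFactory.createRange(4, Dataset.FLOAT64);
		a.setError(3); // a uniform error of 3 ...
		Number buffered = (Number) a.getErrorBuffer().getObjectAbs(0);
		assertEquals(9.0, buffered.doubleValue(), 1e-12); // ... is buffered as the variance 9
		assertEquals(3.0, a.getError(2), 1e-12); // and recovered as sqrt(9)
	}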
@Test
public void testZeroRankDatasets() {
Dataset a;
a = DoubleDataset.ones();
assertEquals("Rank", 0, a.getRank());
assertEquals("Shape", 0, a.getShape().length);
assertEquals("Value", 1.0, a.getObject());
assertEquals("Max", 1.0, a.max());
assertEquals("Min", 1.0, a.min());
assertEquals("MaxPos", 0, a.maxPos().length);
assertEquals("MinPos", 0, a.minPos().length);
assertEquals("ArgMax", 0, a.argMax());
assertEquals("ArgMin", 0, a.argMin());
assertEquals("Value", true, a.equals(new Double(1.0)));
a = DatasetFactory.zeros(new int[] {}, Dataset.INT16);
assertEquals("Rank", 0, a.getRank());
assertEquals("Shape", 0, a.getShape().length);
assertEquals("Value", (short) 0, a.getObject());
a = DatasetFactory.createFromObject(new Complex(1.0, -0.5));
assertEquals("Rank", 0, a.getRank());
assertEquals("Shape", 0, a.getShape().length);
assertEquals("Value", new Complex(1.0, -0.5), a.getObject());
assertEquals("Real view value", 1.0, a.realView().getObject());
assertEquals("Imaginary view value", -0.5, ((ComplexDoubleDataset) a).imagView().getObject());
a = DatasetFactory.createFromObject(1.f);
assertEquals("Rank", 0, a.getRank());
assertEquals("Shape", 0, a.getShape().length);
assertEquals("Value", 1.f, a.getObject());
a = DoubleDataset.ones(1);
a.squeeze();
assertEquals("Rank", 0, a.getRank());
assertEquals("Shape", 0, a.getShape().length);
a = DatasetFactory.createFromObject(1.f);
assertEquals("Equals", a, DatasetFactory.createFromObject(1.f));
assertFalse("Differs", a.equals(DatasetFactory.createFromObject(2.f)));
}
@Test
public void testConcatenate() {
Dataset a, b, c, d;
a = DatasetFactory.createRange(6, Dataset.FLOAT64);
b = DatasetFactory.createRange(6, 8, 1, Dataset.FLOAT64);
c = DatasetUtils.concatenate(new IDataset[] {a, b}, 0);
d = DatasetFactory.createRange(8, Dataset.FLOAT64);
assertEquals("Rank", 1, c.getRank());
assertTrue("Dataset", c.equals(d));
a = DatasetFactory.createRange(6, Dataset.FLOAT64).reshape(3,2);
b = DatasetFactory.createRange(6, 8, 1, Dataset.FLOAT64).reshape(1,2);
c = DatasetUtils.concatenate(new IDataset[] {a, b}, 0);
d = DatasetFactory.createRange(8, Dataset.FLOAT64).reshape(4,2);
assertEquals("Rank", 2, c.getRank());
assertTrue("Dataset", c.equals(d));
a.setShape(2,3);
b = DatasetFactory.createRange(6, 9, 1, Dataset.FLOAT64).reshape(1,3);
c = DatasetUtils.concatenate(new IDataset[] {a, b}, 0);
d = DatasetFactory.createRange(9, Dataset.FLOAT64).reshape(3,3);
assertEquals("Rank", 2, c.getRank());
assertTrue("Dataset", c.equals(d));
a = DatasetFactory.createRange(2, Dataset.FLOAT64).reshape(1,2);
b = DatasetFactory.createRange(3, 5, 1, Dataset.FLOAT64).reshape(1,2);
a = DatasetUtils.concatenate(new IDataset[] {a, b}, 0);
b = DatasetFactory.createRange(2, 6, 3, Dataset.FLOAT64).reshape(2,1);
c = DatasetUtils.concatenate(new IDataset[] {a, b}, 1);
d = DatasetFactory.createRange(6, Dataset.FLOAT64).reshape(2,3);
assertEquals("Rank", 2, c.getRank());
assertTrue("Dataset", c.equals(d));
}
@Test
public void testSum() {
Dataset a = DatasetFactory.createRange(1024*1024, Dataset.INT32);
assertEquals("Typed sum", -524288, a.typedSum(Dataset.INT32));
a = DatasetFactory.createRange(12, Dataset.FLOAT64);
a.setShape(3,4);
assertEquals("Sum", 11*6, ((Number) a.sum()).doubleValue(), 1e-6);
a.set(Double.NaN, 0,0);
assertTrue("Sum", Double.isNaN(((Number) a.sum()).doubleValue()));
assertEquals("Sum", 11*6, ((Number) a.sum(true)).doubleValue(), 1e-6);
}
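
	// Added illustrative sketch (not part of the original suite): summing the same range as
	// FLOAT64 gives the exact arithmetic-series value 2^39 - 2^19, which the 32-bit typed sum
	// in testSum wraps to -2^19.
	@Test
	public void testSumWithoutOverflowSketch() {
		Dataset a = DatasetFactory.createRange(1024*1024, Dataset.FLOAT64);
		double n = 1024. * 1024.;
		assertEquals(n * (n - 1.) / 2., ((Number) a.sum()).doubleValue(), 1e-3);
	}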
@Test
public void testMakeFinite() {
Dataset a = DatasetFactory.createFromObject(new double[] {0, Double.POSITIVE_INFINITY, Double.NaN, Double.NEGATIVE_INFINITY });
DatasetUtils.makeFinite(a);
assertTrue("Make finite", DatasetFactory.createFromObject(new double[] {0, Double.MAX_VALUE, 0, -Double.MAX_VALUE}).equals(a));
}
@Test
public void testCast() {
long[] udata = new long[] {0, 1, 127, 128, 255, 256, 32767, 32768, 65535, 65536, 2147483647L, 2147483648L, 4294967295L, 4294967296L};
Dataset d = new LongDataset(udata);
Dataset a, c;
c = DatasetUtils.cast(d, Dataset.INT32);
Assert.assertTrue(c.max().doubleValue() < d.max().doubleValue()); // check stored values
a = DatasetFactory.createFromObject(c, true);
assertEquals("", 0, a.getLong(13));
for (int i = 0; i < 13; i++)
assertEquals("", udata[i], a.getLong(i));
c = DatasetUtils.cast(d, Dataset.INT16);
Assert.assertTrue(c.max().doubleValue() < d.max().doubleValue());
a = DatasetFactory.createFromObject(c, true);
assertEquals("", 0, a.getLong(9));
for (int i = 0; i < 9; i++)
assertEquals("", udata[i], a.getLong(i));
c = DatasetUtils.cast(d, Dataset.INT8);
Assert.assertTrue(c.max().doubleValue() < d.max().doubleValue());
a = DatasetFactory.createFromObject(c, true);
assertEquals("", 0, a.getLong(5));
for (int i = 0; i < 5; i++)
assertEquals("", udata[i], a.getLong(i));
}
@Test
public void testRoll() {
Dataset a = DatasetFactory.createRange(10, Dataset.INT32);
Dataset r = DatasetUtils.roll(a, 2, null);
TestUtils.assertDatasetEquals(r, Maths.add(a, 10-2).iremainder(10), 1e-6, 1e-6);
r = DatasetUtils.roll(a, -2, null);
TestUtils.assertDatasetEquals(r, Maths.add(a, 10+2).iremainder(10), 1e-6, 1e-6);
a.setShape(2,5);
r = DatasetUtils.roll(a, 1, null);
TestUtils.assertDatasetEquals(r, Maths.add(a, 10-1).iremainder(10).reshape(2,5), 1e-6, 1e-6);
r = DatasetUtils.roll(a, 1, 0);
TestUtils.assertDatasetEquals(r, Maths.add(a, 5).iremainder(10).reshape(2,5), 1e-6, 1e-6);
r = DatasetUtils.roll(a, 1, 1);
TestUtils.assertDatasetEquals(r, new IntegerDataset(new int[] {4, 0, 1, 2, 3, 9, 5, 6, 7, 8}, 2,5), 1e-6, 1e-6);
}
@Test
public void testRollAxis() {
Dataset a = DatasetFactory.ones(new int[] {3, 4, 5, 6}, Dataset.INT8);
Assert.assertArrayEquals(new int[] {3, 6, 4, 5}, DatasetUtils.rollAxis(a, 3, 1).getShape());
Assert.assertArrayEquals(new int[] {5, 3, 4, 6}, DatasetUtils.rollAxis(a, 2, 0).getShape());
Assert.assertArrayEquals(new int[] {3, 5, 6, 4}, DatasetUtils.rollAxis(a, 1, 4).getShape());
}
@Test
public void testFindOccurrences() {
Dataset a = new DoubleDataset(new double[] {0, 0, 3, 7, -4, 2, 1});
Dataset v = DatasetFactory.createRange(-3, 3, 1, Dataset.FLOAT64);
Dataset indexes = DatasetUtils.findFirstOccurrences(a, v);
TestUtils.assertDatasetEquals(new IntegerDataset(new int[] {-1, -1, -1, 0, 6, 5}, null), indexes, true, 1, 1);
}
@Test
public void testFindIndexes() {
Dataset a = new DoubleDataset(new double[] {0, 0, 3, 7, -4, 2, 1});
Dataset v = DatasetFactory.createRange(-3, 3, 1, Dataset.FLOAT64);
IntegerDataset indexes = DatasetUtils.findIndexesForValues(a, v);
TestUtils.assertDatasetEquals(new IntegerDataset(new int[] {3, 3, -1, -1, -1, 5, 4}, null), indexes, true, 1, 1);
v = new DoubleDataset(new double[] {-4, 0, 1, 2, 3, 7});
indexes = DatasetUtils.findIndexesForValues(a, v);
TestUtils.assertDatasetEquals(a, v.getBy1DIndex(indexes), true, 1e-6, 1e-6);
}
@Test
public void testAppend() {
double[] x = { 0., 1., 2., 3., 4., 5. };
Dataset d1 = DoubleDataset.createRange(3.);
Dataset d2 = DoubleDataset.createRange(3., 6., 1.);
Dataset d3 = DatasetUtils.append(d1, d2, 0);
for (int i = 0; i < x.length; i++) {
assertEquals("Append 1", x[i], d3.getDouble(i), 1e-8);
}
d1.setShape(1, 3);
d2.setShape(1, 3);
d3 = DatasetUtils.append(d1, d2, 0);
Dataset d4 = new DoubleDataset(x);
d4.setShape(2, 3);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 3; j++) {
assertEquals("Append 2", d4.getDouble(i, j), d3.getDouble(i, j), 1e-8);
}
}
d3 = DatasetUtils.append(d1, d2, 1);
d4 = new DoubleDataset(x);
d4.setShape(1, 6);
for (int i = 0; i < 1; i++) {
for (int j = 0; j < 6; j++) {
assertEquals("Append 3", d4.getDouble(i, j), d3.getDouble(i, j), 1e-8);
}
}
}
public static void checkDatasets(Dataset calc, Dataset expected) {
checkDatasets(expected, calc, 1e-5, 1e-5);
}
public static void checkDatasets(Dataset expected, Dataset calc, double relTol, double absTol) {
checkDatasets(expected, calc, false, relTol, absTol);
}
public static void checkDatasets(Dataset expected, Dataset calc, boolean valuesOnly, double relTol, double absTol) {
int type = expected.getDtype();
if (!valuesOnly) {
Assert.assertEquals("Type", type, calc.getDtype());
Assert.assertEquals("Items", expected.getElementsPerItem(), calc.getElementsPerItem());
}
Assert.assertEquals("Size", expected.getSize(), calc.getSize());
try {
Assert.assertArrayEquals("Shape", expected.getShape(), calc.getShape());
} catch (AssertionError e) {
if (calc.getSize() == 1) {
Assert.assertArrayEquals("Shape", new int[0], calc.getShape());
} else {
throw e;
}
}
IndexIterator at = expected.getIterator(true);
IndexIterator bt = calc.getIterator();
final int eis = expected.getElementsPerItem();
final int cis = calc.getElementsPerItem();
final int is = Math.max(eis, cis);
if (expected.elementClass().equals(Boolean.class)) {
while (at.hasNext() && bt.hasNext()) {
for (int j = 0; j < is; j++) {
boolean e = j >= eis ? false : expected.getElementBooleanAbs(at.index + j);
boolean c = j >= cis ? false : calc.getElementBooleanAbs(bt.index + j);
Assert.assertEquals("Value does not match at " + Arrays.toString(at.getPos()) + "; " + j +
": ", e, c);
}
}
} else if (expected.hasFloatingPointElements()) {
while (at.hasNext() && bt.hasNext()) {
for (int j = 0; j < is; j++) {
double e = j >= eis ? 0 : expected.getElementDoubleAbs(at.index + j);
double c = j >= cis ? 0 : calc.getElementDoubleAbs(bt.index + j);
double t = Math.max(absTol, relTol*Math.max(Math.abs(e), Math.abs(c)));
Assert.assertEquals("Value does not match at " + Arrays.toString(at.getPos()) + "; " + j +
": ", e, c, t);
}
}
} else if (type == Dataset.STRING) {
StringDataset es = (StringDataset) expected;
StringDataset cs = (StringDataset) calc;
while (at.hasNext() && bt.hasNext()) {
Assert.assertEquals("Value does not match at " + Arrays.toString(at.getPos()) + ": ",
es.getAbs(at.index), cs.getAbs(bt.index));
}
} else if (type == Dataset.OBJECT) {
ObjectDataset eo = (ObjectDataset) expected;
ObjectDataset co = (ObjectDataset) calc;
while (at.hasNext() && bt.hasNext()) {
Assert.assertEquals("Value does not match at " + Arrays.toString(at.getPos()) + ": ",
eo.getAbs(at.index), co.getAbs(bt.index));
}
} else {
while (at.hasNext() && bt.hasNext()) {
for (int j = 0; j < is; j++) {
long e = j >= eis ? 0 : expected.getElementLongAbs(at.index + j);
long c = j >= cis ? 0 : calc.getElementLongAbs(bt.index + j);
Assert.assertEquals("Value does not match at " + Arrays.toString(at.getPos()) + "; " + j +
": ", e, c);
}
}
}
}
@Test
public void testSelect() {
DoubleDataset a = new DoubleDataset(new double[] { 0, 1, 3, 5, -7, -9 });
DoubleDataset b = new DoubleDataset(new double[] { 0.01, 1.2, 2.9, 5, -7.1, -9 });
Dataset c = a.clone().reshape(2, 3);
BooleanDataset d = new BooleanDataset(new boolean[] {false, true, false, false, true, false}, 2, 3);
DoubleDataset e = (DoubleDataset) DatasetUtils.select(new BooleanDataset[] {d}, new Object[] {c}, -2);
checkDatasets(e, new DoubleDataset(new double[] {-2, 1, -2, -2, -7, -2}, 2, 3));
Dataset f = b.clone().reshape(2, 3);
BooleanDataset g = new BooleanDataset(new boolean[] {false, true, true, false, false, false}, 2, 3);
e = (DoubleDataset) DatasetUtils.select(new BooleanDataset[] {d, g}, new Dataset[] {c, f}, -2.5);
checkDatasets(e, new DoubleDataset(new double[] {-2.5, 1, 2.9, -2.5, -7, -2.5}, 2, 3));
e = (DoubleDataset) DatasetUtils.select(d, c, -2);
checkDatasets(e, new DoubleDataset(new double[] {-2, 1, -2, -2, -7, -2}, 2, 3));
}
@Test
public void testChoose() {
DoubleDataset a = new DoubleDataset(new double[] { 0, 1, 3, 5, -7, -9 });
DoubleDataset b = new DoubleDataset(new double[] { 0.01, 1.2, 2.9, 5, -7.1, -9 });
Dataset c = a.clone().reshape(2, 3);
IntegerDataset d = new IntegerDataset(new int[] {0, 0, 1, 1, 0, 1}, 2, 3);
DoubleDataset e = (DoubleDataset) DatasetUtils.choose(d, new Object[] {c, -2}, true, false);
checkDatasets(e, new DoubleDataset(new double[] {0, 1, -2, -2, -7, -2}, 2, 3));
d = new IntegerDataset(new int[] {-2, 0, 3, 1, 0, 2}, 2, 3);
try {
e = (DoubleDataset) DatasetUtils.choose(d, new Object[] {c, -2}, true, false);
fail("Should have thrown an array index OOB exception");
} catch (ArrayIndexOutOfBoundsException oob) {
// expected
}
e = (DoubleDataset) DatasetUtils.choose(d, new Object[] {c, -2}, false, false);
checkDatasets(e, new DoubleDataset(new double[] {0, 1, -2, -2, -7, -9}, 2, 3));
e = (DoubleDataset) DatasetUtils.choose(d, new Object[] {c, -2}, false, true);
checkDatasets(e, new DoubleDataset(new double[] {0, 1, -2, -2, -7, -2}, 2, 3));
Dataset f = b.clone().reshape(2, 3);
IntegerDataset g = new IntegerDataset(new int[] {1, 0, 1, 1, 2, 2}, 2, 3);
e = (DoubleDataset) DatasetUtils.choose(g, new Object[] {c, f, -2}, true, false);
checkDatasets(e, new DoubleDataset(new double[] {0.01, 1, 2.9, 5, -2, -2}, 2, 3));
g = new IntegerDataset(new int[] {-1, 3, 1, 1, 2, 2}, 2, 3);
try {
e = (DoubleDataset) DatasetUtils.choose(d, new Object[] {c, f, -2}, true, false);
fail("Should have thrown an array index OOB exception");
} catch (ArrayIndexOutOfBoundsException oob) {
// expected
}
e = (DoubleDataset) DatasetUtils.choose(g, new Object[] {c, f, -2}, false, false);
checkDatasets(e, new DoubleDataset(new double[] {-2, 1, 2.9, 5, -2, -2}, 2, 3));
e = (DoubleDataset) DatasetUtils.choose(g, new Object[] {c, f, -2}, false, true);
checkDatasets(e, new DoubleDataset(new double[] {0, -2, 2.9, 5, -2, -2}, 2, 3));
}
@Test
public void testSize() {
int[] zero = new int[] {0};
int[] one = new int[] {};
int[] small = new int[] {2};
int[] medium = new int[] {1024, 1024};
int[] large = new int[] {1024, 1024, 1024};
int[] xxxlarge = new int[] {1024, 1024, 1024, 1024};
int[] bad = new int[] {1024, -1, 1024};
assertEquals(0, AbstractDataset.calcLongSize(zero));
assertEquals(0, AbstractDataset.calcSize(zero));
assertEquals(1, AbstractDataset.calcLongSize(one));
assertEquals(1, AbstractDataset.calcSize(one));
assertEquals(2, AbstractDataset.calcLongSize(small));
assertEquals(2, AbstractDataset.calcSize(small));
assertEquals(1024*1024, AbstractDataset.calcLongSize(medium));
assertEquals(1024*1024, AbstractDataset.calcSize(medium));
assertEquals(1024*1024*1024, AbstractDataset.calcLongSize(large));
assertEquals(1024*1024*1024, AbstractDataset.calcSize(large));
		assertEquals(1024*1024*1024*1024L, AbstractDataset.calcLongSize(xxxlarge)); // 2^40 fits in a long but not in an int
try {
AbstractDataset.calcSize(xxxlarge);
fail("Should have thrown an illegal argument exception");
} catch (IllegalArgumentException e) {
// expected
} catch (Throwable t) {
fail("Should have thrown an illegal argument exception");
}
try {
AbstractDataset.calcLongSize(bad);
fail("Should have thrown an illegal argument exception");
} catch (IllegalArgumentException e) {
// expected
} catch (Throwable t) {
fail("Should have thrown an illegal argument exception");
}
try {
AbstractDataset.calcSize(bad);
fail("Should have thrown an illegal argument exception");
} catch (IllegalArgumentException e) {
// expected
} catch (Throwable t) {
fail("Should have thrown an illegal argument exception");
}
}
@Test
public void testFill() {
Dataset a = DatasetFactory.createRange(12, Dataset.FLOAT64);
Dataset b = DatasetFactory.zeros(a);
a.fill(0);
checkDatasets(a, b, 1e-15, 1e-20);
a.fill(0.);
checkDatasets(a, b, 1e-15, 1e-20);
a.fill(0L);
checkDatasets(a, b, 1e-15, 1e-20);
a.fill(new Complex(0));
checkDatasets(a, b, 1e-15, 1e-20);
a.fill(DatasetFactory.createFromObject(0));
checkDatasets(a, b, 1e-15, 1e-20);
a.fill(DatasetFactory.createFromObject(new int[] {0}));
checkDatasets(a, b, 1e-15, 1e-20);
try {
a.fill(DatasetFactory.createFromObject(new int[] {0, 1}));
fail();
} catch (IllegalArgumentException e) {
}
}
@Test
public void testPositions() {
int[] shape = new int[] { 23, 34, 2 };
int[] indexes = new int[] {1, 10, 70, 171};
List<IntegerDataset> list = DatasetUtils.calcPositionsFromIndexes(new IntegerDataset(indexes, 2, 2), shape);
Assert.assertEquals(shape.length, list.size());
IntegerDataset l = list.get(0);
Assert.assertEquals(2, l.getRank());
Assert.assertEquals(2, l.getShapeRef()[0]);
Assert.assertEquals(2, l.getShapeRef()[1]);
checkPositions(list, new int[] {0, 0, 1}, 0, 0);
checkPositions(list, new int[] {0, 5, 0}, 0, 1);
checkPositions(list, new int[] {1, 1, 0}, 1, 0);
checkPositions(list, new int[] {2, 17, 1}, 1, 1);
}
private void checkPositions(List<IntegerDataset> list, int[] expected, int... position) {
int j = 0;
for (int i : expected) {
IntegerDataset l = list.get(j++);
Assert.assertEquals(i, l.getInt(position));
}
}
@Test
public void testIndexes() {
List<IntegerDataset> list = new ArrayList<IntegerDataset>();
int[] shape = new int[] { 23, 34, 2 };
list.add(new IntegerDataset(new int[] {0, 0, 1, 2}, 2, 2));
list.add(new IntegerDataset(new int[] {0, 5, 1, 17}, 2, 2));
list.add(new IntegerDataset(new int[] {1, 0, 0, 1}, 2, 2));
IntegerDataset indexes = DatasetUtils.calcIndexesFromPositions(list, shape, null);
checkDatasets(indexes, new IntegerDataset(new int[] {1, 10, 70, 171}, 2, 2));
list.set(1, new IntegerDataset(new int[] {0, -5, 1, 17}, 2, 2));
try {
indexes = DatasetUtils.calcIndexesFromPositions(list, shape, null);
Assert.fail("Should have thrown an exception");
} catch (Exception e) {
}
list.set(1, new IntegerDataset(new int[] {0, 34, 1, 17}, 2, 2));
try {
indexes = DatasetUtils.calcIndexesFromPositions(list, shape, null);
Assert.fail("Should have thrown an exception");
} catch (Exception e) {
}
list.set(1, new IntegerDataset(new int[] {0, 39, 1, 17}, 2, 2));
indexes = DatasetUtils.calcIndexesFromPositions(list, shape, 1);
checkDatasets(indexes, new IntegerDataset(new int[] {1, 10, 70, 171}, 2, 2));
list.set(1, new IntegerDataset(new int[] {0, -29, 1, 17}, 2, 2));
indexes = DatasetUtils.calcIndexesFromPositions(list, shape, 1);
checkDatasets(indexes, new IntegerDataset(new int[] {1, 10, 70, 171}, 2, 2));
list.set(1, new IntegerDataset(new int[] {-2, 5, 1, 17}, 2, 2));
indexes = DatasetUtils.calcIndexesFromPositions(list, shape, 2);
checkDatasets(indexes, new IntegerDataset(new int[] {1, 10, 70, 171}, 2, 2));
list.set(1, new IntegerDataset(new int[] {34, 5, 1, 17}, 2, 2));
indexes = DatasetUtils.calcIndexesFromPositions(list, shape, 2);
checkDatasets(indexes, new IntegerDataset(new int[] {33*2 + 1, 10, 70, 171}, 2, 2));
}
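
	// Added illustrative sketch of the row-major arithmetic behind the conversions above:
	// for shape {23, 34, 2}, index = (pos0 * 34 + pos1) * 2 + pos2, so (2, 17, 1) <-> 171.
	@Test
	public void testIndexPositionRoundTripSketch() {
		int[] shape = new int[] { 23, 34, 2 };
		IntegerDataset indexes = new IntegerDataset(new int[] {(2 * 34 + 17) * 2 + 1}, 1);
		List<IntegerDataset> positions = DatasetUtils.calcPositionsFromIndexes(indexes, shape);
		assertEquals(2, positions.get(0).getInt(0));
		assertEquals(17, positions.get(1).getInt(0));
		assertEquals(1, positions.get(2).getInt(0));
		assertEquals(171, DatasetUtils.calcIndexesFromPositions(positions, shape, null).getInt(0));
	}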
@Test
public void testSetByBoolean() {
Dataset a = DatasetFactory.createRange(10, Dataset.INT32);
		a.max(); // prime the cached summary statistics so the modification below must invalidate them
a.setByBoolean(0, Comparisons.greaterThan(a, 5));
Assert.assertEquals(a.max().longValue(), 5);
}
@Test
public void testExtract() {
Dataset a = DatasetFactory.createRange(20, Dataset.INT32).reshape(4,5);
Dataset b = DatasetFactory.createFromObject(new boolean[] {true, false, true, false, false});
checkDatasets(DatasetUtils.extract(a, b), DatasetFactory.createFromObject(new int[] {0, 2, 5, 7, 10, 12, 15, 17}));
}
@Test
public void testSetBy1DIndex() {
Dataset a = DatasetFactory.createRange(10, Dataset.INT32);
		a.max(); // prime the cached summary statistics so the modification below must invalidate them
a.setBy1DIndex(0, Comparisons.nonZero(Comparisons.greaterThan(a, 5)).get(0));
Assert.assertEquals(a.max().longValue(), 5);
}
@Test
public void testSetByPosition() {
Dataset a = DatasetFactory.createRange(10, Dataset.INT32);
		a.max(); // prime the cached summary statistics so the modification below must invalidate them
List<IntegerDataset> list = Comparisons.nonZero(Comparisons.greaterThan(a, 5));
a.setByIndexes(0, list.get(0));
Assert.assertEquals(a.max().longValue(), 5);
a = DatasetFactory.createRange(10, Dataset.INT32).reshape(2, 5);
a.max();
list = Comparisons.nonZero(Comparisons.greaterThan(a, 5));
a.setByIndexes(0, list.get(0), list.get(1));
Assert.assertEquals(a.max().longValue(), 5);
}
@Test
public void testReshape() {
Dataset a = DatasetFactory.createRange(60, Dataset.INT32);
Dataset b = a.getSliceView(new int[] {1}, null, new int[] {2});
Dataset c = a.getSlice(new int[] {1}, null, new int[] {2});
checkDatasets(b, c);
// check if strides still work
b.setShape(6, 5);
c.setShape(6, 5);
checkDatasets(b, c);
b.setShape(1, 6, 5);
c.setShape(1, 6, 5);
checkDatasets(b, c);
b.setShape(1, 6, 1, 5);
c.setShape(1, 6, 1, 5);
checkDatasets(b, c);
b.setShape(30);
c.setShape(30);
checkDatasets(b, c);
b.setShape(6, 5);
try {
Dataset d = b.getSliceView(new Slice(1,6,2));
d.setShape(15);
Assert.fail("Should have thrown an illegal argument exception");
} catch (IllegalArgumentException e) {
// do nothing
} catch (Exception e) {
Assert.fail("Should have thrown an illegal argument exception");
}
}
@Test
public void testDatasetVariance() {
Random.seed(12345);
final Dataset image = Maths.multiply(Random.rand(new int[] { 10, 10 }), 1);
double mean = ((Number) image.mean()).doubleValue();
Dataset square = Maths.square(Maths.subtract(image, mean));
double var = ((Number) square.mean()).doubleValue();
Assert.assertEquals(var, image.variance(true).doubleValue(), var * 1.e-15);
}
@Test
public void testBroadcast() {
Dataset a = DatasetFactory.createRange(3, Dataset.INT32);
Dataset b = checkBroadcast2D(a, false, 2, 3);
Assert.assertEquals(1, b.getInt(0, 1));
Assert.assertEquals(1, b.getInt(1, 1));
a.setShape(3, 1);
b = checkBroadcast2D(a, true, 3, 4);
Assert.assertEquals(1, b.getInt(1, 0));
Assert.assertEquals(1, b.getInt(1, 1));
}
private Dataset checkBroadcast2D(Dataset a, boolean broadcastFirstDim, int... broadcastShape) {
Dataset b = a.getBroadcastView(broadcastShape);
Assert.assertArrayEquals(broadcastShape, b.getShape());
int size = AbstractDataset.calcSize(broadcastShape);
Assert.assertEquals(size, b.getSize());
IndexIterator it = b.getIterator(true);
int[] pos = it.getPos();
int i = 0;
while (it.hasNext()) {
i++;
if (broadcastFirstDim) {
Assert.assertEquals(a.getInt(pos[0], 0), b.getInt(pos));
Assert.assertEquals(a.getInt(pos[0], 0), b.getElementLongAbs(it.index));
} else {
Assert.assertEquals(a.getInt(pos[1]), b.getInt(pos));
Assert.assertEquals(a.getInt(pos[1]), b.getElementLongAbs(it.index));
}
}
Assert.assertEquals(size, i);
return b;
}
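
	// Added illustrative sketch (assumption: getBroadcastView follows numpy-style broadcasting,
	// repeating size-1 dimensions as exercised in testBroadcast above).
	@Test
	public void testBroadcastColumnSketch() {
		Dataset col = DatasetFactory.createRange(2, Dataset.INT32).reshape(2, 1); // [[0], [1]]
		Dataset b = col.getBroadcastView(new int[] {2, 3});
		Assert.assertArrayEquals(new int[] {2, 3}, b.getShape());
		Assert.assertEquals(0, b.getInt(0, 2)); // row 0 repeats the single value 0
		Assert.assertEquals(1, b.getInt(1, 2)); // row 1 repeats the single value 1
	}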
}
|
org.eclipse.dawnsci.analysis.dataset.test/src/org/eclipse/dawnsci/analysis/dataset/AbstractDatasetTest.java
|
/*-
* Copyright (c) 2012 Diamond Light Source Ltd.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.dawnsci.analysis.dataset;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import org.apache.commons.math3.complex.Complex;
import org.eclipse.dawnsci.analysis.api.dataset.IDataset;
import org.eclipse.dawnsci.analysis.api.dataset.Slice;
import org.eclipse.dawnsci.analysis.dataset.impl.AbstractDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.BooleanDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.Comparisons;
import org.eclipse.dawnsci.analysis.dataset.impl.ComplexDoubleDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.CompoundDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.Dataset;
import org.eclipse.dawnsci.analysis.dataset.impl.DatasetFactory;
import org.eclipse.dawnsci.analysis.dataset.impl.DatasetUtils;
import org.eclipse.dawnsci.analysis.dataset.impl.DoubleDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.IndexIterator;
import org.eclipse.dawnsci.analysis.dataset.impl.IntegerDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.LongDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.Maths;
import org.eclipse.dawnsci.analysis.dataset.impl.ObjectDataset;
import org.eclipse.dawnsci.analysis.dataset.impl.Random;
import org.eclipse.dawnsci.analysis.dataset.impl.StringDataset;
import org.junit.Assert;
import org.junit.Test;
public class AbstractDatasetTest {
@Test
public void testBestDType() {
assertEquals(Dataset.FLOAT32, AbstractDataset.getBestDType(Dataset.INT16, Dataset.FLOAT32));
assertEquals(Dataset.FLOAT64, AbstractDataset.getBestDType(Dataset.INT32, Dataset.FLOAT32));
assertEquals(Dataset.COMPLEX64, AbstractDataset.getBestDType(Dataset.FLOAT32, Dataset.COMPLEX64));
assertEquals(Dataset.COMPLEX128, AbstractDataset.getBestDType(Dataset.INT32, Dataset.COMPLEX64));
}
@Test
public void testCompatibleShapes() {
assertTrue("[] and []", AbstractDataset.areShapesCompatible(new int[] {}, new int[] {}));
assertTrue("[1] and []", AbstractDataset.areShapesCompatible(new int[] {1}, new int[] {}));
assertFalse("[2] and []", AbstractDataset.areShapesCompatible(new int[] {2}, new int[] {}));
assertTrue("[2] and [2]", AbstractDataset.areShapesCompatible(new int[] {2}, new int[] {2}));
assertTrue("[3] and [3]", AbstractDataset.areShapesCompatible(new int[] {3}, new int[] {3}));
assertTrue("[1,2] and [2]", AbstractDataset.areShapesCompatible(new int[] {1,2}, new int[] {2}));
assertTrue("[2] and [1,2]", AbstractDataset.areShapesCompatible(new int[] {2}, new int[] {1,2}));
assertFalse("[10,10] and [10,10,10]", AbstractDataset.areShapesCompatible(new int[] {10,10}, new int[] {10,10,10}));
assertFalse("[10,10,10] and [10,10]", AbstractDataset.areShapesCompatible(new int[] {10,10,10}, new int[] {10,10}));
assertTrue("[2] and [2,1,1,1]", AbstractDataset.areShapesCompatible(new int[] {2}, new int[] {2,1,1,1}));
assertTrue("[2,1] and [2,1,1,1]", AbstractDataset.areShapesCompatible(new int[] {2,1}, new int[] {2,1,1,1}));
assertFalse("[2,1] and [3,1,1,2]", AbstractDataset.areShapesCompatible(new int[] {2,1}, new int[] {3,1,1,2}));
assertFalse("[2,1] and [3,1,1,1]", AbstractDataset.areShapesCompatible(new int[] {2,1}, new int[] {3,1,1,1}));
assertTrue("[1,2,1] and [2,1,1,1]", AbstractDataset.areShapesCompatible(new int[] {1,2,1}, new int[] {2,1,1,1}));
assertTrue("[1,2,1,3] and [2,1,1,1,3]", AbstractDataset.areShapesCompatible(new int[] {1,2,1,3}, new int[] {2,1,1,1,3}));
assertTrue("[2,1,1] and [1,1,2]", AbstractDataset.areShapesCompatible(new int[] {2,1,1}, new int[] {1,1,2}));
assertFalse("[2,1,1] and [1,1,3]", AbstractDataset.areShapesCompatible(new int[] {2,1,1}, new int[] {1,1,3}));
assertFalse("[2,1,4] and [2,1,1,3]", AbstractDataset.areShapesCompatible(new int[] {2,1,4}, new int[] {2,1,1,3}));
assertFalse("[2,1,4] and [2,1,3]", AbstractDataset.areShapesCompatible(new int[] {2,1,4}, new int[] {2,1,3}));
assertFalse("[2,4] and [2,3]", AbstractDataset.areShapesCompatible(new int[] {2,4}, new int[] {2,3}));
assertTrue("[2,1,4] and [2,1,4]", AbstractDataset.areShapesCompatible(new int[] {2,1,4}, new int[] {2,1,4}));
assertTrue("[2,1,4] and [2,1,1,4]", AbstractDataset.areShapesCompatible(new int[] {2,1,4}, new int[] {2,1,1,4}));
assertFalse("[2,4] and [2,4,3]", AbstractDataset.areShapesCompatible(new int[] {2,4}, new int[] {2,4,3}));
assertFalse("[2,1,4] and [2,4,3]", AbstractDataset.areShapesCompatible(new int[] {2,1,4}, new int[] {2,4,3}));
assertTrue(AbstractDataset.areShapesCompatible(new int[] {}, new int[] {}, 0));
assertTrue(AbstractDataset.areShapesCompatible(new int[] {2}, new int[] {3}, 0));
assertFalse(AbstractDataset.areShapesCompatible(new int[] {2,4}, new int[] {3,4}, 1));
assertTrue(AbstractDataset.areShapesCompatible(new int[] {2,4}, new int[] {3,4}, 0));
// assertTrue(AbstractDataset.areShapesCompatible(new int[] {}, new int[] {}));
}
@Test
public void testBroadcastCompatibleShapes() {
assertTrue("[] and []", AbstractDataset.areShapesBroadcastCompatible(new int[] {}, new int[] {}));
assertTrue("[1] and []", AbstractDataset.areShapesBroadcastCompatible(new int[] {1}, new int[] {}));
assertTrue("[2] and []", AbstractDataset.areShapesBroadcastCompatible(new int[] {2}, new int[] {}));
assertTrue("[2] and [2]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2}, new int[] {2}));
assertTrue("[3] and [3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {3}, new int[] {3}));
assertTrue("[1,2] and [2]", AbstractDataset.areShapesBroadcastCompatible(new int[] {1,2}, new int[] {2}));
assertTrue("[2] and [1,2]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2}, new int[] {1,2}));
assertTrue("[4,2] and [1,2]", AbstractDataset.areShapesBroadcastCompatible(new int[] {4,2}, new int[] {1,2}));
assertTrue("[10,10] and [10,10,10]", AbstractDataset.areShapesBroadcastCompatible(new int[] {10,10}, new int[] {10,10,10}));
assertTrue("[10,10,10] and [10,10]", AbstractDataset.areShapesBroadcastCompatible(new int[] {10,10,10}, new int[] {10,10}));
assertTrue("[2] and [2,1,1,1]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2}, new int[] {2,1,1,1}));
assertTrue("[2,1] and [2,1,1,1]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1}, new int[] {2,1,1,1}));
assertTrue("[2,1] and [3,1,1,2]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1}, new int[] {3,1,1,2}));
assertTrue("[2,1] and [3,1,1,1]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1}, new int[] {3,1,1,1}));
assertTrue("[1,2,1] and [2,1,1,1]", AbstractDataset.areShapesBroadcastCompatible(new int[] {1,2,1}, new int[] {2,1,1,1}));
assertTrue("[1,2,1,3] and [2,1,1,1,3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {1,2,1,3}, new int[] {2,1,1,1,3}));
assertTrue("[2,1,1] and [1,1,2]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1,1}, new int[] {1,1,2}));
assertTrue("[2,1,1] and [1,1,3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1,1}, new int[] {1,1,3}));
assertFalse("[2,1,4] and [2,1,1,3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1,4}, new int[] {2,1,1,3}));
assertFalse("[2,1,4] and [2,1,3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1,4}, new int[] {2,1,3}));
assertFalse("[2,4] and [2,3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,4}, new int[] {2,3}));
assertTrue("[2,1,4] and [2,1,4]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1,4}, new int[] {2,1,4}));
assertTrue("[2,1,4] and [2,1,1,4]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1,4}, new int[] {2,1,1,4}));
assertFalse("[2,4] and [2,4,3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,4}, new int[] {2,4,3}));
assertFalse("[2,1,4] and [2,4,3]", AbstractDataset.areShapesBroadcastCompatible(new int[] {2,1,4}, new int[] {2,4,3}));
}
@Test
public void testMaxMin() {
Dataset a = DatasetFactory.createRange(12, Dataset.FLOAT64);
a.setShape(3,4);
assertEquals("Max", 11, a.max().doubleValue(), 1e-6);
assertEquals("Max 0", DatasetFactory.createFromObject(new double[] {8,9,10,11}), a.max(0));
assertEquals("Max 1", DatasetFactory.createFromObject(new double[] {3,7,11}), a.max(1));
assertEquals("Max arg", 11, a.argMax());
assertEquals("Max arg 0 ", DatasetFactory.createFromObject(new int[] {2,2,2,2}), a.argMax(0));
assertEquals("Max arg 1 ", DatasetFactory.createFromObject(new int[] {3,3,3}), a.argMax(1));
a.set(Double.NaN, 1, 0);
System.out.println(a.toString(true));
assertTrue("Max", Double.isNaN(a.max().doubleValue()));
assertTrue("Max", !Double.isNaN(a.max(true).doubleValue()));
assertTrue("Max 0", equalsWithNaNs(DatasetFactory.createFromObject(new double[] {Double.NaN,9,10,11}), a.max(0)));
assertTrue("Max 1", equalsWithNaNs(DatasetFactory.createFromObject(new double[] {3,Double.NaN,11}), a.max(1)));
assertEquals("Max arg", 4, a.argMax());
assertEquals("Max arg 0 ", DatasetFactory.createFromObject(new int[] {1,2,2,2}), a.argMax(0));
assertEquals("Max arg 1 ", DatasetFactory.createFromObject(new int[] {3,0,3}), a.argMax(1));
assertEquals("Max", 11, a.max(true).doubleValue(), 1e-6);
assertEquals("Max 0", DatasetFactory.createFromObject(new double[] {8,9,10,11}), a.max(true,0));
assertEquals("Max 1", DatasetFactory.createFromObject(new double[] {3,7,11}), a.max(true,1));
assertEquals("Max arg", 11, a.argMax(true));
assertEquals("Max arg 0 ", DatasetFactory.createFromObject(new int[] {2,2,2,2}), a.argMax(true, 0));
assertEquals("Max arg 1 ", DatasetFactory.createFromObject(new int[] {3,3,3}), a.argMax(true, 1));
a.set(Double.NEGATIVE_INFINITY, 1, 1);
System.out.println(a.toString(true));
assertTrue("Max", Double.isNaN(a.max().doubleValue()));
assertTrue("Max", !Double.isNaN(a.max(true).doubleValue()));
assertTrue("Max", Double.isNaN(a.max(false, true).doubleValue()));
assertTrue("Max", !Double.isNaN(a.max(true, false).doubleValue()));
assertEquals("Max", 11, a.max(true).doubleValue(), 1e-6);
assertTrue("+Max", Double.isNaN(a.positiveMax(false).doubleValue()));
assertTrue("+Max", Double.isNaN(a.positiveMax(false, true).doubleValue()));
assertTrue("+Max", !Double.isNaN(a.positiveMax(true, false).doubleValue()));
assertEquals("+Max", 11, a.positiveMax(true).doubleValue(), 1e-6);
assertTrue("Min", Double.isNaN(a.min().doubleValue()));
assertTrue("Min", !Double.isNaN(a.min(true).doubleValue()));
assertTrue("Min", Double.isNaN(a.min(false, true).doubleValue()));
assertTrue("Min", !Double.isNaN(a.min(true, false).doubleValue()));
assertTrue("Min", !Double.isInfinite(a.min().doubleValue()));
assertTrue("Min", !Double.isInfinite(a.min(true).doubleValue()));
assertTrue("Min", !Double.isInfinite(a.min(false, true).doubleValue()));
assertTrue("Min", Double.isInfinite(a.min(true, false).doubleValue()));
assertEquals("Min", 0, a.min(true).doubleValue(), 1e-6);
assertTrue("+Min", !Double.isNaN(a.positiveMin(true).doubleValue()));
assertTrue("+Min", Double.isNaN(a.positiveMin(false).doubleValue()));
assertTrue("+Min", Double.isNaN(a.positiveMin(false, true).doubleValue()));
assertTrue("+Min", !Double.isNaN(a.positiveMin(true, false).doubleValue()));
assertTrue("+Min", !Double.isInfinite(a.positiveMin(true).doubleValue()));
assertTrue("+Min", !Double.isInfinite(a.positiveMin(false).doubleValue()));
assertTrue("+Min", !Double.isInfinite(a.positiveMin(true, false).doubleValue()));
assertTrue("+Min", !Double.isInfinite(a.positiveMin(false, true).doubleValue()));
assertEquals("+Min", 1, a.positiveMin(true).doubleValue(), 1e-6);
// test other code path
Dataset b = DatasetFactory.createRange(12, Dataset.FLOAT64);
b.setShape(3,4);
b.mean(); // trigger summary stats calculation
assertEquals("Max", 11, b.max().doubleValue(), 1e-6);
assertEquals("Max arg", 11, b.argMax());
b.set(Double.NaN, 1, 0);
b.mean(); // trigger summary stats calculation
assertTrue("Max", Double.isNaN(b.max().doubleValue()));
assertEquals("Max arg", 4, b.argMax());
b.mean(true);
assertEquals("Max", 11, b.max(true).doubleValue(), 1e-6);
assertEquals("Max arg", 11, b.argMax(true));
// check strided datasets give same max/min positions
a = DatasetFactory.createRange(12, Dataset.FLOAT64).reshape(3,4);
b = a.getSliceView(new Slice(1, null), new Slice(0, null, 2));
Dataset c = a.getSlice(new Slice(1, null), new Slice(0, null, 2));
Assert.assertEquals(c.argMax(), b.argMax());
Assert.assertEquals(c.argMin(), b.argMin());
}
@Test
public void testGetSpeed() {
final int ITERATIONS = 1000;
Dataset a = DatasetFactory.createRange(1000000, Dataset.FLOAT64);
long start, startN, startP;
		start = -System.nanoTime(); // warm-up timing, discarded below
for (int i = 0; i < 10; i++) {
a.getDouble(i);
}
start += System.nanoTime();
start = -System.nanoTime();
for (int i = 0; i < ITERATIONS; i++) {
a.getDouble(i);
}
start += System.nanoTime();
startN = -System.nanoTime();
for (int i = 0; i < ITERATIONS; i++) {
a.getDouble(new int[] {i});
}
startN += System.nanoTime();
startP = -System.nanoTime();
int[] pos = new int[1];
for (int i = 0; i < ITERATIONS; i++) {
pos[0] = i;
a.getDouble(pos);
}
startP += System.nanoTime();
System.out.printf("Get 1D double took %gus (cf %gus and %gus)\n", start*1e-3/ITERATIONS, startN*1e-3/ITERATIONS, startP*1e-3/ITERATIONS);
a.setShape(1000, 1000);
		start = -System.nanoTime(); // warm-up timing, discarded below
for (int i = 0; i < 10; i++) {
a.getDouble(i, i);
}
start += System.nanoTime();
start = -System.nanoTime();
for (int i = 0; i < ITERATIONS; i++) {
a.getDouble(i, i);
}
start += System.nanoTime();
startN = -System.nanoTime();
for (int i = 0; i < ITERATIONS; i++) {
a.getDouble(new int[] {i, i});
}
startN += System.nanoTime();
startP = -System.nanoTime();
pos = new int[2];
for (int i = 0; i < ITERATIONS; i++) {
pos[0] = i;
pos[1] = i;
a.getDouble(pos);
}
startP += System.nanoTime();
System.out.printf("Get 2D double took %gus (cf %gus and %gus)\n", start*1e-3/ITERATIONS, startN*1e-3/ITERATIONS, startP*1e-3/ITERATIONS);
}
@Test
public void testHash() {
Dataset a = DatasetFactory.createRange(12, Dataset.FLOAT64);
a.setShape(3,4);
Dataset b = DatasetFactory.createRange(12, Dataset.FLOAT64);
b.setShape(3,4);
b.mean(); // trigger other code path
assertEquals("Hash code", a.hashCode(), b.hashCode());
a.set(Double.POSITIVE_INFINITY, 1, 0);
b.set(Double.POSITIVE_INFINITY, 1, 0);
b.mean(); // trigger other code path
assertEquals("Hash code", a.hashCode(), b.hashCode());
a.set(Double.NaN, 0, 1);
b.set(Double.NaN, 0, 1);
b.mean(); // trigger other code path
assertEquals("Hash code", a.hashCode(), b.hashCode());
}
private static boolean equalsWithNaNs(Dataset a, Dataset b) {
if (a.equals(b))
return true;
IndexIterator ita = a.getIterator();
IndexIterator itb = b.getIterator();
while (ita.hasNext() && itb.hasNext()) {
final double av = a.getElementDoubleAbs(ita.index);
final double bv = b.getElementDoubleAbs(itb.index);
if (Double.isNaN(av)) {
if (!Double.isNaN(bv))
return false;
} else {
if (av != bv)
return false;
}
}
return true;
}
@Test
public void testMaxSpeed() {
long start;
long elapsed;
final int ITERATIONS = 200;
Dataset a = DatasetFactory.createRange(1000000, Dataset.FLOAT64);
		// warm-up iterations; these timings are discarded
		for (int i = 0; i < 10; i++) {
a.set(1, 0);
start = -System.nanoTime();
a.max();
start += System.nanoTime();
}
elapsed = 0;
for (int i = 0; i < ITERATIONS; i++) {
a.set(1, 0);
start = -System.nanoTime();
a.max();
start += System.nanoTime();
elapsed += start;
}
System.out.printf("Max double calculation took %g ms\n", elapsed*1e-6/ITERATIONS);
a = DatasetFactory.createRange(1000000, Dataset.INT16);
elapsed = 0;
for (int i = 0; i < ITERATIONS; i++) {
a.set(1, 0);
start = -System.nanoTime();
a.max();
start += System.nanoTime();
elapsed += start;
}
System.out.printf("Max short calculation took %g ms\n", elapsed*1e-6/ITERATIONS);
}
@Test
public void testSort() {
Dataset a = DatasetFactory.createRange(12, Dataset.FLOAT64);
a.set(Double.NaN, 0);
a.setShape(3, 4);
a.sort(-1);
TestUtils.verbosePrintf("%s\n", a.toString());
assertEquals("First element", 1, a.getDouble(0,0), 1e-6);
assertTrue("0,3 element", Double.isNaN(a.getDouble(0,3)));
assertEquals("Final element", 11, a.getDouble(2,3), 1e-6);
a.sort(0);
TestUtils.verbosePrintf("%s\n", a.toString());
assertEquals("First element", 1, a.getDouble(0,0), 1e-6);
assertEquals("0,3 element", 7, a.getDouble(0,3), 1e-6);
assertTrue("Final element", Double.isNaN(a.getDouble(2,3)));
a.set(12, 0);
a.sort(null);
TestUtils.verbosePrintf("%s\n", a.toString());
assertEquals("First element", 2, a.getDouble(0), 1e-6);
assertEquals("2,2 element", 12, a.getDouble(2,2), 1e-6);
assertTrue("Final element", Double.isNaN(a.getDouble(2,3)));
}
@Test
public void testPut() {
Dataset d1 = DatasetFactory.createRange(6, Dataset.FLOAT64);
DatasetUtils.put(d1, new int[] {2, 5}, DatasetFactory.createFromObject(new double[] {-2, -5.5}));
checkDatasets(d1, DatasetFactory.createFromObject(new double[] {0, 1, -2, 3, 4, -5.5}));
DatasetUtils.put(d1, DatasetFactory.createFromObject(new int[] {0, 4}), DatasetFactory.createFromObject(new double[] {-2, -5.5}));
checkDatasets(d1, DatasetFactory.createFromObject(new double[] {-2, 1, -2, 3, -5.5, -5.5}));
d1 = DatasetFactory.createRange(6, Dataset.FLOAT64).reshape(2, 3);
DatasetUtils.put(d1, new int[] {2, 5}, DatasetFactory.createFromObject(new double[] {-2, -5.5}));
checkDatasets(d1, DatasetFactory.createFromObject(new double[] {0, 1, -2, 3, 4, -5.5}).reshape(2, 3));
DatasetUtils.put(d1, DatasetFactory.createFromObject(new int[] {0, 4}), DatasetFactory.createFromObject(new double[] {-2, -5.5}));
checkDatasets(d1, DatasetFactory.createFromObject(new double[] {-2, 1, -2, 3, -5.5, -5.5}).reshape(2, 3));
}
@Test
public void testTake() {
Dataset a = DatasetFactory.createRange(12, Dataset.FLOAT64);
Dataset t;
t = DatasetUtils.take(a, new int[] {0, 2, 4}, null);
TestUtils.verbosePrintf("%s\n", t.toString());
t = DatasetUtils.take(a, new int[] {0, 2, 4}, 0);
TestUtils.verbosePrintf("%s\n", t.toString());
a.setShape(new int[] {3,4});
TestUtils.verbosePrintf("%s\n", a.toString());
t = DatasetUtils.take(a, new int[] {0}, 0);
TestUtils.verbosePrintf("%s\n", t.toString());
t = DatasetUtils.take(a, new int[] {1}, 0);
TestUtils.verbosePrintf("%s\n", t.toString());
t = DatasetUtils.take(a, new int[] {2}, 0);
TestUtils.verbosePrintf("%s\n", t.toString());
t = DatasetUtils.take(a, new int[] {0}, 1);
TestUtils.verbosePrintf("%s\n", t.toString());
t = DatasetUtils.take(a, new int[] {1}, 1);
TestUtils.verbosePrintf("%s\n", t.toString());
t = DatasetUtils.take(a, new int[] {2}, 1);
TestUtils.verbosePrintf("%s\n", t.toString());
t = DatasetUtils.take(a, new int[] {3}, 1);
TestUtils.verbosePrintf("%s\n", t.toString());
}
/**
* Tests for squeeze method
*/
@Test
public void testSqueeze() {
Dataset ds = DatasetFactory.createRange(10, Dataset.FLOAT64);
ds.setShape(2,1,5);
ds.squeeze();
assertEquals(2, ds.getShape().length);
assertEquals(2, ds.getShape()[0]);
assertEquals(5, ds.getShape()[1]);
int[] os, ns;
os = new int[] { 1, 1 };
ns = AbstractDataset.squeezeShape(os, false);
assertEquals(0, ns.length);
ns = AbstractDataset.squeezeShape(os, true);
assertEquals(0, ns.length);
os = new int[] { 2, 1, 5 };
ns = AbstractDataset.squeezeShape(os, false);
assertEquals(2, ns.length);
assertEquals(2, ns[0]);
assertEquals(5, ns[1]);
ns = AbstractDataset.squeezeShape(os, true);
assertEquals(3, ns.length);
assertEquals(2, ns[0]);
assertEquals(1, ns[1]);
assertEquals(5, ns[2]);
os = new int[] { 2, 1, 5, 1 };
ns = AbstractDataset.squeezeShape(os, false);
assertEquals(2, ns.length);
assertEquals(2, ns[0]);
assertEquals(5, ns[1]);
ns = AbstractDataset.squeezeShape(os, true);
assertEquals(3, ns.length);
assertEquals(2, ns[0]);
assertEquals(1, ns[1]);
assertEquals(5, ns[2]);
os = new int[] { 1, 2, 1, 5 };
ns = AbstractDataset.squeezeShape(os, false);
assertEquals(2, ns.length);
assertEquals(2, ns[0]);
assertEquals(5, ns[1]);
ns = AbstractDataset.squeezeShape(os, true);
assertEquals(3, ns.length);
assertEquals(2, ns[0]);
assertEquals(1, ns[1]);
assertEquals(5, ns[2]);
os = new int[] { 1, 2, 1, 5, 1 };
ns = AbstractDataset.squeezeShape(os, false);
assertEquals(2, ns.length);
assertEquals(2, ns[0]);
assertEquals(5, ns[1]);
ns = AbstractDataset.squeezeShape(os, true);
assertEquals(3, ns.length);
assertEquals(2, ns[0]);
assertEquals(1, ns[1]);
assertEquals(5, ns[2]);
}
/**
* Tests for tile method
*/
@Test
public void testTile() {
// 1D
Dataset ds = DatasetFactory.createRange(3, Dataset.FLOAT64);
Dataset ta = DatasetUtils.tile(ds, 2);
double[] xa = { 0., 1., 2., 0., 1., 2. };
assertEquals(1, ta.getShape().length);
assertEquals(6, ta.getShape()[0]);
for (int i = 0; i < xa.length; i++) {
assertEquals(xa[i], ((DoubleDataset) ta).getData()[i], 1e-6);
}
Dataset tb = DatasetUtils.tile(ds, 1, 2);
assertEquals(2, tb.getShape().length);
assertEquals(1, tb.getShape()[0]);
assertEquals(6, tb.getShape()[1]);
for (int i = 0; i < xa.length; i++) {
assertEquals(xa[i], ((DoubleDataset) tb).getData()[i], 1e-6);
}
Dataset tc = DatasetUtils.tile(ds, 2, 1);
assertEquals(2, tc.getShape().length);
assertEquals(2, tc.getShape()[0]);
assertEquals(3, tc.getShape()[1]);
for (int i = 0; i < xa.length; i++) {
assertEquals(xa[i], ((DoubleDataset) tc).getData()[i], 1e-6);
}
// 2D
ds = DatasetFactory.createRange(6, Dataset.FLOAT64);
ds.setShape(2,3);
Dataset td = DatasetUtils.tile(ds, 2);
double[] xd = { 0., 1., 2., 0., 1., 2., 3., 4., 5., 3., 4., 5. };
assertEquals(2, td.getShape().length);
assertEquals(2, td.getShape()[0]);
assertEquals(6, td.getShape()[1]);
for (int i = 0; i < xd.length; i++) {
assertEquals(xd[i], ((DoubleDataset) td).getData()[i], 1e-6);
}
Dataset te = DatasetUtils.tile(ds, 1, 2);
assertEquals(2, te.getShape().length);
assertEquals(2, te.getShape()[0]);
assertEquals(6, te.getShape()[1]);
for (int i = 0; i < xd.length; i++) {
assertEquals(xd[i], ((DoubleDataset) te).getData()[i], 1e-6);
}
Dataset tf = DatasetUtils.tile(ds, 2, 1);
double[] xf = { 0., 1., 2., 3., 4., 5., 0., 1., 2., 3., 4., 5. };
assertEquals(2, tf.getShape().length);
assertEquals(4, tf.getShape()[0]);
assertEquals(3, tf.getShape()[1]);
for (int i = 0; i < xf.length; i++) {
assertEquals(xf[i], ((DoubleDataset) tf).getData()[i], 1e-6);
}
}
@Test
public void testTileSpeed() throws Exception {
int[][] blocks = {{1024,1}, {256,4}, {64,16}, {32,32}, {16,64}, {4, 256}, {1,1024}};
// int[][] blocks = {{1024,64}, {256,64}, {64,64}, {32,64}, {16,64}, {4, 64}, {1,64}};
int[][] shapes = { {1024, 2048}, {2048, 2048}, {2048, 1024} };
for (int b = 0; b < blocks.length; b++) {
for (int s = 0; s < shapes.length; s++) {
for (int n = 0; n < 3; n++)
runTile(blocks[b][0], blocks[b][1], shapes[s][0], shapes[s][1]);
}
}
}
private void runTile(final int srows, final int scols, final int rows, final int cols) throws Exception {
Dataset a = DatasetFactory.createRange(srows*scols, Dataset.FLOAT64).reshape(srows, scols);
long start, end;
TestUtils.verbosePrintf("Tile %sx%d Block %dx%d: ", rows, cols, srows, scols);
final int nrows = rows/srows;
final int ncols = cols/scols;
start = System.currentTimeMillis();
DoubleDataset b = new DoubleDataset(rows, cols);
final double[] da = (double[]) a.getBuffer();
final double[] db = b.getData();
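// Fill the tiled buffer directly: each output element wraps its row index modulo srows and its
// column index modulo scols to pick the source element (single-column and single-row blocks are
// special-cased; the "i % scols" shortcut is valid here because cols is a multiple of scols).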
if (scols == 1) {
for (int i = 0; i < db.length; i++) {
db[i] = da[(i / cols) % srows];
}
} else if (srows == 1) {
for (int i = 0; i < db.length; i++) {
db[i] = da[i % scols];
}
} else {
for (int i = 0; i < db.length; i++) {
db[i] = da[((i / cols) % srows) * scols + i % scols];
}
}
end = System.currentTimeMillis();
long diff1 = end - start;
TestUtils.verbosePrintf("array = %d ms, ", diff1);
start = System.currentTimeMillis();
final Dataset tiled = DatasetUtils.tile(a, nrows, ncols);
end = System.currentTimeMillis();
long diff2 = end - start;
TestUtils.verbosePrintf("tile = %d ms\n", diff2);
assertEquals(rows, tiled.getShape()[0]);
assertEquals(cols, tiled.getShape()[1]);
if (!tiled.equals(b))
throw new Exception("Datasets not equal!");
if (diff2 > (diff1 * 20))
throw new Exception("Creation of tile took more than 20x as long as array creation of same size! (It took "
+ diff2 + ")");
}
/**
* Tests for transpose method
*/
@Test
public void testTranspose() {
// 2D
Dataset ds = DatasetFactory.createRange(6, Dataset.FLOAT64);
ds.setShape(2,3);
Dataset ta = DatasetUtils.transpose(ds, 1, 0);
double[][] xa = { { 0., 1., 2. }, { 3., 4., 5. } };
assertEquals(2, ta.getShape().length);
assertEquals(3, ta.getShape()[0]);
assertEquals(2, ta.getShape()[1]);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 3; j++) {
assertEquals(xa[i][j], ta.getDouble(j, i), 1e-6);
}
}
ds.set(-2, 1, 2);
assertEquals(-2., ds.getDouble(1, 2), 1e-6);
assertEquals(5., ta.getDouble(2, 1), 1e-6);
ta = ds.getTransposedView(); // test view has changed
assertEquals(-2., ta.getDouble(2, 1), 1e-6);
// 3D
ds = DatasetFactory.createRange(24, Dataset.FLOAT64);
ds.setShape(2, 3, 4);
double[][][] xb = { {{ 0., 1., 2., 3.}, {4., 5., 6., 7.}, {8., 9., 10., 11. }},
{{12., 13., 14., 15.}, {16., 17., 18., 19.}, {20., 21., 22., 23.}} };
Dataset tb;
try {
tb = DatasetUtils.transpose(ds, 0);
fail("Should have raised an IllegalArgumentException");
} catch (IllegalArgumentException e) {
// this is correct.
} catch (Exception e) {
fail("Wrong exception type thrown for invalid transpose axes");
}
try {
tb = DatasetUtils.transpose(ds, 0, -1, 0);
fail("Should have raised an IllegalArgumentException");
} catch (IllegalArgumentException e) {
// this is correct.
} catch (Exception e) {
fail("Wrong exception type thrown for invalid transpose axes");
}
try {
tb = DatasetUtils.transpose(ds, 0, 1, 1);
fail("Should have raised an IllegalArgumentException");
} catch (IllegalArgumentException e) {
// this is correct.
} catch (Exception e) {
fail("Wrong exception type thrown for invalid transpose axes");
}
tb = DatasetUtils.transpose(ds, 0, 1, 2);
assertEquals(3, tb.getShape().length);
assertEquals(2, tb.getShape()[0]);
assertEquals(3, tb.getShape()[1]);
assertEquals(4, tb.getShape()[2]);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 3; j++) {
for (int k = 0; k < 4; k++) {
assertEquals(xb[i][j][k], tb.getDouble(i, j, k), 1e-6);
}
}
}
tb = DatasetUtils.transpose(ds, 1, 0, 2);
assertEquals(3, tb.getShape().length);
assertEquals(3, tb.getShape()[0]);
assertEquals(2, tb.getShape()[1]);
assertEquals(4, tb.getShape()[2]);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 3; j++) {
for (int k = 0; k < 4; k++) {
assertEquals(xb[i][j][k], tb.getDouble(j, i, k), 1e-6);
}
}
}
tb = DatasetUtils.transpose(ds, 2, 0, 1);
assertEquals(3, tb.getShape().length);
assertEquals(4, tb.getShape()[0]);
assertEquals(2, tb.getShape()[1]);
assertEquals(3, tb.getShape()[2]);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 3; j++) {
for (int k = 0; k < 4; k++) {
assertEquals(xb[i][j][k], tb.getDouble(k, i, j), 1e-6);
}
}
}
}
/**
* Tests for repeat method
*/
@Test
public void testRepeat() {
// 2D
Dataset ds = DatasetFactory.createRange(6, Dataset.FLOAT64);
ds.setShape(2,3);
double[] xa = { 0., 0., 1., 1., 2., 2., 3., 3., 4., 4., 5., 5. };
DoubleDataset ta = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {2}, -1);
assertEquals(1, ta.getShape().length);
assertEquals(12, ta.getShape()[0]);
for (int i = 0; i < 12; i++) {
assertEquals(xa[i], ta.get(i), 1e-6);
}
double[][] xb = { { 0., 0., 1., 1., 2., 2. }, { 3., 3., 4., 4., 5., 5. } };
DoubleDataset tb = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {2}, 1);
assertEquals(2, tb.getShape().length);
assertEquals(2, tb.getShape()[0]);
assertEquals(6, tb.getShape()[1]);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 6; j++) {
assertEquals(xb[i][j], tb.get(i, j), 1e-6);
}
}
double[][] xc = { { 0., 1., 2. }, { 0., 1., 2. }, { 3., 4., 5. }, { 3., 4., 5. } };
DoubleDataset tc = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {2}, 0);
assertEquals(2, tc.getShape().length);
assertEquals(4, tc.getShape()[0]);
assertEquals(3, tc.getShape()[1]);
for (int i = 0; i < 4; i++) {
for (int j = 0; j < 3; j++) {
assertEquals(xc[i][j], tc.get(i, j), 1e-6);
}
}
double[][] xd = { { 0., 1., 2. }, { 0., 1., 2. }, { 3., 4., 5. } };
DoubleDataset td = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {2, 1}, 0);
assertEquals(2, td.getShape().length);
assertEquals(3, td.getShape()[0]);
assertEquals(3, td.getShape()[1]);
for (int i = 0; i < 3; i++) {
for (int j = 0; j < 3; j++) {
assertEquals(xd[i][j], td.get(i, j), 1e-6);
}
}
double[][] xe = { { 0., 1., 1., 2., 2., 2.}, { 3., 4., 4., 5., 5., 5. } };
DoubleDataset te = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {1, 2, 3}, 1);
assertEquals(2, te.getShape().length);
assertEquals(2, te.getShape()[0]);
assertEquals(6, te.getShape()[1]);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 6; j++) {
assertEquals(xe[i][j], te.get(i, j), 1e-6);
}
}
double[] xf = { 0., 1., 2., 2., 5., 5., 5. };
DoubleDataset tf = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {1, 1, 2, 0, 0, 3}, -1);
assertEquals(1, tf.getShape().length);
assertEquals(7, tf.getShape()[0]);
for (int i = 0; i < 7; i++) {
assertEquals(xf[i], tf.get(i), 1e-6);
}
try {
tf = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {0}, 3);
fail("Should have raised an IllegalArgumentException");
} catch (IllegalArgumentException e) {
// this is correct.
} catch (Exception e) {
fail("Wrong exception type thrown for invalid repeat arguments");
}
try {
tf = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {2, 1}, -1);
fail("Should have raised an IllegalArgumentException");
} catch (IllegalArgumentException e) {
// this is correct.
} catch (Exception e) {
fail("Wrong exception type thrown for invalid repeat arguments");
}
try {
tf = (DoubleDataset) DatasetUtils.repeat(ds, new int[] {-1}, -1);
fail("Should have raised an IllegalArgumentException");
} catch (IllegalArgumentException e) {
// this is correct.
} catch (Exception e) {
fail("Wrong exception type thrown for invalid repeat arguments");
}
}
/**
* Tests for resize method
*/
@Test
public void testResize() {
int size = 6;
Dataset ds = DatasetFactory.createRange(size, Dataset.FLOAT64);
DoubleDataset tf;
IndexIterator it;
tf = (DoubleDataset) DatasetUtils.resize(ds, 3);
assertArrayEquals(new int[] {3}, tf.getShape());
it = tf.getIterator();
while (it.hasNext()) {
assertEquals(it.index % size, tf.getElementDoubleAbs(it.index), 1e-6);
}
tf = (DoubleDataset) DatasetUtils.resize(ds, 8);
assertArrayEquals(new int[] {8}, tf.getShape());
it = tf.getIterator();
while (it.hasNext()) {
assertEquals(it.index % size, tf.getElementDoubleAbs(it.index), 1e-6);
}
tf = (DoubleDataset) DatasetUtils.resize(ds, 3, 4);
assertArrayEquals(new int[] {3, 4}, tf.getShape());
it = tf.getIterator();
while (it.hasNext()) {
assertEquals(it.index % size, tf.getElementDoubleAbs(it.index), 1e-6);
}
ds.setShape(2,3);
tf = (DoubleDataset) DatasetUtils.resize(ds, 3);
assertArrayEquals(new int[] {3}, tf.getShape());
it = tf.getIterator();
while (it.hasNext()) {
assertEquals(it.index % size, tf.getElementDoubleAbs(it.index), 1e-6);
}
tf = (DoubleDataset) DatasetUtils.resize(ds, 8);
assertArrayEquals(new int[] {8}, tf.getShape());
it = tf.getIterator();
while (it.hasNext()) {
assertEquals(it.index % size, tf.getElementDoubleAbs(it.index), 1e-6);
}
tf = (DoubleDataset) DatasetUtils.resize(ds, 3, 4);
assertArrayEquals(new int[] {3, 4}, tf.getShape());
it = tf.getIterator();
while (it.hasNext()) {
assertEquals(it.index % size, tf.getElementDoubleAbs(it.index), 1e-6);
}
}
/**
* Test contents functions
*/
@Test
public void testContents() {
double[] x = { 0, 2., -12.3 };
double[] y = { 2.3, Double.NaN, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY };
double[] z = { 1e14, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY };
DoubleDataset ta = new DoubleDataset(x);
assertEquals(false, ta.containsNans());
assertEquals(false, ta.containsInfs());
DoubleDataset tb = new DoubleDataset(y);
assertEquals(true, tb.containsNans());
assertEquals(true, tb.containsInfs());
assertEquals(true, Double.isNaN(tb.min().doubleValue()));
assertEquals(false, Double.isInfinite(tb.min().doubleValue()));
assertEquals(true, Double.isNaN(tb.max().doubleValue()));
assertEquals(false, Double.isInfinite(tb.max().doubleValue()));
Dataset f = tb.cast(Dataset.FLOAT32);
assertEquals(true, f.containsNans());
assertEquals(true, f.containsInfs());
assertEquals(true, Double.isNaN(f.min().doubleValue()));
assertEquals(false, Double.isInfinite(f.min().doubleValue()));
assertEquals(true, Double.isNaN(f.max().doubleValue()));
assertEquals(false, Double.isInfinite(f.max().doubleValue()));
DoubleDataset tc = new DoubleDataset(z);
assertEquals(true, Double.isInfinite(tc.min().doubleValue()));
assertEquals(true, Double.isInfinite(tc.max().doubleValue()));
}
@Test
public void testView() {
Dataset a = DatasetFactory.createRange(20, Dataset.FLOAT64);
Dataset b = a.getView();
assertEquals(true, a.equals(b));
}
/**
* Test equals and hashCode
*/
@Test
public void testEquals() {
Dataset a, b, c, d, e;
a = DatasetFactory.createRange(20, Dataset.FLOAT64);
b = DatasetFactory.createRange(20, Dataset.FLOAT64);
c = a.clone();
d = Maths.add(a, 0.5);
e = DatasetFactory.createRange(20, Dataset.FLOAT32);
assertTrue(a.equals(b));
assertFalse(a == b);
assertTrue(a.equals(c));
assertFalse(a == c);
assertFalse(a.equals(d));
assertFalse(a.equals(e));
HashSet<Dataset> set = new HashSet<Dataset>();
set.add(a);
assertTrue(set.contains(a));
assertTrue(set.contains(b));
assertTrue(set.contains(c));
assertFalse(set.contains(d));
assertFalse(set.contains(e));
set.add(b);
assertEquals(1, set.size());
set.add(d);
set.add(e);
assertEquals(3, set.size());
assertTrue(set.contains(d));
assertTrue(set.contains(e));
assertTrue(set.contains(Maths.subtract(d, 0.5)));
assertFalse(set.contains(Maths.subtract(d, 0.5001)));
assertTrue(set.contains(e.cast(Dataset.FLOAT64)));
assertTrue(set.contains(b.cast(Dataset.FLOAT32)));
}
@Test
public void testPrint() {
Dataset a = DatasetFactory.createRange(1000000, Dataset.INT32);
System.out.println(a);
System.out.println(a.reshape(1000, 1000));
System.out.println(a.reshape(100, 100, 100));
// System.out.println(a.reshape(10, 10, 100, 100));
Dataset b = DatasetFactory.createRange(12, Dataset.INT32);
System.out.println(b);
System.out.println(b.reshape(1,12));
System.out.println(b.reshape(4,1,3));
}
@Test
public void testSlicing() {
Dataset a = DatasetFactory.createRange(1000, Dataset.INT32);
Dataset s, t;
IndexIterator is, it;
s = a.getSlice(null, new int[] {10}, null);
assertEquals(1, s.getShape().length);
assertEquals(10, s.getShape()[0]);
is = s.getIterator();
for (int i = 0; is.hasNext(); i++) {
assertEquals(i, s.getElementLongAbs(is.index));
}
t = a.getSlice(new Slice(10));
assertEquals(1, t.getShape().length);
assertEquals(10, t.getShape()[0]);
it = t.getIterator();
for (int i = 0; it.hasNext(); i++) {
assertEquals(i, t.getElementLongAbs(it.index));
}
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
s = a.getSlice(new int[]{0}, new int[] {10}, null);
assertEquals(1, s.getShape().length);
assertEquals(10, s.getShape()[0]);
s = a.getSlice(new int[]{-1000}, new int[] {10}, null);
assertEquals(1, s.getShape().length);
assertEquals(10, s.getShape()[0]);
s = a.getSlice(new int[] {9}, null, new int[] {-1});
assertEquals(1, s.getShape().length);
assertEquals(10, s.getShape()[0]);
is = s.getIterator();
for (int i = 9; is.hasNext(); i--) {
assertEquals(i, s.getElementLongAbs(is.index));
}
t = a.getSlice(new Slice(9, null, -1));
assertEquals(1, t.getShape().length);
assertEquals(10, t.getShape()[0]);
it = t.getIterator();
for (int i = 9; it.hasNext(); i--) {
assertEquals(i, t.getElementLongAbs(it.index));
}
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
s = a.getSlice(new int[] {2}, new int[] {10}, null);
t = a.getSlice(new Slice(2, 10));
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
s = a.getSlice(new int[] {2}, new int[] {10}, new int[] {3});
t = a.getSlice(new Slice(2, 10, 3));
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
t = a.getSlice(new Slice(2000));
assertArrayEquals(a.getShapeRef(), t.getShapeRef());
t = a.getSlice(new Slice(12, 10, 3));
assertArrayEquals(new int[] {0}, t.getShapeRef());
t = a.getSlice(new Slice(2, 10, -3));
assertArrayEquals(new int[] {0}, t.getShapeRef());
a.setShape(10, 10, 10);
s = a.getSlice(null, null, null);
t = a.getSlice();
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
s = a.getSlice(null, null, null);
Slice[] slice = null;
t = a.getSlice(slice);
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
s = a.getSlice(null, new int[] {8, 10, 10}, null);
t = a.getSlice(new Slice(8));
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
s = a.getSlice(null, new int[] {8, 3, 10}, null);
t = a.getSlice(new Slice(8), new Slice(3));
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
s = a.getSlice(null, new int[] {8, 10, 3}, null);
t = a.getSlice(new Slice(8), null, new Slice(3));
is = s.getIterator();
it = t.getIterator();
while (is.hasNext() && it.hasNext()) {
assertEquals(s.getElementLongAbs(is.index), t.getElementLongAbs(it.index));
}
}
@Test
public void testSlicingViews() {
DoubleDataset a, b, c;
a = (DoubleDataset) DatasetFactory.createRange(32, Dataset.FLOAT64).reshape(4, 8);
checkSliceView(a, new int[] {0, 1}, new int[] {3, 5}, new int[] {1, 2});
checkSliceView(a, new int[] {1, -1}, new int[] {-1, 3}, new int[] {1, -2});
a = (DoubleDataset) DatasetFactory.createRange(60, Dataset.FLOAT64).reshape(6, 10);
b = checkSliceView(a, new int[] {0, 1}, new int[] {6, 8}, new int[] {1, 2}); // 6x4
c = (DoubleDataset) b.getSliceView(new int[] {0, 1}, new int[] {1, 4}, null);
c.setShape(3);
checkSliceView(b, new int[] {1, 0}, new int[] {5, 3}, new int[] {2, 1});
checkSliceView(b, new int[] {1, -1}, new int[] {5, 2}, new int[] {2, -1});
c = (DoubleDataset) a.getSlice(new int[] {0, 1}, new int[] {6, 8}, new int[] {1, 2});
b.setShape(2,3,4);
c.setShape(2,3,4);
assertEquals(c, b);
b.setShape(6,4);
b.setShape(6,2,2);
c.setShape(6,2,2);
assertEquals(c, b);
b.setShape(6,4);
try {
b.setShape(2,12);
fail("Should have raised an exception");
} catch (IllegalArgumentException iae) {
// expected
} catch (Exception e) {
fail("Unexpected exception: " + e);
}
b = checkSliceView(a, new int[] {1, -1}, new int[] {-1, 2}, new int[] {1, -2}); // 4x4
checkSliceView(b, new int[] {1, 0}, new int[] {4, 3}, new int[] {2, 1});
checkSliceView(b, new int[] {1, -1}, new int[] {4, 2}, new int[] {2, -1});
b = checkSliceView(a, new int[] {0, 1}, new int[] {1, 8}, new int[] {1, 2}); // 1x4
b = checkSliceView(a, new int[] {0, 1}, new int[] {6, 2}, new int[] {1, 2}); // 6x1
// test special case of zero-rank dataset
a = (DoubleDataset) DatasetFactory.createFromObject(1., Dataset.FLOAT64);
b = (DoubleDataset) a.getSliceView();
b.setShape(1);
assertTrue(b.getIterator().hasNext());
}
private DoubleDataset checkSliceView(DoubleDataset a, int[] start, int[] stop, int[] step) {
DoubleDataset s = (DoubleDataset) a.getSliceView(start, stop, step).squeeze();
DoubleDataset t = (DoubleDataset) a.getSlice(start, stop, step).squeeze();
assertArrayEquals(t.getShape(), s.getShape());
assertEquals(t.toString(true), t, s);
IndexIterator iter = s.getIterator(true);
int[] pos = iter.getPos();
while (iter.hasNext()) {
assertEquals(iter.index, ((AbstractDataset) s).get1DIndex(pos));
int[] p = s.getNDPosition(iter.index);
assertArrayEquals(Arrays.toString(pos) + " : " + Arrays.toString(p), pos, p);
}
// test for correct copying of non-contiguous datasets
assertArrayEquals(((DoubleDataset) t.flatten()).getData(), ((DoubleDataset) s.flatten()).getData(), 1e-15);
TestUtils.assertEquals("Max", t.max().doubleValue(), s.max().doubleValue());
TestUtils.assertEquals("Min", t.min().doubleValue(), s.min().doubleValue());
return s;
}
@Test
public void testSliceStrings() {
String s;
s = Slice.createString(new int[] {3}, null, null, null);
assertEquals(":", s);
s = Slice.createString(new int[] {3}, null, null, new int[] {1});
assertEquals(":", s);
s = Slice.createString(new int[] {3}, null, new int[] {2}, new int[] {1});
assertEquals(":2", s);
s = Slice.createString(new int[] {4}, new int[] {1}, new int[] {3}, new int[] {1});
assertEquals("1:3", s);
s = Slice.createString(new int[] {4}, new int[] {1}, new int[] {2}, new int[] {1});
assertEquals("1", s);
s = Slice.createString(new int[] {4}, new int[] {1}, new int[] {3}, new int[] {2});
assertEquals("1", s);
s = Slice.createString(new int[] {5}, null, null, new int[] {2});
assertEquals("::2", s);
s = Slice.createString(new int[] {5}, new int[] {1}, new int[] {4}, new int[] {2});
assertEquals("1:4:2", s);
s = Slice.createString(new int[] {5}, new int[] {1}, new int[] {5}, new int[] {2});
assertEquals("1::2", s);
s = Slice.createString(new int[] {5}, new int[] {1}, new int[] {3}, new int[] {2});
assertEquals("1", s);
s = Slice.createString(new int[] {3}, null, null, new int[] {-1});
assertEquals("::-1", s);
s = Slice.createString(new int[] {5}, new int[] {3}, new int[] {1}, new int[] {-1});
assertEquals("3:1:-1", s);
s = Slice.createString(new int[] {5}, new int[] {4}, new int[] {1}, new int[] {-1});
assertEquals(":1:-1", s);
s = Slice.createString(new int[] {5}, new int[] {3}, new int[] {0}, new int[] {-1});
assertEquals("3:0:-1", s);
s = Slice.createString(new int[] {5}, new int[] {3}, new int[] {-1}, new int[] {-1});
assertEquals("3::-1", s);
s = Slice.createString(new int[] {5}, new int[] {3}, new int[] {2}, new int[] {-1});
assertEquals("3", s);
s = Slice.createString(new int[] {5}, new int[] {3}, new int[] {1}, new int[] {-2});
assertEquals("3", s);
s = Slice.createString(new int[] {3, 2}, null, null, null);
assertEquals(":,:", s);
}
@Test
public void testSetSlice() {
Dataset a = DatasetFactory.createRange(100, Dataset.FLOAT64).reshape(20, 5);
a.setSlice(-2, null, new Slice(null, null, 2));
assertEquals(-2, a.getDouble(0, 0), 1e-15);
assertEquals(1, a.getDouble(0, 1), 1e-15);
assertEquals(-2, a.getDouble(0, 2), 1e-15);
assertEquals(3, a.getDouble(0, 3), 1e-15);
assertEquals(-2, a.getDouble(0, 4), 1e-15);
// with broadcasting
a = DatasetFactory.createRange(100, Dataset.FLOAT64).reshape(20, 5);
a.setSlice(DatasetFactory.createRange(3, Dataset.INT16), new Slice(2, 10), new Slice(null, null, 2));
assertEquals(0, a.getDouble(0, 0), 1e-15);
assertEquals(1, a.getDouble(0, 1), 1e-15);
assertEquals(2, a.getDouble(0, 2), 1e-15);
assertEquals(3, a.getDouble(0, 3), 1e-15);
assertEquals(4, a.getDouble(0, 4), 1e-15);
assertEquals(5, a.getDouble(1, 0), 1e-15);
assertEquals(6, a.getDouble(1, 1), 1e-15);
assertEquals(7, a.getDouble(1, 2), 1e-15);
assertEquals(8, a.getDouble(1, 3), 1e-15);
assertEquals(9, a.getDouble(1, 4), 1e-15);
assertEquals(0, a.getDouble(2, 0), 1e-15);
assertEquals(11, a.getDouble(2, 1), 1e-15);
assertEquals(1, a.getDouble(2, 2), 1e-15);
assertEquals(13, a.getDouble(2, 3), 1e-15);
assertEquals(2, a.getDouble(2, 4), 1e-15);
// compound
CompoundDataset c = DatasetFactory.createRange(3, 100, Dataset.ARRAYFLOAT64).reshape(20, 5);
c.setSlice(DatasetFactory.createRange(3, Dataset.INT16), new Slice(2, 10), new Slice(null, null, 2));
}
@Test
public void test1DErrors() {
// test 1D errors for single value
Dataset a = DatasetFactory.createRange(100, Dataset.INT32);
a.setError(5);
assertEquals(5.0, a.getError(0), 0.001);
assertEquals(5.0, a.getError(50), 0.001);
assertEquals(5.0, a.getError(99), 0.001);
assertTrue(a.hasErrors());
// now for pulling out the full error array
Dataset error = a.getError();
// check compatibility
try {
AbstractDataset.checkCompatibility(a, error);
} catch (Exception e) {
fail("Error shape is not the same as input datasets");
}
assertEquals(5.0, error.getDouble(0), 0.001);
assertEquals(5.0, error.getDouble(50), 0.001);
assertEquals(5.0, error.getDouble(99), 0.001);
// Now set the error as a whole array
a.setError(Maths.multiply(error, 2));
assertEquals(10.0, a.getError(0), 0.001);
assertEquals(10.0, a.getError(50), 0.001);
assertEquals(10.0, a.getError(99), 0.001);
// test pulling the error out again, to make sure it's correct
Dataset error2 = a.getError();
// check compatibility
try {
AbstractDataset.checkCompatibility(a, error2);
} catch (Exception e) {
fail("Error shape is not the same as input datasets");
}
assertEquals(10.0, error2.getDouble(0), 0.001);
assertEquals(10.0, error2.getDouble(50), 0.001);
assertEquals(10.0, error2.getDouble(99), 0.001);
}
@Test
public void test2DErrors() {
// test 2D errors for single value
Dataset a = DatasetFactory.zeros(new int[] {100,100}, Dataset.INT32);
a.setError(5);
assertEquals(5.0, a.getError(0,0), 0.001);
assertEquals(5.0, a.getError(50,50), 0.001);
assertEquals(5.0, a.getError(99,99), 0.001);
assertTrue(a.hasErrors());
// now for pulling out the full error array
Dataset error = a.getError();
// check compatibility
try {
AbstractDataset.checkCompatibility(a, error);
} catch (Exception e) {
fail("Error shape is not the same as input datasets");
}
assertEquals(5.0, error.getDouble(0,0), 0.001);
assertEquals(5.0, error.getDouble(50,50), 0.001);
assertEquals(5.0, error.getDouble(99,99), 0.001);
// Now set the error as a whole array
a.setError(Maths.multiply(error, 2));
assertEquals(10.0, a.getError(0,0), 0.001);
assertEquals(10.0, a.getError(50,50), 0.001);
assertEquals(10.0, a.getError(99,99), 0.001);
// test pulling the error out again, to make sure it's correct
Dataset error2 = a.getError();
// check compatibility
try {
AbstractDataset.checkCompatibility(a, error2);
} catch (Exception e) {
fail("Error shape is not the same as input datasets");
}
assertEquals(10.0, error2.getDouble(0,0), 0.001);
assertEquals(10.0, error2.getDouble(50,50), 0.001);
assertEquals(10.0, error2.getDouble(99,99), 0.001);
}
@Test
public void testSetErrorBuffer() {
Dataset a = DatasetFactory.zeros(new int[] {100,100}, Dataset.INT32);
Dataset err = DatasetFactory.createLinearSpace(0, a.getSize() - 1, a.getSize(), Dataset.FLOAT64);
err.setShape(a.getShape());
a.setErrorBuffer(null);
assertFalse(a.hasErrors());
a.setErrorBuffer(25.0);
assertEquals(5.0, a.getError(0,0), 0.001);
assertEquals(5.0, a.getError(50,50), 0.001);
assertEquals(5.0, a.getError(99,99), 0.001);
assertTrue(a.hasErrors());
// now for pulling out the full error array and check compatibility
Dataset error = a.getError();
try {
AbstractDataset.checkCompatibility(a, error);
} catch (Exception e) {
fail("Error shape is not the same as input datasets");
}
a.setErrorBuffer(err);
assertEquals(0.0, a.getError(0,0), 0.001);
assertEquals(Math.sqrt(50.0 + 100*50.0), a.getError(50,50), 0.001);
assertEquals(Math.sqrt(99.0 + 100*99.0), a.getError(99,99), 0.001);
assertTrue(a.hasErrors());
// now for pulling out the full error array and check compatibility
error = a.getError();
try {
AbstractDataset.checkCompatibility(a, error);
} catch (Exception e) {
fail("Error shape is not the same as input datasets");
}
a.setErrorBuffer(err.getBuffer());
assertEquals(0.0, a.getError(0,0), 0.001);
assertEquals(Math.sqrt(35.0 + 100*25.0), a.getError(25,35), 0.001);
assertEquals(Math.sqrt(99.0 + 100*99.0), a.getError(99,99), 0.001);
assertTrue(a.hasErrors());
// now for pulling out the full error array and check compatibility
error = a.getError();
try {
AbstractDataset.checkCompatibility(a, error);
} catch (Exception e) {
fail("Error shape is not the same as input datasets");
}
}
@Test
public void testInternalErrors() {
// test that a single error value set on a 1D dataset is stored internally as its square
Dataset a = DatasetFactory.createRange(100, Dataset.INT32);
a.setError(5);
// should be squared
Number ne = (Number) a.getErrorBuffer().getObjectAbs(0);
assertEquals(25.0, ne.doubleValue(), 0.001);
// now for pulling out the full error array
Dataset error = a.getError();
a.setError(Maths.multiply(error, 2));
// should also be squared
Dataset ae = a.getErrorBuffer();
assertEquals(100.0, ae.getDouble(0), 0.001);
assertEquals(100.0, ae.getDouble(50), 0.001);
assertEquals(100.0, ae.getDouble(99), 0.001);
}
@Test
public void testZeroRankDatasets() {
Dataset a;
a = DoubleDataset.ones();
assertEquals("Rank", 0, a.getRank());
assertEquals("Shape", 0, a.getShape().length);
assertEquals("Value", 1.0, a.getObject());
assertEquals("Max", 1.0, a.max());
assertEquals("Min", 1.0, a.min());
assertEquals("MaxPos", 0, a.maxPos().length);
assertEquals("MinPos", 0, a.minPos().length);
assertEquals("ArgMax", 0, a.argMax());
assertEquals("ArgMin", 0, a.argMin());
assertEquals("Value", true, a.equals(new Double(1.0)));
a = DatasetFactory.zeros(new int[] {}, Dataset.INT16);
assertEquals("Rank", 0, a.getRank());
assertEquals("Shape", 0, a.getShape().length);
assertEquals("Value", (short) 0, a.getObject());
a = DatasetFactory.createFromObject(new Complex(1.0, -0.5));
assertEquals("Rank", 0, a.getRank());
assertEquals("Shape", 0, a.getShape().length);
assertEquals("Value", new Complex(1.0, -0.5), a.getObject());
assertEquals("Real view value", 1.0, a.realView().getObject());
assertEquals("Imaginary view value", -0.5, ((ComplexDoubleDataset) a).imagView().getObject());
a = DatasetFactory.createFromObject(1.f);
assertEquals("Rank", 0, a.getRank());
assertEquals("Shape", 0, a.getShape().length);
assertEquals("Value", 1.f, a.getObject());
a = DoubleDataset.ones(1);
a.squeeze();
assertEquals("Rank", 0, a.getRank());
assertEquals("Shape", 0, a.getShape().length);
a = DatasetFactory.createFromObject(1.f);
assertEquals("Equals", a, DatasetFactory.createFromObject(1.f));
assertFalse("Differs", a.equals(DatasetFactory.createFromObject(2.f)));
}
@Test
public void testConcatenate() {
Dataset a, b, c, d;
a = DatasetFactory.createRange(6, Dataset.FLOAT64);
b = DatasetFactory.createRange(6, 8, 1, Dataset.FLOAT64);
c = DatasetUtils.concatenate(new IDataset[] {a, b}, 0);
d = DatasetFactory.createRange(8, Dataset.FLOAT64);
assertEquals("Rank", 1, c.getRank());
assertTrue("Dataset", c.equals(d));
a = DatasetFactory.createRange(6, Dataset.FLOAT64).reshape(3,2);
b = DatasetFactory.createRange(6, 8, 1, Dataset.FLOAT64).reshape(1,2);
c = DatasetUtils.concatenate(new IDataset[] {a, b}, 0);
d = DatasetFactory.createRange(8, Dataset.FLOAT64).reshape(4,2);
assertEquals("Rank", 2, c.getRank());
assertTrue("Dataset", c.equals(d));
a.setShape(2,3);
b = DatasetFactory.createRange(6, 9, 1, Dataset.FLOAT64).reshape(1,3);
c = DatasetUtils.concatenate(new IDataset[] {a, b}, 0);
d = DatasetFactory.createRange(9, Dataset.FLOAT64).reshape(3,3);
assertEquals("Rank", 2, c.getRank());
assertTrue("Dataset", c.equals(d));
a = DatasetFactory.createRange(2, Dataset.FLOAT64).reshape(1,2);
b = DatasetFactory.createRange(3, 5, 1, Dataset.FLOAT64).reshape(1,2);
a = DatasetUtils.concatenate(new IDataset[] {a, b}, 0);
b = DatasetFactory.createRange(2, 6, 3, Dataset.FLOAT64).reshape(2,1);
c = DatasetUtils.concatenate(new IDataset[] {a, b}, 1);
d = DatasetFactory.createRange(6, Dataset.FLOAT64).reshape(2,3);
assertEquals("Rank", 2, c.getRank());
assertTrue("Dataset", c.equals(d));
}
@Test
public void testSum() {
Dataset a = DatasetFactory.createRange(1024*1024, Dataset.INT32);
assertEquals("Typed sum", -524288, a.typedSum(Dataset.INT32));
a = DatasetFactory.createRange(12, Dataset.FLOAT64);
a.setShape(3,4);
assertEquals("Sum", 11*6, ((Number) a.sum()).doubleValue(), 1e-6);
a.set(Double.NaN, 0,0);
assertTrue("Sum", Double.isNaN(((Number) a.sum()).doubleValue()));
assertEquals("Sum", 11*6, ((Number) a.sum(true)).doubleValue(), 1e-6);
}
@Test
public void testMakeFinite() {
Dataset a = DatasetFactory.createFromObject(new double[] {0, Double.POSITIVE_INFINITY, Double.NaN, Double.NEGATIVE_INFINITY });
DatasetUtils.makeFinite(a);
assertTrue("Make finite", DatasetFactory.createFromObject(new double[] {0, Double.MAX_VALUE, 0, -Double.MAX_VALUE}).equals(a));
}
@Test
public void testCast() {
long[] udata = new long[] {0, 1, 127, 128, 255, 256, 32767, 32768, 65535, 65536, 2147483647L, 2147483648L, 4294967295L, 4294967296L};
Dataset d = new LongDataset(udata);
Dataset a, c;
c = DatasetUtils.cast(d, Dataset.INT32);
Assert.assertTrue(c.max().doubleValue() < d.max().doubleValue()); // check stored values
a = DatasetFactory.createFromObject(c, true);
assertEquals("", 0, a.getLong(13));
for (int i = 0; i < 13; i++)
assertEquals("", udata[i], a.getLong(i));
c = DatasetUtils.cast(d, Dataset.INT16);
Assert.assertTrue(c.max().doubleValue() < d.max().doubleValue());
a = DatasetFactory.createFromObject(c, true);
assertEquals("", 0, a.getLong(9));
for (int i = 0; i < 9; i++)
assertEquals("", udata[i], a.getLong(i));
c = DatasetUtils.cast(d, Dataset.INT8);
Assert.assertTrue(c.max().doubleValue() < d.max().doubleValue());
a = DatasetFactory.createFromObject(c, true);
assertEquals("", 0, a.getLong(5));
for (int i = 0; i < 5; i++)
assertEquals("", udata[i], a.getLong(i));
}
@Test
public void testRoll() {
Dataset a = DatasetFactory.createRange(10, Dataset.INT32);
Dataset r = DatasetUtils.roll(a, 2, null);
TestUtils.assertDatasetEquals(r, Maths.add(a, 10-2).iremainder(10), 1e-6, 1e-6);
r = DatasetUtils.roll(a, -2, null);
TestUtils.assertDatasetEquals(r, Maths.add(a, 10+2).iremainder(10), 1e-6, 1e-6);
a.setShape(2,5);
r = DatasetUtils.roll(a, 1, null);
TestUtils.assertDatasetEquals(r, Maths.add(a, 10-1).iremainder(10).reshape(2,5), 1e-6, 1e-6);
r = DatasetUtils.roll(a, 1, 0);
TestUtils.assertDatasetEquals(r, Maths.add(a, 5).iremainder(10).reshape(2,5), 1e-6, 1e-6);
r = DatasetUtils.roll(a, 1, 1);
TestUtils.assertDatasetEquals(r, new IntegerDataset(new int[] {4, 0, 1, 2, 3, 9, 5, 6, 7, 8}, 2,5), 1e-6, 1e-6);
}
@Test
public void testRollAxis() {
Dataset a = DatasetFactory.ones(new int[] {3, 4, 5, 6}, Dataset.INT8);
Assert.assertArrayEquals(new int[] {3, 6, 4, 5}, DatasetUtils.rollAxis(a, 3, 1).getShape());
Assert.assertArrayEquals(new int[] {5, 3, 4, 6}, DatasetUtils.rollAxis(a, 2, 0).getShape());
Assert.assertArrayEquals(new int[] {3, 5, 6, 4}, DatasetUtils.rollAxis(a, 1, 4).getShape());
}
@Test
public void testFindOccurrences() {
Dataset a = new DoubleDataset(new double[] {0, 0, 3, 7, -4, 2, 1});
Dataset v = DatasetFactory.createRange(-3, 3, 1, Dataset.FLOAT64);
Dataset indexes = DatasetUtils.findFirstOccurrences(a, v);
TestUtils.assertDatasetEquals(new IntegerDataset(new int[] {-1, -1, -1, 0, 6, 5}, null), indexes, true, 1, 1);
}
@Test
public void testFindIndexes() {
Dataset a = new DoubleDataset(new double[] {0, 0, 3, 7, -4, 2, 1});
Dataset v = DatasetFactory.createRange(-3, 3, 1, Dataset.FLOAT64);
IntegerDataset indexes = DatasetUtils.findIndexesForValues(a, v);
TestUtils.assertDatasetEquals(new IntegerDataset(new int[] {3, 3, -1, -1, -1, 5, 4}, null), indexes, true, 1, 1);
v = new DoubleDataset(new double[] {-4, 0, 1, 2, 3, 7});
indexes = DatasetUtils.findIndexesForValues(a, v);
TestUtils.assertDatasetEquals(a, v.getBy1DIndex(indexes), true, 1e-6, 1e-6);
}
@Test
public void testAppend() {
double[] x = { 0., 1., 2., 3., 4., 5. };
Dataset d1 = DoubleDataset.createRange(3.);
Dataset d2 = DoubleDataset.createRange(3., 6., 1.);
Dataset d3 = DatasetUtils.append(d1, d2, 0);
for (int i = 0; i < x.length; i++) {
assertEquals("Append 1", x[i], d3.getDouble(i), 1e-8);
}
d1.setShape(1, 3);
d2.setShape(1, 3);
d3 = DatasetUtils.append(d1, d2, 0);
Dataset d4 = new DoubleDataset(x);
d4.setShape(2, 3);
for (int i = 0; i < 2; i++) {
for (int j = 0; j < 3; j++) {
assertEquals("Append 2", d4.getDouble(i, j), d3.getDouble(i, j), 1e-8);
}
}
d3 = DatasetUtils.append(d1, d2, 1);
d4 = new DoubleDataset(x);
d4.setShape(1, 6);
for (int i = 0; i < 1; i++) {
for (int j = 0; j < 6; j++) {
assertEquals("Append 3", d4.getDouble(i, j), d3.getDouble(i, j), 1e-8);
}
}
}
public static void checkDatasets(Dataset calc, Dataset expected) {
checkDatasets(expected, calc, 1e-5, 1e-5);
}
public static void checkDatasets(Dataset expected, Dataset calc, double relTol, double absTol) {
checkDatasets(expected, calc, false, relTol, absTol);
}
public static void checkDatasets(Dataset expected, Dataset calc, boolean valuesOnly, double relTol, double absTol) {
int type = expected.getDtype();
if (!valuesOnly) {
Assert.assertEquals("Type", type, calc.getDtype());
Assert.assertEquals("Items", expected.getElementsPerItem(), calc.getElementsPerItem());
}
Assert.assertEquals("Size", expected.getSize(), calc.getSize());
try {
Assert.assertArrayEquals("Shape", expected.getShape(), calc.getShape());
} catch (AssertionError e) {
if (calc.getSize() == 1) {
Assert.assertArrayEquals("Shape", new int[0], calc.getShape());
} else {
throw e;
}
}
IndexIterator at = expected.getIterator(true);
IndexIterator bt = calc.getIterator();
final int eis = expected.getElementsPerItem();
final int cis = calc.getElementsPerItem();
final int is = Math.max(eis, cis);
if (expected.elementClass().equals(Boolean.class)) {
while (at.hasNext() && bt.hasNext()) {
for (int j = 0; j < is; j++) {
boolean e = j >= eis ? false : expected.getElementBooleanAbs(at.index + j);
boolean c = j >= cis ? false : calc.getElementBooleanAbs(bt.index + j);
Assert.assertEquals("Value does not match at " + Arrays.toString(at.getPos()) + "; " + j +
": ", e, c);
}
}
} else if (expected.hasFloatingPointElements()) {
while (at.hasNext() && bt.hasNext()) {
for (int j = 0; j < is; j++) {
double e = j >= eis ? 0 : expected.getElementDoubleAbs(at.index + j);
double c = j >= cis ? 0 : calc.getElementDoubleAbs(bt.index + j);
double t = Math.max(absTol, relTol*Math.max(Math.abs(e), Math.abs(c)));
Assert.assertEquals("Value does not match at " + Arrays.toString(at.getPos()) + "; " + j +
": ", e, c, t);
}
}
} else if (type == Dataset.STRING) {
StringDataset es = (StringDataset) expected;
StringDataset cs = (StringDataset) calc;
while (at.hasNext() && bt.hasNext()) {
Assert.assertEquals("Value does not match at " + Arrays.toString(at.getPos()) + ": ",
es.getAbs(at.index), cs.getAbs(bt.index));
}
} else if (type == Dataset.OBJECT) {
ObjectDataset eo = (ObjectDataset) expected;
ObjectDataset co = (ObjectDataset) calc;
while (at.hasNext() && bt.hasNext()) {
Assert.assertEquals("Value does not match at " + Arrays.toString(at.getPos()) + ": ",
eo.getAbs(at.index), co.getAbs(bt.index));
}
} else {
while (at.hasNext() && bt.hasNext()) {
for (int j = 0; j < is; j++) {
long e = j >= eis ? 0 : expected.getElementLongAbs(at.index + j);
long c = j >= cis ? 0 : calc.getElementLongAbs(bt.index + j);
Assert.assertEquals("Value does not match at " + Arrays.toString(at.getPos()) + "; " + j +
": ", e, c);
}
}
}
}
@Test
public void testSelect() {
DoubleDataset a = new DoubleDataset(new double[] { 0, 1, 3, 5, -7, -9 });
DoubleDataset b = new DoubleDataset(new double[] { 0.01, 1.2, 2.9, 5, -7.1, -9 });
Dataset c = a.clone().reshape(2, 3);
BooleanDataset d = new BooleanDataset(new boolean[] {false, true, false, false, true, false}, 2, 3);
DoubleDataset e = (DoubleDataset) DatasetUtils.select(new BooleanDataset[] {d}, new Object[] {c}, -2);
checkDatasets(e, new DoubleDataset(new double[] {-2, 1, -2, -2, -7, -2}, 2, 3));
Dataset f = b.clone().reshape(2, 3);
BooleanDataset g = new BooleanDataset(new boolean[] {false, true, true, false, false, false}, 2, 3);
e = (DoubleDataset) DatasetUtils.select(new BooleanDataset[] {d, g}, new Dataset[] {c, f}, -2.5);
checkDatasets(e, new DoubleDataset(new double[] {-2.5, 1, 2.9, -2.5, -7, -2.5}, 2, 3));
e = (DoubleDataset) DatasetUtils.select(d, c, -2);
checkDatasets(e, new DoubleDataset(new double[] {-2, 1, -2, -2, -7, -2}, 2, 3));
}
@Test
public void testChoose() {
DoubleDataset a = new DoubleDataset(new double[] { 0, 1, 3, 5, -7, -9 });
DoubleDataset b = new DoubleDataset(new double[] { 0.01, 1.2, 2.9, 5, -7.1, -9 });
Dataset c = a.clone().reshape(2, 3);
IntegerDataset d = new IntegerDataset(new int[] {0, 0, 1, 1, 0, 1}, 2, 3);
DoubleDataset e = (DoubleDataset) DatasetUtils.choose(d, new Object[] {c, -2}, true, false);
checkDatasets(e, new DoubleDataset(new double[] {0, 1, -2, -2, -7, -2}, 2, 3));
d = new IntegerDataset(new int[] {-2, 0, 3, 1, 0, 2}, 2, 3);
try {
e = (DoubleDataset) DatasetUtils.choose(d, new Object[] {c, -2}, true, false);
fail("Should have thrown an array index OOB exception");
} catch (ArrayIndexOutOfBoundsException oob) {
// expected
}
e = (DoubleDataset) DatasetUtils.choose(d, new Object[] {c, -2}, false, false);
checkDatasets(e, new DoubleDataset(new double[] {0, 1, -2, -2, -7, -9}, 2, 3));
e = (DoubleDataset) DatasetUtils.choose(d, new Object[] {c, -2}, false, true);
checkDatasets(e, new DoubleDataset(new double[] {0, 1, -2, -2, -7, -2}, 2, 3));
Dataset f = b.clone().reshape(2, 3);
IntegerDataset g = new IntegerDataset(new int[] {1, 0, 1, 1, 2, 2}, 2, 3);
e = (DoubleDataset) DatasetUtils.choose(g, new Object[] {c, f, -2}, true, false);
checkDatasets(e, new DoubleDataset(new double[] {0.01, 1, 2.9, 5, -2, -2}, 2, 3));
g = new IntegerDataset(new int[] {-1, 3, 1, 1, 2, 2}, 2, 3);
try {
e = (DoubleDataset) DatasetUtils.choose(d, new Object[] {c, f, -2}, true, false);
fail("Should have thrown an array index OOB exception");
} catch (ArrayIndexOutOfBoundsException oob) {
// expected
}
e = (DoubleDataset) DatasetUtils.choose(g, new Object[] {c, f, -2}, false, false);
checkDatasets(e, new DoubleDataset(new double[] {-2, 1, 2.9, 5, -2, -2}, 2, 3));
e = (DoubleDataset) DatasetUtils.choose(g, new Object[] {c, f, -2}, false, true);
checkDatasets(e, new DoubleDataset(new double[] {0, -2, 2.9, 5, -2, -2}, 2, 3));
}
@Test
public void testSize() {
int[] zero = new int[] {0};
int[] one = new int[] {};
int[] small = new int[] {2};
int[] medium = new int[] {1024, 1024};
int[] large = new int[] {1024, 1024, 1024};
int[] xxxlarge = new int[] {1024, 1024, 1024, 1024};
int[] bad = new int[] {1024, -1, 1024};
assertEquals(0, AbstractDataset.calcLongSize(zero));
assertEquals(0, AbstractDataset.calcSize(zero));
assertEquals(1, AbstractDataset.calcLongSize(one));
assertEquals(1, AbstractDataset.calcSize(one));
assertEquals(2, AbstractDataset.calcLongSize(small));
assertEquals(2, AbstractDataset.calcSize(small));
assertEquals(1024*1024, AbstractDataset.calcLongSize(medium));
assertEquals(1024*1024, AbstractDataset.calcSize(medium));
assertEquals(1024*1024*1024, AbstractDataset.calcLongSize(large));
assertEquals(1024*1024*1024, AbstractDataset.calcSize(large));
assertEquals(1024*1024*1024*1024L, AbstractDataset.calcLongSize(xxxlarge));
try {
AbstractDataset.calcSize(xxxlarge);
fail("Should have thrown an illegal argument exception");
} catch (IllegalArgumentException e) {
// expected
} catch (Throwable t) {
fail("Should have thrown an illegal argument exception");
}
try {
AbstractDataset.calcLongSize(bad);
fail("Should have thrown an illegal argument exception");
} catch (IllegalArgumentException e) {
// expected
} catch (Throwable t) {
fail("Should have thrown an illegal argument exception");
}
try {
AbstractDataset.calcSize(bad);
fail("Should have thrown an illegal argument exception");
} catch (IllegalArgumentException e) {
// expected
} catch (Throwable t) {
fail("Should have thrown an illegal argument exception");
}
}
@Test
public void testFill() {
Dataset a = DatasetFactory.createRange(12, Dataset.FLOAT64);
Dataset b = DatasetFactory.zeros(a);
a.fill(0);
checkDatasets(a, b, 1e-15, 1e-20);
a.fill(0.);
checkDatasets(a, b, 1e-15, 1e-20);
a.fill(0L);
checkDatasets(a, b, 1e-15, 1e-20);
a.fill(new Complex(0));
checkDatasets(a, b, 1e-15, 1e-20);
a.fill(DatasetFactory.createFromObject(0));
checkDatasets(a, b, 1e-15, 1e-20);
a.fill(DatasetFactory.createFromObject(new int[] {0}));
checkDatasets(a, b, 1e-15, 1e-20);
try {
a.fill(DatasetFactory.createFromObject(new int[] {0, 1}));
fail();
} catch (IllegalArgumentException e) {
}
}
@Test
public void testPositions() {
int[] shape = new int[] { 23, 34, 2 };
int[] indexes = new int[] {1, 10, 70, 171};
List<IntegerDataset> list = DatasetUtils.calcPositionsFromIndexes(new IntegerDataset(indexes, 2, 2), shape);
Assert.assertEquals(shape.length, list.size());
IntegerDataset l = list.get(0);
Assert.assertEquals(2, l.getRank());
Assert.assertEquals(2, l.getShapeRef()[0]);
Assert.assertEquals(2, l.getShapeRef()[1]);
checkPositions(list, new int[] {0, 0, 1}, 0, 0);
checkPositions(list, new int[] {0, 5, 0}, 0, 1);
checkPositions(list, new int[] {1, 1, 0}, 1, 0);
checkPositions(list, new int[] {2, 17, 1}, 1, 1);
}
private void checkPositions(List<IntegerDataset> list, int[] expected, int... position) {
int j = 0;
for (int i : expected) {
IntegerDataset l = list.get(j++);
Assert.assertEquals(i, l.getInt(position));
}
}
@Test
public void testIndexes() {
List<IntegerDataset> list = new ArrayList<IntegerDataset>();
int[] shape = new int[] { 23, 34, 2 };
list.add(new IntegerDataset(new int[] {0, 0, 1, 2}, 2, 2));
list.add(new IntegerDataset(new int[] {0, 5, 1, 17}, 2, 2));
list.add(new IntegerDataset(new int[] {1, 0, 0, 1}, 2, 2));
IntegerDataset indexes = DatasetUtils.calcIndexesFromPositions(list, shape, null);
checkDatasets(indexes, new IntegerDataset(new int[] {1, 10, 70, 171}, 2, 2));
list.set(1, new IntegerDataset(new int[] {0, -5, 1, 17}, 2, 2));
try {
indexes = DatasetUtils.calcIndexesFromPositions(list, shape, null);
Assert.fail("Should have thrown an exception");
} catch (Exception e) {
}
list.set(1, new IntegerDataset(new int[] {0, 34, 1, 17}, 2, 2));
try {
indexes = DatasetUtils.calcIndexesFromPositions(list, shape, null);
Assert.fail("Should have thrown an exception");
} catch (Exception e) {
}
list.set(1, new IntegerDataset(new int[] {0, 39, 1, 17}, 2, 2));
indexes = DatasetUtils.calcIndexesFromPositions(list, shape, 1);
checkDatasets(indexes, new IntegerDataset(new int[] {1, 10, 70, 171}, 2, 2));
list.set(1, new IntegerDataset(new int[] {0, -29, 1, 17}, 2, 2));
indexes = DatasetUtils.calcIndexesFromPositions(list, shape, 1);
checkDatasets(indexes, new IntegerDataset(new int[] {1, 10, 70, 171}, 2, 2));
list.set(1, new IntegerDataset(new int[] {-2, 5, 1, 17}, 2, 2));
indexes = DatasetUtils.calcIndexesFromPositions(list, shape, 2);
checkDatasets(indexes, new IntegerDataset(new int[] {1, 10, 70, 171}, 2, 2));
list.set(1, new IntegerDataset(new int[] {34, 5, 1, 17}, 2, 2));
indexes = DatasetUtils.calcIndexesFromPositions(list, shape, 2);
checkDatasets(indexes, new IntegerDataset(new int[] {33*2 + 1, 10, 70, 171}, 2, 2));
}
@Test
public void testSetByBoolean() {
Dataset a = DatasetFactory.createRange(10, Dataset.INT32);
a.max();
a.setByBoolean(0, Comparisons.greaterThan(a, 5));
Assert.assertEquals(a.max().longValue(), 5);
}
@Test
public void testExtract() {
Dataset a = DatasetFactory.createRange(20, Dataset.INT32).reshape(4,5);
Dataset b = DatasetFactory.createFromObject(new boolean[] {true, false, true, false, false});
checkDatasets(DatasetUtils.extract(a, b), DatasetFactory.createFromObject(new int[] {0, 2, 5, 7, 10, 12, 15, 17}));
}
@Test
public void testSetBy1DIndex() {
Dataset a = DatasetFactory.createRange(10, Dataset.INT32);
a.max();
a.setBy1DIndex(0, Comparisons.nonZero(Comparisons.greaterThan(a, 5)).get(0));
Assert.assertEquals(a.max().longValue(), 5);
}
@Test
public void testSetByPosition() {
Dataset a = DatasetFactory.createRange(10, Dataset.INT32);
a.max();
List<IntegerDataset> list = Comparisons.nonZero(Comparisons.greaterThan(a, 5));
a.setByIndexes(0, list.get(0));
Assert.assertEquals(a.max().longValue(), 5);
a = DatasetFactory.createRange(10, Dataset.INT32).reshape(2, 5);
a.max();
list = Comparisons.nonZero(Comparisons.greaterThan(a, 5));
a.setByIndexes(0, list.get(0), list.get(1));
Assert.assertEquals(a.max().longValue(), 5);
}
@Test
public void testReshape() {
Dataset a = DatasetFactory.createRange(60, Dataset.INT32);
Dataset b = a.getSliceView(new int[] {1}, null, new int[] {2});
Dataset c = a.getSlice(new int[] {1}, null, new int[] {2});
checkDatasets(b, c);
// check if strides still work
b.setShape(6, 5);
c.setShape(6, 5);
checkDatasets(b, c);
b.setShape(1, 6, 5);
c.setShape(1, 6, 5);
checkDatasets(b, c);
b.setShape(1, 6, 1, 5);
c.setShape(1, 6, 1, 5);
checkDatasets(b, c);
b.setShape(30);
c.setShape(30);
checkDatasets(b, c);
b.setShape(6, 5);
try {
Dataset d = b.getSliceView(new Slice(1,6,2));
d.setShape(15);
Assert.fail("Should have thrown an illegal argument exception");
} catch (IllegalArgumentException e) {
// do nothing
} catch (Exception e) {
Assert.fail("Should have thrown an illegal argument exception");
}
}
@Test
public void testDatasetVariance() {
Random.seed(12345);
final Dataset image = Maths.multiply(Random.rand(new int[] { 10, 10 }), 1);
double mean = ((Number) image.mean()).doubleValue();
Dataset square = Maths.square(Maths.subtract(image, mean));
double var = ((Number) square.mean()).doubleValue();
Assert.assertEquals(var, image.variance(true).doubleValue(), var * 1.e-15);
}
@Test
public void testBroadcast() {
Dataset a = DatasetFactory.createRange(3, Dataset.INT32);
Dataset b = checkBroadcast2D(a, false, 2, 3);
Assert.assertEquals(1, b.getInt(0, 1));
Assert.assertEquals(1, b.getInt(1, 1));
a.setShape(3, 1);
b = checkBroadcast2D(a, true, 3, 4);
Assert.assertEquals(1, b.getInt(1, 0));
Assert.assertEquals(1, b.getInt(1, 1));
}
private Dataset checkBroadcast2D(Dataset a, boolean broadcastFirstDim, int... broadcastShape) {
Dataset b = a.getBroadcastView(broadcastShape);
Assert.assertArrayEquals(broadcastShape, b.getShape());
int size = AbstractDataset.calcSize(broadcastShape);
Assert.assertEquals(size, b.getSize());
IndexIterator it = b.getIterator(true);
int[] pos = it.getPos();
int i = 0;
while (it.hasNext()) {
i++;
if (broadcastFirstDim) {
Assert.assertEquals(a.getInt(pos[0], 0), b.getInt(pos));
Assert.assertEquals(a.getInt(pos[0], 0), b.getElementLongAbs(it.index));
} else {
Assert.assertEquals(a.getInt(pos[1]), b.getInt(pos));
Assert.assertEquals(a.getInt(pos[1]), b.getElementLongAbs(it.index));
}
}
Assert.assertEquals(size, i);
return b;
}
}
|
Increase time limit factor for Travis-CI
|
org.eclipse.dawnsci.analysis.dataset.test/src/org/eclipse/dawnsci/analysis/dataset/AbstractDatasetTest.java
|
Increase time limit factor for Travis-CI
|
|
Java
|
agpl-3.0
|
e386f3c723235a1c5f59fbb511a8b158a5c4cf58
| 0
|
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
|
172c7f08-2e61-11e5-9284-b827eb9e62be
|
hello.java
|
17271a7c-2e61-11e5-9284-b827eb9e62be
|
172c7f08-2e61-11e5-9284-b827eb9e62be
|
hello.java
|
172c7f08-2e61-11e5-9284-b827eb9e62be
|
|
Java
|
lgpl-2.1
|
4811f6816d8803d162685f6e1eeeda36912938cb
| 0
|
pbondoer/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform,pbondoer/xwiki-platform
|
/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.xwiki.rest;
import java.io.InputStream;
import java.util.logging.LogManager;
import javax.servlet.ServletException;
import org.restlet.Application;
import org.restlet.Context;
import org.xwiki.component.manager.ComponentLookupException;
import org.xwiki.component.manager.ComponentManager;
import org.restlet.ext.servlet.ServerServlet;
/**
* <p>
* The XWiki Restlet servlet is used to provide additional initialization logic to the base Restlet servlet. This
* servlet does the following:
* </p>
* <ul>
* <li>Creates the Restlet application.</li>
* <li>Initializes the logging system by reading the configuration from well-defined locations.</li>
* <li>Injects the component manager in the Restlet application context so that it will be accessible by all the other
* Restlet components.</li>
* <li>Sets the object factory for the JAX-RS application to a factory that will use the component manager in order to
* create instances (this will allow us to declare JAX-RS resources as XWiki components).</li>
* </ul>
*
* @version $Id$
*/
public class XWikiRestletServlet extends ServerServlet
{
private static final String JAVA_LOGGING_PROPERTY_FILE = "java-logging.properties";
private static final long serialVersionUID = 9148448182654390153L;
@Override
protected Application createApplication(Context context)
{
Application application = super.createApplication(context);
/* Retrieve the application context in order to populate it with relevant variables. */
Context applicationContext = application.getContext();
/* Retrieve the component manager and make it available in the restlet application context. */
ComponentManager componentManager = getComponentManager(context);
applicationContext.getAttributes().put(Constants.XWIKI_COMPONENT_MANAGER, componentManager);
/* Set the object factory for instantiating components. */
if (application instanceof XWikiRestletJaxRsApplication) {
XWikiRestletJaxRsApplication jaxrsApplication = (XWikiRestletJaxRsApplication) application;
jaxrsApplication.setObjectFactory(new ComponentsObjectFactory(componentManager));
} else {
log("The Restlet application is not an instance of XWikiRestletJaxRsApplication. Please check your web.xml");
}
return application;
}
@Override
public void init() throws ServletException
{
super.init();
try {
/* Try first in WEB-INF */
InputStream is =
getServletContext().getResourceAsStream(String.format("/WEB-INF/%s", JAVA_LOGGING_PROPERTY_FILE));
/* If nothing is there then try in the current jar */
if (is == null) {
is = getClass().getClassLoader().getResourceAsStream(JAVA_LOGGING_PROPERTY_FILE);
}
if (is != null) {
LogManager.getLogManager().readConfiguration(is);
is.close();
}
} catch (Exception e) {
log("Unable to initialize Java logging framework. Using defaults", e);
}
}
/**
* Finds the correct Component Manager to use to find REST Resource components. This is important so that
* components registered in a child Component Manager are found (for example a REST Resource Component added
* in a subwiki).
*
* @param context the RESTlet context
* @return the Context Component Manager or if it doesn't exist the Root Component Manager
*/
private ComponentManager getComponentManager(Context context)
{
ComponentManager result =
(ComponentManager) getServletContext().getAttribute("org.xwiki.component.manager.ComponentManager");
try {
result = result.getInstance(ComponentManager.class, "context");
} catch (ComponentLookupException e) {
// Return the root CM since there's no Context CM!
}
return result;
}
}
|
xwiki-platform-core/xwiki-platform-rest/xwiki-platform-rest-server/src/main/java/org/xwiki/rest/XWikiRestletServlet.java
|
/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.xwiki.rest;
import java.io.InputStream;
import java.util.logging.LogManager;
import javax.servlet.ServletException;
import org.restlet.Application;
import org.restlet.Context;
import org.xwiki.component.manager.ComponentManager;
import org.restlet.ext.servlet.ServerServlet;
/**
* <p>
* The XWiki Restlet servlet is used to provide additional initialization logic to the base Restlet servlet. This
* servlet does four things:
* </p>
* <ul>
* <li>Creates the Restlet application.</li>
* <li>Initializes the logging system by reading the configuration from well-defined locations.</li>
* <li>Injects the component manager in the Restlet application context so that it will be accessible by all the other
* Restlet components.</li>
* <li>Sets the object factory for the JAX-RS application to a factory that will use the component manager in order to
* create instances (this will allow us to declare JAX-RS resources as XWiki components).</li>
* </ul>
*
* @version $Id$
*/
public class XWikiRestletServlet extends ServerServlet
{
private static final String JAVA_LOGGING_PROPERTY_FILE = "java-logging.properties";
private static final long serialVersionUID = 9148448182654390153L;
@Override
protected Application createApplication(Context context)
{
Application application = super.createApplication(context);
/* Retrieve the application context in order to populate it with relevant variables. */
Context applicationContext = application.getContext();
/* Retrieve the component manager and make it available in the restlet application context. */
ComponentManager componentManager =
(ComponentManager) getServletContext().getAttribute("org.xwiki.component.manager.ComponentManager");
applicationContext.getAttributes().put(Constants.XWIKI_COMPONENT_MANAGER, componentManager);
/* Set the object factory for instantiating components. */
if (application instanceof XWikiRestletJaxRsApplication) {
XWikiRestletJaxRsApplication jaxrsApplication = (XWikiRestletJaxRsApplication) application;
jaxrsApplication.setObjectFactory(new ComponentsObjectFactory(componentManager));
} else {
log("The Restlet application is not an instance of XWikiRestletJaxRsApplication. Please check your web.xml");
}
return application;
}
@Override
public void init() throws ServletException
{
super.init();
try {
/* Try first in WEB-INF */
InputStream is =
getServletContext().getResourceAsStream(String.format("/WEB-INF/%s", JAVA_LOGGING_PROPERTY_FILE));
/* If nothing is there then try in the current jar */
if (is == null) {
is = getClass().getClassLoader().getResourceAsStream(JAVA_LOGGING_PROPERTY_FILE);
}
if (is != null) {
LogManager.getLogManager().readConfiguration(is);
is.close();
}
} catch (Exception e) {
log("Unable to initialize Java logging framework. Using defaults", e);
}
}
}
|
XWIKI-8218: Repository Application's REST resources don't work when installed through the Extension Manager
Note that this still doesn't make it work immediately after the extension has been installed since the XWiki REST module only loads REST Resources once at startup ATM. However it makes it work if you restart XWiki (which was not even working before...)
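
For illustration only (not part of this change): a minimal sketch of a JAX-RS resource declared as an XWiki component, which is what lets ComponentsObjectFactory instantiate resources through the ComponentManager (and, with this fix, find resources registered in a child Component Manager such as a subwiki's). The package, class name, path and registration details below are assumptions, not XWiki's actual conventions; real XWiki REST resources typically also extend framework-provided base classes that are not shown here.

// Hypothetical example -- names, hint and registration details are assumptions.
package org.example.rest;

import javax.inject.Named;
import javax.inject.Singleton;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

import org.xwiki.component.annotation.Component;

@Component
@Named("org.example.rest.PingResource") // hint assumed to be the resource's fully-qualified name
@Singleton
@Path("/ping")
public class PingResource
{
    @GET
    @Produces(MediaType.TEXT_PLAIN)
    public String ping()
    {
        return "pong";
    }
}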
|
xwiki-platform-core/xwiki-platform-rest/xwiki-platform-rest-server/src/main/java/org/xwiki/rest/XWikiRestletServlet.java
|
XWIKI-8218: Repository Application's REST resources don't work when installed through the Extension Manager
|
|
Java
|
apache-2.0
|
817f2e55226e13de8c9acf158fd88d49c06c9f54
| 0
|
gradle/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle
|
/*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.internal.vfs.impl;
import com.google.common.base.CharMatcher;
import com.google.common.base.Splitter;
import com.google.common.collect.Interner;
import org.gradle.internal.file.FileMetadataSnapshot;
import org.gradle.internal.file.Stat;
import org.gradle.internal.hash.FileHasher;
import org.gradle.internal.hash.HashCode;
import org.gradle.internal.snapshot.DirectorySnapshot;
import org.gradle.internal.snapshot.FileMetadata;
import org.gradle.internal.snapshot.FileSystemSnapshotVisitor;
import org.gradle.internal.snapshot.RegularFileSnapshot;
import org.gradle.internal.snapshot.SnapshottingFilter;
import org.gradle.internal.snapshot.impl.DirectorySnapshotter;
import org.gradle.internal.snapshot.impl.FileSystemSnapshotFilter;
import org.gradle.internal.vfs.VirtualFileSystem;
import javax.annotation.Nullable;
import java.io.File;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
public class DefaultVirtualFileSystem implements VirtualFileSystem {
// On Windows, / and \ are separators, on Unix only / is a separator.
private static final Splitter FILE_PATH_SPLITTER = File.separatorChar != '/'
? Splitter.on(CharMatcher.anyOf("/" + File.separator))
: Splitter.on('/');
private final RootNode root = new RootNode();
private final Stat stat;
private final DirectorySnapshotter directorySnapshotter;
private final FileHasher hasher;
public DefaultVirtualFileSystem(FileHasher hasher, Interner<String> stringInterner, Stat stat, String... defaultExcludes) {
this.stat = stat;
this.directorySnapshotter = new DirectorySnapshotter(hasher, stringInterner, defaultExcludes);
this.hasher = hasher;
}
@Override
public synchronized void read(String location, FileSystemSnapshotVisitor visitor) {
readLocation(location)
.accept(visitor);
}
@Override
public synchronized void read(String location, SnapshottingFilter filter, FileSystemSnapshotVisitor visitor) {
if (filter.isEmpty()) {
read(location, visitor);
} else {
readLocation(location)
.accept(new FileSystemSnapshotFilter.FilteringVisitor(filter.getAsSnapshotPredicate(), visitor, new AtomicBoolean(false)));
}
}
protected Node createNode(String location, Node parent) {
File file = new File(location);
FileMetadataSnapshot stat = this.stat.stat(file);
switch (stat.getType()) {
case RegularFile:
HashCode hash = hasher.hash(file);
return new FileNode(new RegularFileSnapshot(location, file.getName(), hash, FileMetadata.from(stat)));
case Missing:
return new MissingFileNode(location, file.getName());
case Directory:
DirectorySnapshot directorySnapshot = (DirectorySnapshot) directorySnapshotter.snapshot(location, null, new AtomicBoolean(false));
return new CompleteDirectoryNode(parent, directorySnapshot);
default:
throw new UnsupportedOperationException();
}
}
private Node readLocation(String location) {
List<String> pathSegments = getPathSegments(location);
Function<Node, Node> nodeCreator = parent -> createNode(location, parent);
Node parent = findParent(pathSegments);
return parent.replaceChild(
pathSegments.get(pathSegments.size() - 1),
nodeCreator,
current -> current.getType() != Node.Type.UNKNOWN
? current
: nodeCreator.apply(parent)
);
}
private Node findParent(List<String> pathSegments) {
Node foundNode = root;
for (int i = 0; i < pathSegments.size() - 1; i++) {
String pathSegment = pathSegments.get(i);
foundNode = foundNode.getOrCreateChild(pathSegment, parent -> new DefaultNode(pathSegment, parent));
}
return foundNode;
}
@Override
public synchronized void update(Iterable<String> locations, Runnable action) {
locations.forEach(location -> {
List<String> pathSegments = getPathSegments(location);
Node parentLocation = findParentNotCreating(pathSegments);
if (parentLocation != null) {
String name = pathSegments.get(pathSegments.size() - 1);
parentLocation.replaceChild(name, parent -> null, nodeToBeReplaced -> null);
}
});
action.run();
}
@Override
public synchronized void invalidateAll() {
root.clear();
}
@Nullable
private Node findParentNotCreating(List<String> pathSegments) {
Node foundNode = root;
for (int i = 0; i < pathSegments.size() - 1; i++) {
String pathSegment = pathSegments.get(i);
foundNode = foundNode.getOrCreateChild(pathSegment, parent -> null);
if (foundNode == null) {
return null;
}
}
return foundNode;
}
private static List<String> getPathSegments(String path) {
return FILE_PATH_SPLITTER.splitToList(path);
}
}
|
subprojects/snapshots/src/main/java/org/gradle/internal/vfs/impl/DefaultVirtualFileSystem.java
|
/*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.internal.vfs.impl;
import com.google.common.base.CharMatcher;
import com.google.common.base.Splitter;
import com.google.common.collect.Interner;
import org.gradle.internal.file.FileMetadataSnapshot;
import org.gradle.internal.file.Stat;
import org.gradle.internal.hash.FileHasher;
import org.gradle.internal.hash.HashCode;
import org.gradle.internal.snapshot.DirectorySnapshot;
import org.gradle.internal.snapshot.FileMetadata;
import org.gradle.internal.snapshot.FileSystemSnapshotVisitor;
import org.gradle.internal.snapshot.RegularFileSnapshot;
import org.gradle.internal.snapshot.SnapshottingFilter;
import org.gradle.internal.snapshot.impl.DirectorySnapshotter;
import org.gradle.internal.snapshot.impl.FileSystemSnapshotFilter;
import org.gradle.internal.vfs.VirtualFileSystem;
import javax.annotation.Nullable;
import java.io.File;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
public class DefaultVirtualFileSystem implements VirtualFileSystem {
// On Windows, / and \ are separators, on Unix only / is a separator.
private static final Splitter FILE_PATH_SPLITTER = File.separatorChar != '/'
? Splitter.on(CharMatcher.anyOf("/" + File.separator))
: Splitter.on('/');
private final RootNode root = new RootNode();
private final Stat stat;
private final DirectorySnapshotter directorySnapshotter;
private final FileHasher hasher;
public DefaultVirtualFileSystem(FileHasher hasher, Interner<String> stringInterner, Stat stat, String... defaultExcludes) {
this.stat = stat;
this.directorySnapshotter = new DirectorySnapshotter(hasher, stringInterner, defaultExcludes);
this.hasher = hasher;
}
@Override
public void read(String location, FileSystemSnapshotVisitor visitor) {
readLocation(location)
.accept(visitor);
}
@Override
public void read(String location, SnapshottingFilter filter, FileSystemSnapshotVisitor visitor) {
if (filter.isEmpty()) {
read(location, visitor);
} else {
readLocation(location)
.accept(new FileSystemSnapshotFilter.FilteringVisitor(filter.getAsSnapshotPredicate(), visitor, new AtomicBoolean(false)));
}
}
protected Node createNode(String location, Node parent) {
File file = new File(location);
FileMetadataSnapshot stat = this.stat.stat(file);
switch (stat.getType()) {
case RegularFile:
HashCode hash = hasher.hash(file);
return new FileNode(new RegularFileSnapshot(location, file.getName(), hash, FileMetadata.from(stat)));
case Missing:
return new MissingFileNode(location, file.getName());
case Directory:
DirectorySnapshot directorySnapshot = (DirectorySnapshot) directorySnapshotter.snapshot(location, null, new AtomicBoolean(false));
return new CompleteDirectoryNode(parent, directorySnapshot);
default:
throw new UnsupportedOperationException();
}
}
private Node readLocation(String location) {
List<String> pathSegments = getPathSegments(location);
Function<Node, Node> nodeCreator = parent -> createNode(location, parent);
Node parent = findParent(pathSegments);
return parent.replaceChild(
pathSegments.get(pathSegments.size() - 1),
nodeCreator,
current -> current.getType() != Node.Type.UNKNOWN
? current
: nodeCreator.apply(parent)
);
}
private Node findParent(List<String> pathSegments) {
Node foundNode = root;
for (int i = 0; i < pathSegments.size() - 1; i++) {
String pathSegment = pathSegments.get(i);
foundNode = foundNode.getOrCreateChild(pathSegment, parent -> new DefaultNode(pathSegment, parent));
}
return foundNode;
}
@Override
public void update(Iterable<String> locations, Runnable action) {
locations.forEach(location -> {
List<String> pathSegments = getPathSegments(location);
Node parentLocation = findParentNotCreating(pathSegments);
if (parentLocation != null) {
String name = pathSegments.get(pathSegments.size() - 1);
parentLocation.replaceChild(name, parent -> null, nodeToBeReplaced -> null);
}
});
action.run();
}
@Override
public void invalidateAll() {
root.clear();
}
@Nullable
private Node findParentNotCreating(List<String> pathSegments) {
Node foundNode = root;
for (int i = 0; i < pathSegments.size() - 1; i++) {
String pathSegment = pathSegments.get(i);
foundNode = foundNode.getOrCreateChild(pathSegment, parent -> null);
if (foundNode == null) {
return null;
}
}
return foundNode;
}
private static List<String> getPathSegments(String path) {
return FILE_PATH_SPLITTER.splitToList(path);
}
}
|
Make implementation single threaded
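
For illustration only (not part of this commit): the change above serializes access to the mutable in-memory node tree by making the public read/update/invalidateAll entry points synchronized, rather than switching to concurrent data structures. A minimal sketch of the same pattern, with hypothetical names:

// Hypothetical sketch -- not Gradle code. A non-thread-safe map guarded by synchronized entry points.
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

class SynchronizedNodeCache {
    // Plain HashMap is safe here only because every public method below holds the instance lock.
    private final Map<String, Object> nodes = new HashMap<>();

    public synchronized Object read(String path, Function<String, Object> creator) {
        // Create the entry on demand, mirroring the read/createNode flow above.
        return nodes.computeIfAbsent(path, creator);
    }

    public synchronized void invalidate(String path) {
        nodes.remove(path);
    }

    public synchronized void invalidateAll() {
        nodes.clear();
    }
}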
|
subprojects/snapshots/src/main/java/org/gradle/internal/vfs/impl/DefaultVirtualFileSystem.java
|
Make implementation single threaded
|
|
Java
|
apache-2.0
|
d53f504eb70d7a05196447cb4b8fad767b1efc01
| 0
|
magnoliales/magnolia-annotated-content-apps
|
package app;
import com.magnoliales.annotatedapp.AnnotatedContentAppsAppDescriptor;
import com.magnoliales.annotatedapp.constraint.AnnotatedDropConstraint;
import com.magnoliales.annotatedapp.dialog.AnnotatedFormDialogDefinition;
import nodes.Contact;
public class ExampleAnnotatedAppAppDescriptor extends AnnotatedContentAppsAppDescriptor {
public ExampleAnnotatedAppAppDescriptor() {
super(Contact.class, ContactDropConstraint.class, new AnnotatedFormDialogDefinition[] {
new ContactFormDialogDefinition()
});
}
public static class ContactDropConstraint extends AnnotatedDropConstraint {
public ContactDropConstraint() {
super(Contact.class);
}
}
public static class ContactFormDialogDefinition extends AnnotatedFormDialogDefinition {
public ContactFormDialogDefinition() {
super(Contact.class);
}
}
}
|
magnolia-annotated-content-apps-example/src/main/java/app/ExampleAnnotatedAppAppDescriptor.java
|
package app;
import com.magnoliales.annotatedapp.AnnotatedContentAppsAppDescriptor;
import com.magnoliales.annotatedapp.constraint.AnnotatedDropConstraint;
import com.magnoliales.annotatedapp.dialog.AnnotatedFormDialogDefinition;
import nodes.Contact;
public class ExampleAnnotatedAppAppDescriptor extends AnnotatedContentAppsAppDescriptor {
public ExampleAnnotatedAppAppDescriptor() {
super(Contact.class, MemberDropConstraint.class, new AnnotatedFormDialogDefinition[] {
new MemberFormDialogDefinition()
});
}
public static class MemberDropConstraint extends AnnotatedDropConstraint {
public MemberDropConstraint() {
super(Contact.class);
}
}
public static class MemberFormDialogDefinition extends AnnotatedFormDialogDefinition {
public MemberFormDialogDefinition() {
super(Contact.class);
}
}
}
|
Correctly naming classes
|
magnolia-annotated-content-apps-example/src/main/java/app/ExampleAnnotatedAppAppDescriptor.java
|
Correctly naming classes
|
|
Java
|
apache-2.0
|
2af8126cb7b61e7fdddb6ca14c9d2d658700988e
| 0
|
IHTSDO/snow-owl,IHTSDO/snow-owl,IHTSDO/snow-owl,IHTSDO/snow-owl
|
/*
* Copyright 2011-2017 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.snomed.api.impl;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Lists.newArrayList;
import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.List;
import java.util.NoSuchElementException;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortField.Type;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TotalHitCountCollector;
import com.b2international.index.compat.SingleDirectoryIndexImpl;
import com.b2international.index.lucene.Fields;
import com.b2international.index.lucene.QueryBuilderBase.QueryBuilder;
import com.b2international.snowowl.core.ApplicationContext;
import com.b2international.snowowl.core.api.IBranchPath;
import com.b2international.snowowl.datastore.BranchPathUtils;
import com.b2international.snowowl.datastore.server.domain.StorageRef;
import com.b2international.snowowl.eventbus.IEventBus;
import com.b2international.snowowl.snomed.SnomedConstants.Concepts;
import com.b2international.snowowl.snomed.api.domain.classification.ChangeNature;
import com.b2international.snowowl.snomed.api.domain.classification.ClassificationStatus;
import com.b2international.snowowl.snomed.api.domain.classification.IClassificationRun;
import com.b2international.snowowl.snomed.api.domain.classification.IEquivalentConcept;
import com.b2international.snowowl.snomed.api.domain.classification.IEquivalentConceptSet;
import com.b2international.snowowl.snomed.api.domain.classification.IRelationshipChange;
import com.b2international.snowowl.snomed.api.domain.classification.IRelationshipChangeList;
import com.b2international.snowowl.snomed.api.exception.ClassificationRunNotFoundException;
import com.b2international.snowowl.snomed.api.impl.domain.classification.ClassificationRun;
import com.b2international.snowowl.snomed.api.impl.domain.classification.EquivalentConcept;
import com.b2international.snowowl.snomed.api.impl.domain.classification.EquivalentConceptSet;
import com.b2international.snowowl.snomed.api.impl.domain.classification.RelationshipChange;
import com.b2international.snowowl.snomed.api.impl.domain.classification.RelationshipChangeList;
import com.b2international.snowowl.snomed.core.domain.RelationshipModifier;
import com.b2international.snowowl.snomed.datastore.SnomedDatastoreActivator;
import com.b2international.snowowl.snomed.datastore.request.SnomedRequests;
import com.b2international.snowowl.snomed.reasoner.classification.AbstractEquivalenceSet;
import com.b2international.snowowl.snomed.reasoner.classification.EquivalenceSet;
import com.b2international.snowowl.snomed.reasoner.classification.GetResultResponseChanges;
import com.b2international.snowowl.snomed.reasoner.classification.entry.AbstractChangeEntry.Nature;
import com.b2international.snowowl.snomed.reasoner.classification.entry.RelationshipChangeEntry;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.primitives.Ints;
public class ClassificationRunIndex extends SingleDirectoryIndexImpl {
private static final String FIELD_ID = "id";
private static final String FIELD_CLASS = "class";
private static final String FIELD_BRANCH_PATH = "branchPath";
private static final String FIELD_USER_ID = "userId";
private static final String FIELD_CREATION_DATE = "creationDate";
private static final String FIELD_STATUS = "status";
private static final String FIELD_SOURCE = "source";
private static final String FIELD_COMPONENT_ID = "componentId";
private final ObjectMapper objectMapper;
public ClassificationRunIndex(final File directory) {
super(directory);
objectMapper = new ObjectMapper();
}
public void trimIndex(int maximumResultsToKeep) throws IOException {
final Query query = Fields.newQuery()
.field(FIELD_CLASS, ClassificationRun.class.getSimpleName())
.matchAll();
// Sort by decreasing document order
final Sort sort = new Sort(new SortField(null, Type.DOC, true));
final ClassificationRun lastRunToKeep = Iterables.getFirst(search(query, ClassificationRun.class, sort, maximumResultsToKeep - 1, 1), null);
if (lastRunToKeep == null) {
return;
}
final Date lastCreationDate = lastRunToKeep.getCreationDate();
final Query trimmingQuery = NumericRangeQuery.newLongRange(FIELD_CREATION_DATE, null, lastCreationDate.getTime(), false, false);
writer.deleteDocuments(trimmingQuery);
commit();
}
public void invalidateClassificationRuns() throws IOException {
final Query statusQuery = Fields.newQuery()
.field(FIELD_STATUS, ClassificationStatus.COMPLETED.name())
.field(FIELD_STATUS, ClassificationStatus.RUNNING.name())
.field(FIELD_STATUS, ClassificationStatus.SAVING_IN_PROGRESS.name())
.field(FIELD_STATUS, ClassificationStatus.SCHEDULED.name())
.matchAny();
final Query query = Fields.newQuery()
.field(FIELD_CLASS, ClassificationRun.class.getSimpleName())
.and(statusQuery)
.matchAll();
IndexSearcher searcher = null;
try {
searcher = manager.acquire();
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(query, collector);
final int totalHits = collector.getTotalHits();
final int docsToRetrieve = Ints.min(searcher.getIndexReader().maxDoc(), totalHits);
if (docsToRetrieve < 1) {
return;
}
final TopDocs docs = searcher.search(query, null, docsToRetrieve, Sort.INDEXORDER, false, false);
final ScoreDoc[] scoreDocs = docs.scoreDocs;
final ObjectReader reader = objectMapper.reader(ClassificationRun.class);
for (int i = 0; i < scoreDocs.length; i++) {
final Document sourceDocument = searcher.doc(scoreDocs[i].doc, ImmutableSet.of(FIELD_BRANCH_PATH, FIELD_SOURCE));
final String branchPath = sourceDocument.get(FIELD_BRANCH_PATH);
final String source = sourceDocument.get(FIELD_SOURCE);
final ClassificationRun run = reader.readValue(source);
run.setStatus(ClassificationStatus.STALE);
upsertClassificationRunNoCommit(BranchPathUtils.createPath(branchPath), run);
}
commit();
} finally {
if (null != searcher) {
manager.release(searcher);
}
}
}
public List<IClassificationRun> getAllClassificationRuns(final StorageRef storageRef) throws IOException {
final Query query = Fields.newQuery()
.field(FIELD_CLASS, ClassificationRun.class.getSimpleName())
.field(FIELD_BRANCH_PATH, storageRef.getBranchPath())
.matchAll();
return this.<IClassificationRun>search(query, ClassificationRun.class);
}
public IClassificationRun getClassificationRun(final StorageRef storageRef, final String classificationId) throws IOException {
final Query query = createClassQuery(ClassificationRun.class.getSimpleName(), classificationId, storageRef, null);
try {
return Iterables.getOnlyElement(search(query, ClassificationRun.class, 1));
} catch (final NoSuchElementException e) {
throw new ClassificationRunNotFoundException(classificationId);
}
}
public void upsertClassificationRun(final IBranchPath branchPath, final ClassificationRun classificationRun) throws IOException {
upsertClassificationRunNoCommit(branchPath, classificationRun);
commit();
}
private void upsertClassificationRunNoCommit(final IBranchPath branchPath, final ClassificationRun classificationRun) throws IOException {
final Document updatedDocument = new Document();
Fields.searchOnlyStringField(FIELD_CLASS).addTo(updatedDocument, ClassificationRun.class.getSimpleName());
Fields.searchOnlyStringField(FIELD_ID).addTo(updatedDocument, classificationRun.getId());
Fields.searchOnlyStringField(FIELD_STATUS).addTo(updatedDocument, classificationRun.getStatus().name());
Fields.longField(FIELD_CREATION_DATE).addTo(updatedDocument, classificationRun.getCreationDate().getTime());
Fields.stringField(FIELD_USER_ID).addTo(updatedDocument, classificationRun.getUserId());
Fields.stringField(FIELD_BRANCH_PATH).addTo(updatedDocument, branchPath.getPath());
Fields.storedOnlyStringField(FIELD_SOURCE).addTo(updatedDocument, objectMapper.writer().writeValueAsString(classificationRun));
final Query query = Fields.newQuery()
.field(FIELD_CLASS, ClassificationRun.class.getSimpleName())
.field(FIELD_ID, classificationRun.getId())
.matchAll();
writer.deleteDocuments(query);
writer.addDocument(updatedDocument);
}
public void updateClassificationRunStatus(final String id, final ClassificationStatus newStatus) throws IOException {
updateClassificationRunStatus(id, newStatus, null);
}
public void updateClassificationRunStatus(final String id, final ClassificationStatus newStatus, final GetResultResponseChanges changes) throws IOException {
final Document sourceDocument = getClassificationRunDocument(id);
if (null == sourceDocument) {
return;
}
final IBranchPath branchPath = BranchPathUtils.createPath(sourceDocument.get(FIELD_BRANCH_PATH));
final ClassificationRun classificationRun = objectMapper.reader(ClassificationRun.class).readValue(sourceDocument.get(FIELD_SOURCE));
if (newStatus.equals(classificationRun.getStatus())) {
return;
}
classificationRun.setStatus(newStatus);
if (ClassificationStatus.COMPLETED.equals(newStatus)) {
checkNotNull(changes, "GetResultResponseChanges are required to update a completed classification.");
if (null == classificationRun.getCompletionDate()) {
classificationRun.setCompletionDate(new Date());
}
final ClassificationIssueFlags issueFlags = indexChanges(sourceDocument, id, changes);
classificationRun.setInferredRelationshipChangesFound(!changes.getRelationshipEntries().isEmpty());
classificationRun.setRedundantStatedRelationshipsFound(issueFlags.isRedundantStatedFound());
classificationRun.setEquivalentConceptsFound(issueFlags.isEquivalentConceptsFound());
} else if (ClassificationStatus.SAVED.equals(newStatus)) {
classificationRun.setSaveDate(new Date());
}
upsertClassificationRun(branchPath, classificationRun);
}
private boolean isStatedRelationshipPairExists(IBranchPath branchPath, RelationshipChangeEntry relationshipChange) {
return SnomedRequests.prepareSearchRelationship()
.setLimit(0)
.filterBySource(relationshipChange.getSource().getId().toString())
.filterByDestination(relationshipChange.getDestination().getId().toString())
.filterByType(relationshipChange.getType().getId().toString())
.filterByGroup(relationshipChange.getGroup()) // this is questionable?
.filterByCharacteristicType(Concepts.STATED_RELATIONSHIP)
.build(SnomedDatastoreActivator.REPOSITORY_UUID, branchPath.getPath())
.execute(ApplicationContext.getServiceForClass(IEventBus.class))
.getSync().getTotal() > 0;
}
public void deleteClassificationData(final String classificationId) throws IOException {
// Removes all documents, not just the classification run document
writer.deleteDocuments(new Term(FIELD_ID, classificationId));
commit();
}
private ClassificationIssueFlags indexChanges(Document sourceDocument, String id, final GetResultResponseChanges changes) throws IOException {
final IBranchPath branchPath = BranchPathUtils.createPath(sourceDocument.get(FIELD_BRANCH_PATH));
final String userId = sourceDocument.get(FIELD_USER_ID);
final long creationDate = sourceDocument.getField(FIELD_CREATION_DATE).numericValue().longValue();
final ClassificationIssueFlags classificationIssueFlags = new ClassificationIssueFlags();
final List<AbstractEquivalenceSet> equivalenceSets = changes.getEquivalenceSets();
classificationIssueFlags.setEquivalentConceptsFound(!equivalenceSets.isEmpty());
for (final AbstractEquivalenceSet equivalenceSet : equivalenceSets) {
final List<IEquivalentConcept> convertedEquivalentConcepts = newArrayList();
for (final String equivalentId : equivalenceSet.getConceptIds()) {
addEquivalentConcept(convertedEquivalentConcepts, equivalentId);
}
if (equivalenceSet instanceof EquivalenceSet) {
addEquivalentConcept(convertedEquivalentConcepts, ((EquivalenceSet) equivalenceSet).getSuggestedConceptId());
}
final EquivalentConceptSet convertedEquivalenceSet = new EquivalentConceptSet();
convertedEquivalenceSet.setUnsatisfiable(equivalenceSet.isUnsatisfiable());
convertedEquivalenceSet.setEquivalentConcepts(convertedEquivalentConcepts);
indexResult(id, branchPath, userId, creationDate, EquivalentConceptSet.class, equivalenceSet.getConceptIds().get(0), convertedEquivalenceSet);
}
for (final RelationshipChangeEntry relationshipChange : changes.getRelationshipEntries()) {
final RelationshipChange convertedRelationshipChange = new RelationshipChange();
final ChangeNature changeNature = Nature.INFERRED.equals(relationshipChange.getNature()) ? ChangeNature.INFERRED : ChangeNature.REDUNDANT;
convertedRelationshipChange.setChangeNature(changeNature);
convertedRelationshipChange.setId(relationshipChange.getId());
convertedRelationshipChange.setDestinationId(Long.toString(relationshipChange.getDestination().getId()));
convertedRelationshipChange.setDestinationNegated(relationshipChange.isDestinationNegated());
final String characteristicTypeId;
if (changeNature == ChangeNature.INFERRED) {
characteristicTypeId = Concepts.INFERRED_RELATIONSHIP;
} else {
final boolean statedRelationshipExists = isStatedRelationshipPairExists(branchPath, relationshipChange);
characteristicTypeId = statedRelationshipExists ? Concepts.STATED_RELATIONSHIP : Concepts.INFERRED_RELATIONSHIP;
if (statedRelationshipExists) {
classificationIssueFlags.setRedundantStatedFound(true);
}
}
convertedRelationshipChange.setCharacteristicTypeId(characteristicTypeId);
convertedRelationshipChange.setGroup(relationshipChange.getGroup());
final String modifierId = Long.toString(relationshipChange.getModifier().getId());
convertedRelationshipChange.setModifier(Concepts.UNIVERSAL_RESTRICTION_MODIFIER.equals(modifierId) ? RelationshipModifier.UNIVERSAL : RelationshipModifier.EXISTENTIAL);
convertedRelationshipChange.setSourceId(Long.toString(relationshipChange.getSource().getId()));
convertedRelationshipChange.setTypeId(Long.toString(relationshipChange.getType().getId()));
convertedRelationshipChange.setUnionGroup(relationshipChange.getUnionGroup());
indexResult(id, branchPath, userId, creationDate, RelationshipChange.class, convertedRelationshipChange.getSourceId(), convertedRelationshipChange);
}
commit();
return classificationIssueFlags;
}
private void addEquivalentConcept(final List<IEquivalentConcept> convertedEquivalentConcepts, final String equivalentId) {
final EquivalentConcept convertedConcept = new EquivalentConcept();
convertedConcept.setId(equivalentId);
convertedEquivalentConcepts.add(convertedConcept);
}
/**
* @param storageRef the storage reference identifying the branch to read from
* @param classificationId the identifier of the classification run
* @return the equivalent concept sets recorded for the given classification run
*/
public List<IEquivalentConceptSet> getEquivalentConceptSets(final StorageRef storageRef, final String classificationId) throws IOException {
final Query query = createClassQuery(EquivalentConceptSet.class.getSimpleName(), classificationId, storageRef, null);
return this.<IEquivalentConceptSet>search(query, EquivalentConceptSet.class);
}
/**
* @param storageRef the storage reference identifying the branch to read from
* @param classificationId the identifier of the classification run
* @param sourceConceptId used to restrict results, can be null
* @param offset the index of the first relationship change to return
* @param limit the maximum number of relationship changes to return
* @return the matching relationship changes together with the total hit count
*/
public IRelationshipChangeList getRelationshipChanges(final StorageRef storageRef, final String classificationId, final String sourceConceptId, final int offset, final int limit) throws IOException {
final Query query = createClassQuery(RelationshipChange.class.getSimpleName(), classificationId, storageRef, sourceConceptId);
final RelationshipChangeList result = new RelationshipChangeList();
result.setTotal(getHitCount(query));
result.setChanges(this.<IRelationshipChange>search(query, RelationshipChange.class, offset, limit));
return result;
}
private <T> void indexResult(final String id, final IBranchPath branchPath, final String userId, final long creationDate,
final Class<T> clazz, String componentId, final T value) throws IOException {
final Document doc = new Document();
Fields.searchOnlyStringField(FIELD_CLASS).addTo(doc, clazz.getSimpleName());
Fields.searchOnlyStringField(FIELD_ID).addTo(doc, id.toString());
Fields.searchOnlyStringField(FIELD_USER_ID).addTo(doc, userId);
Fields.searchOnlyLongField(FIELD_CREATION_DATE).addTo(doc, creationDate);
Fields.searchOnlyStringField(FIELD_BRANCH_PATH).addTo(doc, branchPath.getPath());
Fields.searchOnlyStringField(FIELD_COMPONENT_ID).addTo(doc, componentId);
Fields.storedOnlyStringField(FIELD_SOURCE).addTo(doc, objectMapper.writer().writeValueAsString(value));
writer.addDocument(doc);
}
private Document getClassificationRunDocument(final String id) throws IOException {
final Query query = Fields.newQuery()
.field(FIELD_CLASS, ClassificationRun.class.getSimpleName())
.field(FIELD_ID, id.toString())
.matchAll();
return Iterables.getFirst(search(query, 1), null);
}
private Query createClassQuery(final String className, final String classificationId, StorageRef storageRef, final String componentId) {
final QueryBuilder query = Fields.newQuery()
.field(FIELD_CLASS, className)
.field(FIELD_ID, classificationId)
.field(FIELD_BRANCH_PATH, storageRef.getBranchPath());
if (componentId != null) {
query.field(FIELD_COMPONENT_ID, componentId);
}
return query.matchAll();
}
private <T> List<T> search(final Query query, final Class<? extends T> sourceClass) throws IOException {
return search(query, sourceClass, Integer.MAX_VALUE);
}
private <T> List<T> search(final Query query, final Class<? extends T> sourceClass, final int limit) throws IOException {
return search(query, sourceClass, 0, limit);
}
private <T> List<T> search(final Query query, final Class<? extends T> sourceClass, final int offset, final int limit) throws IOException {
return search(query, sourceClass, Sort.INDEXORDER, offset, limit);
}
private <T> List<T> search(final Query query, final Class<? extends T> sourceClass, Sort sort, final int offset, final int limit) throws IOException {
IndexSearcher searcher = null;
try {
searcher = manager.acquire();
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(query, collector);
final int totalHits = collector.getTotalHits();
final int saturatedSum = Ints.saturatedCast((long) offset + limit);
final int docsToRetrieve = Ints.min(saturatedSum, searcher.getIndexReader().maxDoc(), totalHits);
final ImmutableList.Builder<T> resultBuilder = ImmutableList.builder();
if (docsToRetrieve < 1) {
return resultBuilder.build();
}
final TopDocs docs = searcher.search(query, null, docsToRetrieve, sort, false, false);
final ScoreDoc[] scoreDocs = docs.scoreDocs;
final ObjectReader reader = objectMapper.reader(sourceClass);
for (int i = offset; i < docsToRetrieve && i < scoreDocs.length; i++) {
final Document sourceDocument = searcher.doc(scoreDocs[i].doc, ImmutableSet.of(FIELD_SOURCE));
final String source = sourceDocument.get(FIELD_SOURCE);
final T deserializedSource = reader.readValue(source);
resultBuilder.add(deserializedSource);
}
return resultBuilder.build();
} finally {
if (null != searcher) {
manager.release(searcher);
}
}
}
private List<Document> search(final Query query, final int limit) throws IOException {
IndexSearcher searcher = null;
try {
searcher = manager.acquire();
final TopDocs docs = searcher.search(query, null, limit, Sort.INDEXORDER, false, false);
final ImmutableList.Builder<Document> resultBuilder = ImmutableList.builder();
for (final ScoreDoc scoreDoc : docs.scoreDocs) {
resultBuilder.add(searcher.doc(scoreDoc.doc));
}
return resultBuilder.build();
} finally {
if (null != searcher) {
manager.release(searcher);
}
}
}
private int getHitCount(final Query query) throws IOException {
IndexSearcher searcher = null;
try {
searcher = manager.acquire();
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(query, collector);
return collector.getTotalHits();
} finally {
if (null != searcher) {
manager.release(searcher);
}
}
}
private class ClassificationIssueFlags {
private boolean redundantStatedFound;
private boolean equivalentConceptsFound;
public boolean isRedundantStatedFound() {
return redundantStatedFound;
}
public void setRedundantStatedFound(boolean redundantStatedFound) {
this.redundantStatedFound = redundantStatedFound;
}
public boolean isEquivalentConceptsFound() {
return equivalentConceptsFound;
}
public void setEquivalentConceptsFound(boolean equivalentConceptsFound) {
this.equivalentConceptsFound = equivalentConceptsFound;
}
}
}
|
snomed/com.b2international.snowowl.snomed.api.impl/src/com/b2international/snowowl/snomed/api/impl/ClassificationRunIndex.java
|
/*
* Copyright 2011-2017 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.snomed.api.impl;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Lists.newArrayList;
import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.List;
import java.util.NoSuchElementException;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortField.Type;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TotalHitCountCollector;
import com.b2international.index.compat.SingleDirectoryIndexImpl;
import com.b2international.index.lucene.Fields;
import com.b2international.index.lucene.QueryBuilderBase.QueryBuilder;
import com.b2international.snowowl.core.ApplicationContext;
import com.b2international.snowowl.core.api.IBranchPath;
import com.b2international.snowowl.datastore.BranchPathUtils;
import com.b2international.snowowl.datastore.server.domain.StorageRef;
import com.b2international.snowowl.eventbus.IEventBus;
import com.b2international.snowowl.snomed.SnomedConstants.Concepts;
import com.b2international.snowowl.snomed.api.domain.classification.ChangeNature;
import com.b2international.snowowl.snomed.api.domain.classification.ClassificationStatus;
import com.b2international.snowowl.snomed.api.domain.classification.IClassificationRun;
import com.b2international.snowowl.snomed.api.domain.classification.IEquivalentConcept;
import com.b2international.snowowl.snomed.api.domain.classification.IEquivalentConceptSet;
import com.b2international.snowowl.snomed.api.domain.classification.IRelationshipChange;
import com.b2international.snowowl.snomed.api.domain.classification.IRelationshipChangeList;
import com.b2international.snowowl.snomed.api.exception.ClassificationRunNotFoundException;
import com.b2international.snowowl.snomed.api.impl.domain.classification.ClassificationRun;
import com.b2international.snowowl.snomed.api.impl.domain.classification.EquivalentConcept;
import com.b2international.snowowl.snomed.api.impl.domain.classification.EquivalentConceptSet;
import com.b2international.snowowl.snomed.api.impl.domain.classification.RelationshipChange;
import com.b2international.snowowl.snomed.api.impl.domain.classification.RelationshipChangeList;
import com.b2international.snowowl.snomed.core.domain.RelationshipModifier;
import com.b2international.snowowl.snomed.core.domain.SnomedRelationship;
import com.b2international.snowowl.snomed.datastore.SnomedDatastoreActivator;
import com.b2international.snowowl.snomed.datastore.request.SnomedRequests;
import com.b2international.snowowl.snomed.reasoner.classification.AbstractEquivalenceSet;
import com.b2international.snowowl.snomed.reasoner.classification.EquivalenceSet;
import com.b2international.snowowl.snomed.reasoner.classification.GetResultResponseChanges;
import com.b2international.snowowl.snomed.reasoner.classification.entry.AbstractChangeEntry.Nature;
import com.b2international.snowowl.snomed.reasoner.classification.entry.RelationshipChangeEntry;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.primitives.Ints;
public class ClassificationRunIndex extends SingleDirectoryIndexImpl {
private static final String FIELD_ID = "id";
private static final String FIELD_CLASS = "class";
private static final String FIELD_BRANCH_PATH = "branchPath";
private static final String FIELD_USER_ID = "userId";
private static final String FIELD_CREATION_DATE = "creationDate";
private static final String FIELD_STATUS = "status";
private static final String FIELD_SOURCE = "source";
private static final String FIELD_COMPONENT_ID = "componentId";
private final ObjectMapper objectMapper;
public ClassificationRunIndex(final File directory) {
super(directory);
objectMapper = new ObjectMapper();
}
public void trimIndex(int maximumResultsToKeep) throws IOException {
final Query query = Fields.newQuery()
.field(FIELD_CLASS, ClassificationRun.class.getSimpleName())
.matchAll();
// Sort by decreasing document order
final Sort sort = new Sort(new SortField(null, Type.DOC, true));
final ClassificationRun lastRunToKeep = Iterables.getFirst(search(query, ClassificationRun.class, sort, maximumResultsToKeep - 1, 1), null);
if (lastRunToKeep == null) {
return;
}
final Date lastCreationDate = lastRunToKeep.getCreationDate();
final Query trimmingQuery = NumericRangeQuery.newLongRange(FIELD_CREATION_DATE, null, lastCreationDate.getTime(), false, false);
writer.deleteDocuments(trimmingQuery);
commit();
}
public void invalidateClassificationRuns() throws IOException {
final Query statusQuery = Fields.newQuery()
.field(FIELD_STATUS, ClassificationStatus.COMPLETED.name())
.field(FIELD_STATUS, ClassificationStatus.RUNNING.name())
.field(FIELD_STATUS, ClassificationStatus.SAVING_IN_PROGRESS.name())
.field(FIELD_STATUS, ClassificationStatus.SCHEDULED.name())
.matchAny();
final Query query = Fields.newQuery()
.field(FIELD_CLASS, ClassificationRun.class.getSimpleName())
.and(statusQuery)
.matchAll();
IndexSearcher searcher = null;
try {
searcher = manager.acquire();
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(query, collector);
final int totalHits = collector.getTotalHits();
final int docsToRetrieve = Ints.min(searcher.getIndexReader().maxDoc(), totalHits);
if (docsToRetrieve < 1) {
return;
}
final TopDocs docs = searcher.search(query, null, docsToRetrieve, Sort.INDEXORDER, false, false);
final ScoreDoc[] scoreDocs = docs.scoreDocs;
final ObjectReader reader = objectMapper.reader(ClassificationRun.class);
for (int i = 0; i < scoreDocs.length; i++) {
final Document sourceDocument = searcher.doc(scoreDocs[i].doc, ImmutableSet.of(FIELD_BRANCH_PATH, FIELD_SOURCE));
final String branchPath = sourceDocument.get(FIELD_BRANCH_PATH);
final String source = sourceDocument.get(FIELD_SOURCE);
final ClassificationRun run = reader.readValue(source);
run.setStatus(ClassificationStatus.STALE);
upsertClassificationRunNoCommit(BranchPathUtils.createPath(branchPath), run);
}
commit();
} finally {
if (null != searcher) {
manager.release(searcher);
}
}
}
public List<IClassificationRun> getAllClassificationRuns(final StorageRef storageRef) throws IOException {
final Query query = Fields.newQuery()
.field(FIELD_CLASS, ClassificationRun.class.getSimpleName())
.field(FIELD_BRANCH_PATH, storageRef.getBranchPath())
.matchAll();
return this.<IClassificationRun>search(query, ClassificationRun.class);
}
public IClassificationRun getClassificationRun(final StorageRef storageRef, final String classificationId) throws IOException {
final Query query = createClassQuery(ClassificationRun.class.getSimpleName(), classificationId, storageRef, null);
try {
return Iterables.getOnlyElement(search(query, ClassificationRun.class, 1));
} catch (final NoSuchElementException e) {
throw new ClassificationRunNotFoundException(classificationId);
}
}
public void upsertClassificationRun(final IBranchPath branchPath, final ClassificationRun classificationRun) throws IOException {
upsertClassificationRunNoCommit(branchPath, classificationRun);
commit();
}
private void upsertClassificationRunNoCommit(final IBranchPath branchPath, final ClassificationRun classificationRun) throws IOException {
final Document updatedDocument = new Document();
Fields.searchOnlyStringField(FIELD_CLASS).addTo(updatedDocument, ClassificationRun.class.getSimpleName());
Fields.searchOnlyStringField(FIELD_ID).addTo(updatedDocument, classificationRun.getId());
Fields.searchOnlyStringField(FIELD_STATUS).addTo(updatedDocument, classificationRun.getStatus().name());
Fields.longField(FIELD_CREATION_DATE).addTo(updatedDocument, classificationRun.getCreationDate().getTime());
Fields.stringField(FIELD_USER_ID).addTo(updatedDocument, classificationRun.getUserId());
Fields.stringField(FIELD_BRANCH_PATH).addTo(updatedDocument, branchPath.getPath());
Fields.storedOnlyStringField(FIELD_SOURCE).addTo(updatedDocument, objectMapper.writer().writeValueAsString(classificationRun));
final Query query = Fields.newQuery()
.field(FIELD_CLASS, ClassificationRun.class.getSimpleName())
.field(FIELD_ID, classificationRun.getId())
.matchAll();
writer.deleteDocuments(query);
writer.addDocument(updatedDocument);
}
public void updateClassificationRunStatus(final String id, final ClassificationStatus newStatus) throws IOException {
updateClassificationRunStatus(id, newStatus, null);
}
public void updateClassificationRunStatus(final String id, final ClassificationStatus newStatus, final GetResultResponseChanges changes) throws IOException {
final Document sourceDocument = getClassificationRunDocument(id);
if (null == sourceDocument) {
return;
}
final IBranchPath branchPath = BranchPathUtils.createPath(sourceDocument.get(FIELD_BRANCH_PATH));
final ClassificationRun classificationRun = objectMapper.reader(ClassificationRun.class).readValue(sourceDocument.get(FIELD_SOURCE));
if (newStatus.equals(classificationRun.getStatus())) {
return;
}
classificationRun.setStatus(newStatus);
if (ClassificationStatus.COMPLETED.equals(newStatus)) {
checkNotNull(changes, "GetResultResponseChanges are required to update a completed classification.");
if (null == classificationRun.getCompletionDate()) {
classificationRun.setCompletionDate(new Date());
}
final ClassificationIssueFlags issueFlags = indexChanges(sourceDocument, id, changes);
classificationRun.setInferredRelationshipChangesFound(!changes.getRelationshipEntries().isEmpty());
classificationRun.setRedundantStatedRelationshipsFound(issueFlags.isRedundantStatedFound());
classificationRun.setEquivalentConceptsFound(issueFlags.isEquivalentConceptsFound());
} else if (ClassificationStatus.SAVED.equals(newStatus)) {
classificationRun.setSaveDate(new Date());
}
upsertClassificationRun(branchPath, classificationRun);
}
private SnomedRelationship getRelationship(IBranchPath branchPath, RelationshipChangeEntry relationshipChange) {
return Iterables.getOnlyElement(SnomedRequests.prepareSearchRelationship()
.setLimit(1)
.filterBySource(relationshipChange.getSource().getId().toString())
.filterByDestination(relationshipChange.getDestination().getId().toString())
.filterByType(relationshipChange.getType().getId().toString())
.filterByGroup(relationshipChange.getGroup())
.build(SnomedDatastoreActivator.REPOSITORY_UUID, branchPath.getPath())
.execute(ApplicationContext.getServiceForClass(IEventBus.class))
.getSync(), null);
}
public void deleteClassificationData(final String classificationId) throws IOException {
// Removes all documents, not just the classification run document
writer.deleteDocuments(new Term(FIELD_ID, classificationId));
commit();
}
private ClassificationIssueFlags indexChanges(Document sourceDocument, String id, final GetResultResponseChanges changes) throws IOException {
final IBranchPath branchPath = BranchPathUtils.createPath(sourceDocument.get(FIELD_BRANCH_PATH));
final String userId = sourceDocument.get(FIELD_USER_ID);
final long creationDate = sourceDocument.getField(FIELD_CREATION_DATE).numericValue().longValue();
final ClassificationIssueFlags classificationIssueFlags = new ClassificationIssueFlags();
final List<AbstractEquivalenceSet> equivalenceSets = changes.getEquivalenceSets();
classificationIssueFlags.setEquivalentConceptsFound(!equivalenceSets.isEmpty());
for (final AbstractEquivalenceSet equivalenceSet : equivalenceSets) {
final List<IEquivalentConcept> convertedEquivalentConcepts = newArrayList();
for (final String equivalentId : equivalenceSet.getConceptIds()) {
addEquivalentConcept(convertedEquivalentConcepts, equivalentId);
}
if (equivalenceSet instanceof EquivalenceSet) {
addEquivalentConcept(convertedEquivalentConcepts, ((EquivalenceSet) equivalenceSet).getSuggestedConceptId());
}
final EquivalentConceptSet convertedEquivalenceSet = new EquivalentConceptSet();
convertedEquivalenceSet.setUnsatisfiable(equivalenceSet.isUnsatisfiable());
convertedEquivalenceSet.setEquivalentConcepts(convertedEquivalentConcepts);
indexResult(id, branchPath, userId, creationDate, EquivalentConceptSet.class, equivalenceSet.getConceptIds().get(0), convertedEquivalenceSet);
}
for (final RelationshipChangeEntry relationshipChange : changes.getRelationshipEntries()) {
final RelationshipChange convertedRelationshipChange = new RelationshipChange();
final ChangeNature changeNature = Nature.INFERRED.equals(relationshipChange.getNature()) ? ChangeNature.INFERRED : ChangeNature.REDUNDANT;
convertedRelationshipChange.setChangeNature(changeNature);
convertedRelationshipChange.setId(relationshipChange.getId());
convertedRelationshipChange.setDestinationId(Long.toString(relationshipChange.getDestination().getId()));
convertedRelationshipChange.setDestinationNegated(relationshipChange.isDestinationNegated());
final String characteristicTypeId;
if (changeNature == ChangeNature.INFERRED) {
characteristicTypeId = Concepts.INFERRED_RELATIONSHIP;
} else {
final SnomedRelationship existingRelationship = getRelationship(branchPath, relationshipChange);
characteristicTypeId = existingRelationship.getCharacteristicType().getConceptId();
convertedRelationshipChange.setId(existingRelationship.getId());
if (changeNature == ChangeNature.REDUNDANT && characteristicTypeId.equals(Concepts.STATED_RELATIONSHIP)) {
classificationIssueFlags.setRedundantStatedFound(true);
}
}
convertedRelationshipChange.setCharacteristicTypeId(characteristicTypeId);
convertedRelationshipChange.setGroup(relationshipChange.getGroup());
final String modifierId = Long.toString(relationshipChange.getModifier().getId());
convertedRelationshipChange.setModifier(Concepts.UNIVERSAL_RESTRICTION_MODIFIER.equals(modifierId) ? RelationshipModifier.UNIVERSAL : RelationshipModifier.EXISTENTIAL);
convertedRelationshipChange.setSourceId(Long.toString(relationshipChange.getSource().getId()));
convertedRelationshipChange.setTypeId(Long.toString(relationshipChange.getType().getId()));
convertedRelationshipChange.setUnionGroup(relationshipChange.getUnionGroup());
indexResult(id, branchPath, userId, creationDate, RelationshipChange.class, convertedRelationshipChange.getSourceId(), convertedRelationshipChange);
}
commit();
return classificationIssueFlags;
}
private void addEquivalentConcept(final List<IEquivalentConcept> convertedEquivalentConcepts, final String equivalentId) {
final EquivalentConcept convertedConcept = new EquivalentConcept();
convertedConcept.setId(equivalentId);
convertedEquivalentConcepts.add(convertedConcept);
}
/**
* @param storageRef the storage reference identifying the branch to read from
* @param classificationId the identifier of the classification run
* @return the equivalent concept sets recorded for the given classification run
*/
public List<IEquivalentConceptSet> getEquivalentConceptSets(final StorageRef storageRef, final String classificationId) throws IOException {
final Query query = createClassQuery(EquivalentConceptSet.class.getSimpleName(), classificationId, storageRef, null);
return this.<IEquivalentConceptSet>search(query, EquivalentConceptSet.class);
}
/**
* @param storageRef the storage reference identifying the branch to read from
* @param classificationId the identifier of the classification run
* @param sourceConceptId used to restrict results, can be null
* @param offset the index of the first relationship change to return
* @param limit the maximum number of relationship changes to return
* @return the matching relationship changes together with the total hit count
*/
public IRelationshipChangeList getRelationshipChanges(final StorageRef storageRef, final String classificationId, final String sourceConceptId, final int offset, final int limit) throws IOException {
final Query query = createClassQuery(RelationshipChange.class.getSimpleName(), classificationId, storageRef, sourceConceptId);
final RelationshipChangeList result = new RelationshipChangeList();
result.setTotal(getHitCount(query));
result.setChanges(this.<IRelationshipChange>search(query, RelationshipChange.class, offset, limit));
return result;
}
private <T> void indexResult(final String id, final IBranchPath branchPath, final String userId, final long creationDate,
final Class<T> clazz, String componentId, final T value) throws IOException {
final Document doc = new Document();
Fields.searchOnlyStringField(FIELD_CLASS).addTo(doc, clazz.getSimpleName());
Fields.searchOnlyStringField(FIELD_ID).addTo(doc, id.toString());
Fields.searchOnlyStringField(FIELD_USER_ID).addTo(doc, userId);
Fields.searchOnlyLongField(FIELD_CREATION_DATE).addTo(doc, creationDate);
Fields.searchOnlyStringField(FIELD_BRANCH_PATH).addTo(doc, branchPath.getPath());
Fields.searchOnlyStringField(FIELD_COMPONENT_ID).addTo(doc, componentId);
Fields.storedOnlyStringField(FIELD_SOURCE).addTo(doc, objectMapper.writer().writeValueAsString(value));
writer.addDocument(doc);
}
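/**
 * Returns the indexed document for the classification run with the given identifier, or {@code null} if no such document exists.
 */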
private Document getClassificationRunDocument(final String id) throws IOException {
final Query query = Fields.newQuery()
.field(FIELD_CLASS, ClassificationRun.class.getSimpleName())
.field(FIELD_ID, id.toString())
.matchAll();
return Iterables.getFirst(search(query, 1), null);
}
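/**
 * Builds a query matching documents of the given result class for a classification run on the referenced branch,
 * optionally restricted to a single component identifier.
 */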
private Query createClassQuery(final String className, final String classificationId, StorageRef storageRef, final String componentId) {
final QueryBuilder query = Fields.newQuery()
.field(FIELD_CLASS, className)
.field(FIELD_ID, classificationId)
.field(FIELD_BRANCH_PATH, storageRef.getBranchPath());
if (componentId != null) {
query.field(FIELD_COMPONENT_ID, componentId);
}
return query.matchAll();
}
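// The following overloads delegate to the full search method below, using index order and default offset/limit values.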
private <T> List<T> search(final Query query, final Class<? extends T> sourceClass) throws IOException {
return search(query, sourceClass, Integer.MAX_VALUE);
}
private <T> List<T> search(final Query query, final Class<? extends T> sourceClass, final int limit) throws IOException {
return search(query, sourceClass, 0, limit);
}
private <T> List<T> search(final Query query, final Class<? extends T> sourceClass, final int offset, final int limit) throws IOException {
return search(query, sourceClass, Sort.INDEXORDER, offset, limit);
}
private <T> List<T> search(final Query query, final Class<? extends T> sourceClass, Sort sort, final int offset, final int limit) throws IOException {
IndexSearcher searcher = null;
try {
searcher = manager.acquire();
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(query, collector);
final int totalHits = collector.getTotalHits();
final int saturatedSum = Ints.saturatedCast((long) offset + limit);
final int docsToRetrieve = Ints.min(saturatedSum, searcher.getIndexReader().maxDoc(), totalHits);
final ImmutableList.Builder<T> resultBuilder = ImmutableList.builder();
if (docsToRetrieve < 1) {
return resultBuilder.build();
}
final TopDocs docs = searcher.search(query, null, docsToRetrieve, sort, false, false);
final ScoreDoc[] scoreDocs = docs.scoreDocs;
final ObjectReader reader = objectMapper.reader(sourceClass);
for (int i = offset; i < docsToRetrieve && i < scoreDocs.length; i++) {
final Document sourceDocument = searcher.doc(scoreDocs[i].doc, ImmutableSet.of(FIELD_SOURCE));
final String source = sourceDocument.get(FIELD_SOURCE);
final T deserializedSource = reader.readValue(source);
resultBuilder.add(deserializedSource);
}
return resultBuilder.build();
} finally {
if (null != searcher) {
manager.release(searcher);
}
}
}
private List<Document> search(final Query query, final int limit) throws IOException {
IndexSearcher searcher = null;
try {
searcher = manager.acquire();
final TopDocs docs = searcher.search(query, null, limit, Sort.INDEXORDER, false, false);
final ImmutableList.Builder<Document> resultBuilder = ImmutableList.builder();
for (final ScoreDoc scoreDoc : docs.scoreDocs) {
resultBuilder.add(searcher.doc(scoreDoc.doc));
}
return resultBuilder.build();
} finally {
if (null != searcher) {
manager.release(searcher);
}
}
}
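/**
 * Counts the number of documents matching the query without loading their contents.
 */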
private int getHitCount(final Query query) throws IOException {
IndexSearcher searcher = null;
try {
searcher = manager.acquire();
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(query, collector);
return collector.getTotalHits();
} finally {
if (null != searcher) {
manager.release(searcher);
}
}
}
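/**
 * Collects flags for noteworthy findings (redundant stated relationships, equivalent concepts) encountered
 * while indexing the results of a classification run.
 */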
private class ClassificationIssueFlags {
private boolean redundantStatedFound;
private boolean equivalentConceptsFound;
public boolean isRedundantStatedFound() {
return redundantStatedFound;
}
public void setRedundantStatedFound(boolean redundantStatedFound) {
this.redundantStatedFound = redundantStatedFound;
}
public boolean isEquivalentConceptsFound() {
return equivalentConceptsFound;
}
public void setEquivalentConceptsFound(boolean equivalentConceptsFound) {
this.equivalentConceptsFound = equivalentConceptsFound;
}
}
}
|
[classification] Enhance redundantStatedRelationshipFound flag state
|
snomed/com.b2international.snowowl.snomed.api.impl/src/com/b2international/snowowl/snomed/api/impl/ClassificationRunIndex.java
|
[classification] Enhance redundantStatedRelationshipFound flag state
|
|
Java
|
apache-2.0
|
e546b4def983e021a433dc844f2869de9089eff7
| 0
|
sangramjadhav/testrs
|
2246d358-2ece-11e5-905b-74de2bd44bed
|
hello.java
|
22464262-2ece-11e5-905b-74de2bd44bed
|
2246d358-2ece-11e5-905b-74de2bd44bed
|
hello.java
|
2246d358-2ece-11e5-905b-74de2bd44bed
|
|
Java
|
apache-2.0
|
79452099b809627f06efb63902b50688a18e27eb
| 0
|
mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.usergrid.persistence.graph.serialization.impl.shard.impl;
import java.util.Collections;
import java.util.Iterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.usergrid.persistence.core.consistency.TimeService;
import org.apache.usergrid.persistence.core.scope.ApplicationScope;
import org.apache.usergrid.persistence.core.util.ValidationUtils;
import org.apache.usergrid.persistence.graph.GraphFig;
import org.apache.usergrid.persistence.graph.MarkedEdge;
import org.apache.usergrid.persistence.graph.SearchByEdgeType;
import org.apache.usergrid.persistence.graph.exception.GraphRuntimeException;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.DirectedEdgeMeta;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.EdgeColumnFamilies;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.EdgeShardSerialization;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.NodeShardAllocation;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.Shard;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.ShardEntryGroup;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.ShardGroupCompaction;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.ShardedEdgeSerialization;
import org.apache.usergrid.persistence.graph.serialization.util.GraphValidation;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.inject.Inject;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.util.TimeUUIDUtils;
/**
* Implementation of the node shard monitor and allocation
*/
public class NodeShardAllocationImpl implements NodeShardAllocation {
private static final Logger LOG = LoggerFactory.getLogger( NodeShardAllocationImpl.class );
private final EdgeShardSerialization edgeShardSerialization;
private final EdgeColumnFamilies edgeColumnFamilies;
private final ShardedEdgeSerialization shardedEdgeSerialization;
private final TimeService timeService;
private final GraphFig graphFig;
private final ShardGroupCompaction shardGroupCompaction;
@Inject
public NodeShardAllocationImpl( final EdgeShardSerialization edgeShardSerialization,
final EdgeColumnFamilies edgeColumnFamilies,
final ShardedEdgeSerialization shardedEdgeSerialization, final TimeService timeService,
final GraphFig graphFig, final ShardGroupCompaction shardGroupCompaction ) {
this.edgeShardSerialization = edgeShardSerialization;
this.edgeColumnFamilies = edgeColumnFamilies;
this.shardedEdgeSerialization = shardedEdgeSerialization;
this.timeService = timeService;
this.graphFig = graphFig;
this.shardGroupCompaction = shardGroupCompaction;
}
@Override
public Iterator<ShardEntryGroup> getShards( final ApplicationScope scope, final Optional<Shard> maxShardId,
final DirectedEdgeMeta directedEdgeMeta ) {
ValidationUtils.validateApplicationScope( scope );
Preconditions.checkNotNull( maxShardId, "maxShardId cannot be null" );
GraphValidation.validateDirectedEdgeMeta( directedEdgeMeta );
Iterator<Shard> existingShards;
//it's a new node, it doesn't need to check cassandra, it won't exist
if ( isNewNode( directedEdgeMeta ) ) {
existingShards = Collections.singleton( Shard.MIN_SHARD ).iterator();
}
else {
existingShards = edgeShardSerialization.getShardMetaData( scope, maxShardId, directedEdgeMeta );
/**
* We didn't get anything out of cassandra, so we need to create the minimum shard
*/
if ( existingShards == null || !existingShards.hasNext() ) {
final MutationBatch batch = edgeShardSerialization.writeShardMeta( scope, Shard.MIN_SHARD, directedEdgeMeta );
try {
batch.execute();
}
catch ( ConnectionException e ) {
throw new RuntimeException( "Unable to connect to casandra", e );
}
existingShards = Collections.singleton( Shard.MIN_SHARD ).iterator();
}
}
return new ShardEntryGroupIterator( existingShards, graphFig.getShardMinDelta(), shardGroupCompaction, scope,
directedEdgeMeta );
}
@Override
public boolean auditShard( final ApplicationScope scope, final ShardEntryGroup shardEntryGroup,
final DirectedEdgeMeta directedEdgeMeta ) {
ValidationUtils.validateApplicationScope( scope );
GraphValidation.validateShardEntryGroup( shardEntryGroup );
GraphValidation.validateDirectedEdgeMeta( directedEdgeMeta );
Preconditions.checkNotNull( shardEntryGroup, "shardEntryGroup cannot be null" );
/**
* Nothing to do, it's been created very recently, we don't create a new one
*/
if ( shardEntryGroup.isCompactionPending() ) {
return false;
}
//we can't allocate, we have more than 1 write shard currently. We need to compact first
if ( shardEntryGroup.entrySize() != 1 ) {
return false;
}
/**
* Check the min shard in our system
*/
final Shard shard = shardEntryGroup.getMinShard();
if ( shard.getCreatedTime() >= getMinTime() ) {
return false;
}
/**
* Check out if we have a count for our shard allocation
*/
final long shardSize = graphFig.getShardSize();
/**
* We want to allocate a new shard as close to the max value as possible. This way if we're filling up a
* shard rapidly, we split it near the head of the values.
* Further checks to this group will result in more splits, similar to creating a tree type structure and
* splitting each node.
*
* This means that the lower shard can be re-split later if it is still too large. We do the division to
* truncate to a split point below our current max, which would approximately be our pivot if we ultimately
* split from the lower bound and moved forward. Doing this will stop the current shard from expanding and
* avoid a point where we cannot ultimately compact to the correct shard size.
*/
/**
* Allocate the shard
*/
final Iterator<MarkedEdge> edges = directedEdgeMeta
.loadEdges( shardedEdgeSerialization, edgeColumnFamilies, scope, shardEntryGroup.getReadShards(), 0,
SearchByEdgeType.Order.ASCENDING );
if ( !edges.hasNext() ) {
LOG.warn(
"Tried to allocate a new shard for edge meta data {}, " + "but no max value could be found in that row",
directedEdgeMeta );
return false;
}
MarkedEdge marked = null;
/**
* Advance to the pivot point we should use. Once it's compacted, we can split again.
* We either want to take the first one (unlikely) or we take our total count - the shard size.
* If this is a negative number, we're approaching our max count for this shard, so the first
* element will suffice.
*/
for ( long i = 1; edges.hasNext(); i++ ) {
//we hit a pivot shard, set it since it could be the last one we encounter
if ( i % shardSize == 0 ) {
marked = edges.next();
}
else {
edges.next();
}
}
/**
* Sanity check in case we audit before we have a full shard
*/
if ( marked == null ) {
LOG.trace( "Shard {} in shard group {} not full, not splitting", shardEntryGroup );
return false;
}
final long createTimestamp = timeService.getCurrentTime();
final Shard newShard = new Shard( marked.getTimestamp(), createTimestamp, false );
LOG.info( "Allocating new shard {} for edge meta {}", newShard, directedEdgeMeta );
final MutationBatch batch = this.edgeShardSerialization.writeShardMeta( scope, newShard, directedEdgeMeta );
try {
batch.execute();
}
catch ( ConnectionException e ) {
throw new RuntimeException( "Unable to connect to casandra", e );
}
return true;
}
@Override
public long getMinTime() {
final long minimumAllowed = 2 * graphFig.getShardCacheTimeout();
final long minDelta = graphFig.getShardMinDelta();
if ( minDelta < minimumAllowed ) {
throw new GraphRuntimeException( String
.format( "You must configure the property %s to be >= 2 x %s. Otherwise you risk losing data",
GraphFig.SHARD_MIN_DELTA, GraphFig.SHARD_CACHE_TIMEOUT ) );
}
return timeService.getCurrentTime() - minDelta;
}
/**
* Return true if the node has been created within our timeout. If this is the case, we don't need to check
* cassandra, we know it won't exist
*/
private boolean isNewNode( DirectedEdgeMeta directedEdgeMeta ) {
//TODO: TN this is broken....
//The timeout is in milliseconds. Time for a time uuid is 1/10000 of a milli, so we need to get the units
// correct
final long timeoutDelta = graphFig.getShardCacheTimeout();
final long timeNow = timeService.getCurrentTime();
boolean isNew = true;
for ( DirectedEdgeMeta.NodeMeta node : directedEdgeMeta.getNodes() ) {
//short circuit
if ( !isNew || node.getId().getUuid().version() > 2 ) {
return false;
}
final long uuidTime = TimeUUIDUtils.getTimeFromUUID( node.getId().getUuid() );
final long newExpirationTimeout = uuidTime + timeoutDelta;
//our expiration is after our current time, treat it as new
isNew = isNew && newExpirationTimeout > timeNow;
}
return isNew;
}
}
|
stack/corepersistence/graph/src/main/java/org/apache/usergrid/persistence/graph/serialization/impl/shard/impl/NodeShardAllocationImpl.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.usergrid.persistence.graph.serialization.impl.shard.impl;
import java.util.Collections;
import java.util.Iterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.usergrid.persistence.core.consistency.TimeService;
import org.apache.usergrid.persistence.core.scope.ApplicationScope;
import org.apache.usergrid.persistence.core.util.ValidationUtils;
import org.apache.usergrid.persistence.graph.GraphFig;
import org.apache.usergrid.persistence.graph.MarkedEdge;
import org.apache.usergrid.persistence.graph.SearchByEdgeType;
import org.apache.usergrid.persistence.graph.exception.GraphRuntimeException;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.DirectedEdgeMeta;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.EdgeColumnFamilies;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.EdgeShardSerialization;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.NodeShardAllocation;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.Shard;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.ShardEntryGroup;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.ShardGroupCompaction;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.ShardedEdgeSerialization;
import org.apache.usergrid.persistence.graph.serialization.util.GraphValidation;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.inject.Inject;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.util.TimeUUIDUtils;
/**
* Implementation of the node shard monitor and allocation
*/
public class NodeShardAllocationImpl implements NodeShardAllocation {
private static final Logger LOG = LoggerFactory.getLogger( NodeShardAllocationImpl.class );
private final EdgeShardSerialization edgeShardSerialization;
private final EdgeColumnFamilies edgeColumnFamilies;
private final ShardedEdgeSerialization shardedEdgeSerialization;
private final TimeService timeService;
private final GraphFig graphFig;
private final ShardGroupCompaction shardGroupCompaction;
@Inject
public NodeShardAllocationImpl( final EdgeShardSerialization edgeShardSerialization,
final EdgeColumnFamilies edgeColumnFamilies,
final ShardedEdgeSerialization shardedEdgeSerialization, final TimeService timeService,
final GraphFig graphFig, final ShardGroupCompaction shardGroupCompaction ) {
this.edgeShardSerialization = edgeShardSerialization;
this.edgeColumnFamilies = edgeColumnFamilies;
this.shardedEdgeSerialization = shardedEdgeSerialization;
this.timeService = timeService;
this.graphFig = graphFig;
this.shardGroupCompaction = shardGroupCompaction;
}
@Override
public Iterator<ShardEntryGroup> getShards( final ApplicationScope scope, final Optional<Shard> maxShardId,
final DirectedEdgeMeta directedEdgeMeta ) {
ValidationUtils.validateApplicationScope( scope );
Preconditions.checkNotNull( maxShardId, "maxShardId cannot be null" );
GraphValidation.validateDirectedEdgeMeta( directedEdgeMeta );
Iterator<Shard> existingShards;
//it's a new node, it doesn't need to check cassandra, it won't exist
if ( isNewNode( directedEdgeMeta ) ) {
existingShards = Collections.singleton( Shard.MIN_SHARD ).iterator();
}
else {
existingShards = edgeShardSerialization.getShardMetaData( scope, maxShardId, directedEdgeMeta );
/**
* We didn't get anything out of cassandra, so we need to create the minimum shard
*/
if ( existingShards == null || !existingShards.hasNext() ) {
final MutationBatch batch = edgeShardSerialization.writeShardMeta( scope, Shard.MIN_SHARD, directedEdgeMeta );
try {
batch.execute();
}
catch ( ConnectionException e ) {
throw new RuntimeException( "Unable to connect to casandra", e );
}
existingShards = Collections.singleton( Shard.MIN_SHARD ).iterator();
}
}
return new ShardEntryGroupIterator( existingShards, graphFig.getShardMinDelta(), shardGroupCompaction, scope,
directedEdgeMeta );
}
@Override
public boolean auditShard( final ApplicationScope scope, final ShardEntryGroup shardEntryGroup,
final DirectedEdgeMeta directedEdgeMeta ) {
ValidationUtils.validateApplicationScope( scope );
GraphValidation.validateShardEntryGroup( shardEntryGroup );
GraphValidation.validateDirectedEdgeMeta( directedEdgeMeta );
Preconditions.checkNotNull( shardEntryGroup, "shardEntryGroup cannot be null" );
/**
* Nothing to do, it's been created very recently, we don't create a new one
*/
if ( shardEntryGroup.isCompactionPending() ) {
return false;
}
//we can't allocate, we have more than 1 write shard currently. We need to compact first
if ( shardEntryGroup.entrySize() != 1 ) {
return false;
}
/**
* Check the min shard in our system
*/
final Shard shard = shardEntryGroup.getMinShard();
if ( shard.getCreatedTime() >= getMinTime() ) {
return false;
}
/**
* Check out if we have a count for our shard allocation
*/
final long shardSize = graphFig.getShardSize();
/**
* We want to allocate a new shard as close to the max value as possible. This way if we're filling up a
* shard rapidly, we split it near the head of the values.
* Further checks to this group will result in more splits, similar to creating a tree type structure and
* splitting each node.
*
* This means that the lower shard can be re-split later if it is still too large. We do the division to
* truncate to a split point below our current max, which would approximately be our pivot if we ultimately
* split from the lower bound and moved forward. Doing this will stop the current shard from expanding and
* avoid a point where we cannot ultimately compact to the correct shard size.
*/
/**
* Allocate the shard
*/
final Iterator<MarkedEdge> edges = directedEdgeMeta
.loadEdges( shardedEdgeSerialization, edgeColumnFamilies, scope, shardEntryGroup.getReadShards(), 0,
SearchByEdgeType.Order.ASCENDING );
if ( !edges.hasNext() ) {
LOG.warn(
"Tried to allocate a new shard for edge meta data {}, " + "but no max value could be found in that row",
directedEdgeMeta );
return false;
}
MarkedEdge marked = null;
/**
* Advance to the pivot point we should use. Once it's compacted, we can split again.
* We either want to take the first one (unlikely) or we take our total count - the shard size.
* If this is a negative number, we're approaching our max count for this shard, so the first
* element will suffice.
*/
for ( long i = 1; edges.hasNext(); i++ ) {
//we hit a pivot shard, set it since it could be the last one we encounter
if ( i % shardSize == 0 ) {
marked = edges.next();
}
else {
edges.next();
}
}
/**
* Sanity check in case we audit before we have a full shard
*/
if ( marked == null ) {
LOG.info( "Shard {} in shard group {} not full, not splitting", shardEntryGroup );
return false;
}
final long createTimestamp = timeService.getCurrentTime();
final Shard newShard = new Shard( marked.getTimestamp(), createTimestamp, false );
LOG.info( "Allocating new shard {} for edge meta {}", newShard, directedEdgeMeta );
final MutationBatch batch = this.edgeShardSerialization.writeShardMeta( scope, newShard, directedEdgeMeta );
try {
batch.execute();
}
catch ( ConnectionException e ) {
throw new RuntimeException( "Unable to connect to casandra", e );
}
return true;
}
@Override
public long getMinTime() {
final long minimumAllowed = 2 * graphFig.getShardCacheTimeout();
final long minDelta = graphFig.getShardMinDelta();
if ( minDelta < minimumAllowed ) {
throw new GraphRuntimeException( String
.format( "You must configure the property %s to be >= 2 x %s. Otherwise you risk losing data",
GraphFig.SHARD_MIN_DELTA, GraphFig.SHARD_CACHE_TIMEOUT ) );
}
return timeService.getCurrentTime() - minDelta;
}
/**
* Return true if the node has been created within our timeout. If this is the case, we don't need to check
* cassandra, we know it won't exist
*/
private boolean isNewNode( DirectedEdgeMeta directedEdgeMeta ) {
//TODO: TN this is broken....
//The timeout is in milliseconds. Time for a time uuid is 1/10000 of a milli, so we need to get the units
// correct
final long timeoutDelta = graphFig.getShardCacheTimeout();
final long timeNow = timeService.getCurrentTime();
boolean isNew = true;
for ( DirectedEdgeMeta.NodeMeta node : directedEdgeMeta.getNodes() ) {
//short circuit
if ( !isNew || node.getId().getUuid().version() > 2 ) {
return false;
}
final long uuidTime = TimeUUIDUtils.getTimeFromUUID( node.getId().getUuid() );
final long newExpirationTimeout = uuidTime + timeoutDelta;
//our expiration is after our current time, treat it as new
isNew = isNew && newExpirationTimeout > timeNow;
}
return isNew;
}
}
|
Moving log message to trace as it occurs a lot during normal runtime traffic.
|
stack/corepersistence/graph/src/main/java/org/apache/usergrid/persistence/graph/serialization/impl/shard/impl/NodeShardAllocationImpl.java
|
Moving log message to trace as it occurs a lot during normal runtime traffic.
|
|
Java
|
apache-2.0
|
8cb9563692dc903e54c3b601873e6fe22128afaf
| 0
|
pkozelka/maven,cstamas/maven,dsyer/maven,stephenc/maven,njuneau/maven,kidaa/maven-1,stephenc/maven,olamy/maven,rogerchina/maven,trajano/maven,barthel/maven,wangyuesong0/maven,changbai1980/maven,josephw/maven,changbai1980/maven,likaiwalkman/maven,olamy/maven,lbndev/maven,olamy/maven,gorcz/maven,Tibor17/maven,mcculls/maven,Mounika-Chirukuri/maven,aheritier/maven,xasx/maven,vedmishr/demo1,aheritier/maven,apache/maven,mcculls/maven,karthikjaps/maven,dsyer/maven,Tibor17/maven,cstamas/maven,karthikjaps/maven,skitt/maven,njuneau/maven,josephw/maven,skitt/maven,Mounika-Chirukuri/maven,wangyuesong/maven,apache/maven,ChristianSchulte/maven,mcculls/maven,pkozelka/maven,rogerchina/maven,atanasenko/maven,kidaa/maven-1,Mounika-Chirukuri/maven,xasx/maven,stephenc/maven,lbndev/maven,barthel/maven,Distrotech/maven,trajano/maven,barthel/maven,Distrotech/maven,mizdebsk/maven,wangyuesong0/maven,wangyuesong0/maven,mizdebsk/maven,rogerchina/maven,aheritier/maven,ChristianSchulte/maven,keith-turner/maven,karthikjaps/maven,atanasenko/maven,dsyer/maven,wangyuesong/maven,njuneau/maven,runepeter/maven-deploy-plugin-2.8.1,ChristianSchulte/maven,likaiwalkman/maven,wangyuesong/maven,apache/maven,vedmishr/demo1,keith-turner/maven,vedmishr/demo1,mizdebsk/maven,trajano/maven,changbai1980/maven,skitt/maven,pkozelka/maven,josephw/maven,gorcz/maven,xasx/maven,lbndev/maven,gorcz/maven,runepeter/maven-deploy-plugin-2.8.1,keith-turner/maven,kidaa/maven-1,atanasenko/maven,likaiwalkman/maven,cstamas/maven
|
package org.apache.maven.artifact.transform;
/*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.manager.WagonManager;
import org.apache.maven.artifact.metadata.AbstractVersionArtifactMetadata;
import org.apache.maven.artifact.metadata.ArtifactMetadataRetrievalException;
import org.apache.maven.artifact.metadata.VersionArtifactMetadata;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.IOException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
/**
* Describes a version transformation during artifact resolution.
*
* @author <a href="mailto:brett@apache.org">Brett Porter</a>
* @version $Id$
*/
public abstract class AbstractVersionTransformation
extends AbstractLogEnabled
implements ArtifactTransformation
{
protected WagonManager wagonManager;
/**
* @todo very primitive. Probably we can cache artifacts themselves in a central location, as well as reset the flag over time in a long running process.
*/
private static Set resolvedArtifactCache = new HashSet();
protected String resolveVersion( Artifact artifact, ArtifactRepository localRepository, List remoteRepositories )
throws ArtifactMetadataRetrievalException
{
VersionArtifactMetadata localMetadata;
try
{
localMetadata = readFromLocalRepository( artifact, localRepository );
}
catch ( IOException e )
{
throw new ArtifactMetadataRetrievalException( "Error reading local metadata", e );
}
boolean alreadyResolved = alreadyResolved( artifact );
if ( !alreadyResolved )
{
boolean checkedUpdates = false;
for ( Iterator i = remoteRepositories.iterator(); i.hasNext(); )
{
ArtifactRepository repository = (ArtifactRepository) i.next();
ArtifactRepositoryPolicy policy = artifact.isSnapshot() ? repository.getSnapshots()
: repository.getReleases();
if ( !policy.isEnabled() )
{
getLogger().debug( "Skipping disabled repository " + repository.getId() );
}
else
{
// TODO: should be able to calculate this less often
boolean checkForUpdates = policy.checkOutOfDate( localMetadata.getLastModified() );
if ( checkForUpdates )
{
checkedUpdates = true;
getLogger().info(
artifact.getArtifactId() + ": checking for updates from " + repository.getId() );
VersionArtifactMetadata remoteMetadata;
try
{
remoteMetadata = retrieveFromRemoteRepository( artifact, repository, localMetadata,
policy.getChecksumPolicy() );
}
catch ( ResourceDoesNotExistException e )
{
getLogger().debug( "Error resolving artifact version from metadata.", e );
continue;
}
int difference = remoteMetadata.compareTo( localMetadata );
if ( difference > 0 )
{
// remote is newer
artifact.setRepository( repository );
localMetadata = remoteMetadata;
}
}
}
}
// touch the file if it was checked for updates, but don't create it if it doesn't exist to avoid
// storing SNAPSHOT as the actual version which doesn't exist remotely.
if ( checkedUpdates && localMetadata.exists() )
{
localMetadata.storeInLocalRepository( localRepository );
}
resolvedArtifactCache.add( getCacheKey( artifact ) );
}
String version = localMetadata.constructVersion();
// TODO: if the POM and JAR are inconsistent, this might mean that different version of each are used
if ( !artifact.getFile().exists() || localMetadata.newerThanFile( artifact.getFile() ) )
{
if ( getLogger().isInfoEnabled() && !alreadyResolved )
{
if ( version != null && !version.equals( artifact.getBaseVersion() ) )
{
String message = artifact.getArtifactId() + ": resolved to version " + version;
if ( artifact.getRepository() != null )
{
message += " from repository " + artifact.getRepository().getId();
}
else
{
message += " from local repository";
}
getLogger().info( message );
}
}
return version;
}
else
{
if ( getLogger().isInfoEnabled() && !alreadyResolved )
{
// Locally installed file is newer, don't use the resolved version
getLogger().info( artifact.getArtifactId() + ": using locally installed snapshot" );
}
return artifact.getVersion();
}
}
protected VersionArtifactMetadata retrieveFromRemoteRepository( Artifact artifact,
ArtifactRepository remoteRepository,
VersionArtifactMetadata localMetadata,
String checksumPolicy )
throws ArtifactMetadataRetrievalException, ResourceDoesNotExistException
{
AbstractVersionArtifactMetadata metadata = createMetadata( artifact );
metadata.retrieveFromRemoteRepository( remoteRepository, wagonManager, checksumPolicy );
return metadata;
}
protected abstract AbstractVersionArtifactMetadata createMetadata( Artifact artifact );
private VersionArtifactMetadata readFromLocalRepository( Artifact artifact, ArtifactRepository localRepository )
throws IOException
{
AbstractVersionArtifactMetadata metadata = createMetadata( artifact );
metadata.readFromLocalRepository( localRepository );
return metadata;
}
private boolean alreadyResolved( Artifact artifact )
{
return resolvedArtifactCache.contains( getCacheKey( artifact ) );
}
private static String getCacheKey( Artifact artifact )
{
// No type - one per POM
return artifact.getGroupId() + ":" + artifact.getArtifactId() + ":" + artifact.getBaseVersion();
}
}
|
maven-artifact-manager/src/main/java/org/apache/maven/artifact/transform/AbstractVersionTransformation.java
|
package org.apache.maven.artifact.transform;
/*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.manager.WagonManager;
import org.apache.maven.artifact.metadata.AbstractVersionArtifactMetadata;
import org.apache.maven.artifact.metadata.ArtifactMetadataRetrievalException;
import org.apache.maven.artifact.metadata.VersionArtifactMetadata;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.IOException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
/**
* Describes a version transformation during artifact resolution.
*
* @author <a href="mailto:brett@apache.org">Brett Porter</a>
* @version $Id$
*/
public abstract class AbstractVersionTransformation
extends AbstractLogEnabled
implements ArtifactTransformation
{
protected WagonManager wagonManager;
/**
* @todo very primitive. Probably we can cache artifacts themselves in a central location, as well as reset the flag over time in a long running process.
*/
private static Set resolvedArtifactCache = new HashSet();
protected String resolveVersion( Artifact artifact, ArtifactRepository localRepository, List remoteRepositories )
throws ArtifactMetadataRetrievalException
{
VersionArtifactMetadata localMetadata;
try
{
localMetadata = readFromLocalRepository( artifact, localRepository );
}
catch ( IOException e )
{
throw new ArtifactMetadataRetrievalException( "Error reading local metadata", e );
}
boolean alreadyResolved = alreadyResolved( artifact );
if ( !alreadyResolved )
{
boolean checkedUpdates = false;
for ( Iterator i = remoteRepositories.iterator(); i.hasNext(); )
{
ArtifactRepository repository = (ArtifactRepository) i.next();
ArtifactRepositoryPolicy policy = artifact.isSnapshot() ? repository.getSnapshots()
: repository.getReleases();
if ( !policy.isEnabled() )
{
getLogger().debug( "Skipping disabled repository " + repository.getId() );
}
else
{
// TODO: should be able to calculate this less often
boolean checkForUpdates = policy.checkOutOfDate( localMetadata.getLastModified() );
if ( checkForUpdates )
{
getLogger().info(
artifact.getArtifactId() + ": checking for updates from " + repository.getId() );
VersionArtifactMetadata remoteMetadata;
try
{
remoteMetadata = retrieveFromRemoteRepository( artifact, repository, localMetadata,
policy.getChecksumPolicy() );
// we must only flag this after checking for updates, otherwise subsequent attempts will look
// for SNAPSHOT without checking the metadata
checkedUpdates = true;
}
catch ( ResourceDoesNotExistException e )
{
getLogger().debug( "Error resolving artifact version from metadata.", e );
continue;
}
int difference = remoteMetadata.compareTo( localMetadata );
if ( difference > 0 )
{
// remote is newer
artifact.setRepository( repository );
localMetadata = remoteMetadata;
}
}
}
}
if ( checkedUpdates )
{
localMetadata.storeInLocalRepository( localRepository );
}
resolvedArtifactCache.add( getCacheKey( artifact ) );
}
String version = localMetadata.constructVersion();
// TODO: if the POM and JAR are inconsistent, this might mean that different version of each are used
if ( !artifact.getFile().exists() || localMetadata.newerThanFile( artifact.getFile() ) )
{
if ( getLogger().isInfoEnabled() && !alreadyResolved )
{
if ( version != null && !version.equals( artifact.getBaseVersion() ) )
{
String message = artifact.getArtifactId() + ": resolved to version " + version;
if ( artifact.getRepository() != null )
{
message += " from repository " + artifact.getRepository().getId();
}
else
{
message += " from local repository";
}
getLogger().info( message );
}
}
return version;
}
else
{
if ( getLogger().isInfoEnabled() && !alreadyResolved )
{
// Locally installed file is newer, don't use the resolved version
getLogger().info( artifact.getArtifactId() + ": using locally installed snapshot" );
}
return artifact.getVersion();
}
}
protected VersionArtifactMetadata retrieveFromRemoteRepository( Artifact artifact,
ArtifactRepository remoteRepository,
VersionArtifactMetadata localMetadata,
String checksumPolicy )
throws ArtifactMetadataRetrievalException, ResourceDoesNotExistException
{
AbstractVersionArtifactMetadata metadata = createMetadata( artifact );
metadata.retrieveFromRemoteRepository( remoteRepository, wagonManager, checksumPolicy );
return metadata;
}
protected abstract AbstractVersionArtifactMetadata createMetadata( Artifact artifact );
private VersionArtifactMetadata readFromLocalRepository( Artifact artifact, ArtifactRepository localRepository )
throws IOException
{
AbstractVersionArtifactMetadata metadata = createMetadata( artifact );
metadata.readFromLocalRepository( localRepository );
return metadata;
}
private boolean alreadyResolved( Artifact artifact )
{
return resolvedArtifactCache.contains( getCacheKey( artifact ) );
}
private static String getCacheKey( Artifact artifact )
{
// No type - one per POM
return artifact.getGroupId() + ":" + artifact.getArtifactId() + ":" + artifact.getBaseVersion();
}
}
|
fix this once and for all. This covers the case of having a local snapshot that was not found remotely, as well as the previous case it was reverted for - avoiding creating the metadata when it didn't exist remotely.
git-svn-id: 2c527eb49caa05e19d6b2be874bf74fa9d7ea670@226512 13f79535-47bb-0310-9956-ffa450edef68
|
maven-artifact-manager/src/main/java/org/apache/maven/artifact/transform/AbstractVersionTransformation.java
|
fix this once and for all. This covers the case of having a local snapshot that was not found remotely, as well as the previous case it was reverted for - avoiding creating the metadata when it didn't exist remotely.
|
|
Java
|
apache-2.0
|
4e72ced3e76e5789bc3d80b9079e30b2ea29647b
| 0
|
SourcePond/checksum
|
/*Copyright (C) 2015 Roland Hauser, <sourcepond@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.*/
package ch.sourcepond.utils.checksum;
import java.io.IOException;
import java.security.MessageDigest;
/**
* Represents a checksum based on a specific hashing algorithm. See
* {@link MessageDigest} for further information.
*
*/
public interface Checksum {
/**
* Returns the algorithm name used to calculate this checksum.
*
* @return Algorithm name, never {@code null}
*/
String getAlgorithm();
/**
* Calculates the checksum and returns the result as byte array. The length
* of the array depends on the used hashing algorithm. See
* <a href="http://docs.oracle.com/javase/7/docs/technotes/guides/security/
* StandardNames.html#MessageDigest">MessageDigest Algorithms</a> for
* further information.
*
* @return The calculated checksum as byte array, never {@code null}
* @throws IOException
* Thrown, if the necessary data could not read from its source
* for any reason.
* @throws InterruptedException
* Thrown, if the calculation of the checksum has been
* interrupted.
*/
byte[] getValue() throws IOException, InterruptedException;
/**
* Calculates the checksum and returns the result as hex-string. See
* {@link #getValue()} for further information.
*
* @return The calculated checksum as hex-string, never {@code null}
* @throws IOException
* Thrown, if the necessary data could not read from its source
* for any reason.
* @throws InterruptedException
* Thrown, if the calculation of the checksum has been
* interrupted.
*/
String getHexValue() throws IOException;
}
|
src/main/java/ch/sourcepond/utils/checksum/Checksum.java
|
/*Copyright (C) 2015 Roland Hauser, <sourcepond@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.*/
package ch.sourcepond.utils.checksum;
import java.io.IOException;
import java.security.MessageDigest;
/**
* Represents a checksum based on a specific hashing algorithm. See
* {@link MessageDigest} for further information.
*
*/
public interface Checksum {
/**
* Returns the algorithm name used to calculate this checksum.
*
* @return Algorithm name, never {@code null}
*/
String getAlgorithm();
/**
* Calculates the checksum and returns the result as byte array. The length
* of the array depends on the used hashing algorithm. See
* <a href="http://docs.oracle.com/javase/7/docs/technotes/guides/security/
* StandardNames.html#MessageDigest">MessageDigest Algorithms</a> for
* further information.
*
* @return The calculated checksum as byte array, never {@code null}
* @throws IOException
* Thrown, if the necessary data could not read from its source
* for any reason.
* @throws Thrown,
* if the calculation of the checksum has been interrupted.
*/
byte[] getValue() throws IOException, InterruptedException;
/**
* Calculates the checksum and returns the result as hex-string. See
* {@link #getValue()} for further information.
*
* @return The calculated checksum as hex-string, never {@code null}
* @throws IOException
* Thrown, if the necessary data could not read from its source
* for any reason.
*/
String getHexValue() throws IOException;
}
|
Added exception
|
src/main/java/ch/sourcepond/utils/checksum/Checksum.java
|
Added exception
|
|
Java
|
apache-2.0
|
069afe857cb922a5b8baf162d3c946c8a67387ca
| 0
|
freme-project/Broker,freme-project/Broker,freme-project/Broker
|
package eu.freme.broker.eservices;
import com.google.gson.Gson;
import com.google.gson.stream.MalformedJsonException;
import eu.freme.broker.exception.BadRequestException;
import eu.freme.eservices.epublishing.EPublishingService;
import eu.freme.eservices.epublishing.exception.EPubCreationException;
import eu.freme.eservices.epublishing.exception.InvalidZipException;
import eu.freme.eservices.epublishing.webservice.Metadata;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.annotation.MultipartConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
/**
*
* @author Pieter Heyvaert <pheyvaer.heyvaert@ugent.be>
*/
@RestController
public class EPublishing {
private static final Logger logger = Logger.getLogger(EPublishing.class.getName());
private static final long maxUploadSize = 1024 * 1024 * 200;
@Autowired
EPublishingService entityAPI;
@RequestMapping(value = "/e-publishing/html", method = RequestMethod.POST)
public ResponseEntity<byte[]> htmlToEPub(@RequestParam("htmlZip") MultipartFile file, @RequestParam("metadata") String jMetadata) {
if (file.getSize() > maxUploadSize) {
double size = maxUploadSize / (1024.0 * 1024);
return new ResponseEntity<>(new byte[0], HttpStatus.BAD_REQUEST);
//throw new BadRequestException(String.format("The uploaded file is too large. The maximum file size for uploads is %.2f MB", size));
}
try {
Gson gson = new Gson();
Metadata metadata = gson.fromJson(jMetadata, Metadata.class);
HttpHeaders responseHeaders = new HttpHeaders();
responseHeaders.add("Content-Disposition", "attachment");
return new ResponseEntity<>(entityAPI.createEPUB(metadata, file.getInputStream()), responseHeaders, HttpStatus.OK);
} catch (MalformedJsonException | InvalidZipException | EPubCreationException ex) {
logger.log(Level.SEVERE, null, ex);
return new ResponseEntity<>(new byte[0], HttpStatus.BAD_REQUEST);
} catch (IOException ex) {
logger.log(Level.SEVERE, null, ex);
return new ResponseEntity<>(new byte[0], HttpStatus.INTERNAL_SERVER_ERROR);
}
}
}
|
src/main/java/eu/freme/broker/eservices/EPublishing.java
|
package eu.freme.broker.eservices;
import com.google.gson.Gson;
import com.google.gson.stream.MalformedJsonException;
import eu.freme.broker.exception.BadRequestException;
import eu.freme.eservices.epublishing.EPublishingService;
import eu.freme.eservices.epublishing.exception.EPubCreationException;
import eu.freme.eservices.epublishing.exception.InvalidZipException;
import eu.freme.eservices.epublishing.webservice.Metadata;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.annotation.MultipartConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
/**
*
* @author Pieter Heyvaert <pheyvaer.heyvaert@ugent.be>
*/
@RestController
public class EPublishing {
private static final Logger logger = Logger.getLogger(EPublishing.class.getName());
private static final long maxUploadSize = 1024 * 1024 * 200;
@Autowired
EPublishingService entityAPI;
@RequestMapping(value = "/e-publishing/html", method = RequestMethod.POST)
public ResponseEntity<byte[]> htmlToEPub(@RequestParam("htmlZip") MultipartFile file, @RequestParam("metadata") String jMetadata) {
if (file.getSize() > maxUploadSize) {
double size = maxUploadSize / (1024.0 * 1024);
return new ResponseEntity<>(new byte[0], HttpStatus.BAD_REQUEST);
//throw new BadRequestException(String.format("The uploaded file is too large. The maximum file size for uploads is %.2f MB", size));
}
try {
Gson gson = new Gson();
Metadata metadata = gson.fromJson(jMetadata, Metadata.class);
HttpHeaders responseHeaders = new HttpHeaders();
responseHeaders.add("Content-Description", "File Transfer");
return new ResponseEntity<>(entityAPI.createEPUB(metadata, file.getInputStream()), responseHeaders, HttpStatus.OK);
} catch (MalformedJsonException | InvalidZipException | EPubCreationException ex) {
logger.log(Level.SEVERE, null, ex);
return new ResponseEntity<>(new byte[0], HttpStatus.BAD_REQUEST);
} catch (IOException ex) {
logger.log(Level.SEVERE, null, ex);
return new ResponseEntity<>(new byte[0], HttpStatus.INTERNAL_SERVER_ERROR);
}
}
}
|
switched from header "Content-Description": "File Transfer" to "Content-Disposition": "attachment"
|
src/main/java/eu/freme/broker/eservices/EPublishing.java
|
switched from header "Content-Description": "File Transfer" to "Content-Disposition": "attachment"
|
|
Java
|
apache-2.0
|
10300edb583d15dbcf6bd6e2b70a5f4eefafc3e2
| 0
|
gbif/occurrence,gbif/occurrence,gbif/occurrence
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gbif.occurrence.download.service;
import org.gbif.api.exception.ServiceUnavailableException;
import org.gbif.api.model.occurrence.Download;
import org.gbif.api.model.occurrence.DownloadRequest;
import org.gbif.api.model.occurrence.PredicateDownloadRequest;
import org.gbif.api.service.occurrence.DownloadRequestService;
import org.gbif.api.service.registry.OccurrenceDownloadService;
import org.gbif.occurrence.common.download.DownloadUtils;
import org.gbif.occurrence.download.service.workflow.DownloadWorkflowParametersBuilder;
import org.gbif.occurrence.mail.BaseEmailModel;
import org.gbif.occurrence.mail.EmailSender;
import org.gbif.occurrence.mail.OccurrenceEmailManager;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.channels.FileChannel;
import java.time.Duration;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.Date;
import java.util.EnumSet;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
import org.apache.oozie.client.Job;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.OozieClientException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Component;
import org.springframework.web.server.ResponseStatusException;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Enums;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Counter;
import io.github.resilience4j.core.IntervalFunction;
import io.github.resilience4j.retry.Retry;
import io.github.resilience4j.retry.RetryConfig;
import lombok.SneakyThrows;
import static org.gbif.occurrence.common.download.DownloadUtils.downloadLink;
import static org.gbif.occurrence.download.service.Constants.NOTIFY_ADMIN;
@Component
public class DownloadRequestServiceImpl implements DownloadRequestService, CallbackService {
private static final Logger LOG = LoggerFactory.getLogger(DownloadRequestServiceImpl.class);
// magic prefix for download keys to indicate these aren't real download files
private static final String NON_DOWNLOAD_PREFIX = "dwca-";
protected static final Set<Download.Status> RUNNING_STATUSES = EnumSet.of(Download.Status.PREPARING,
Download.Status.RUNNING,
Download.Status.SUSPENDED);
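/**
 * Retries the readability check on the download file up to three times with exponential backoff (starting at
 * three seconds) while the file is still reported as unreadable.
 */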
private static final Retry DOWNLOAD_SIZE_RETRY =
Retry.of(
"downloadSizeCall",
RetryConfig.<Boolean>custom()
.maxAttempts(3)
.retryOnResult(result -> !result)
.intervalFunction(IntervalFunction.ofExponentialBackoff(Duration.ofSeconds(3)))
.build());
/**
* Map to provide conversions from oozie.Job.Status to Download.Status.
*/
@VisibleForTesting
protected static final ImmutableMap<Job.Status, Download.Status> STATUSES_MAP =
new ImmutableMap.Builder<Job.Status, Download.Status>()
.put(Job.Status.PREP, Download.Status.PREPARING)
.put(Job.Status.PREPPAUSED, Download.Status.PREPARING)
.put(Job.Status.PREMATER, Download.Status.PREPARING)
.put(Job.Status.PREPSUSPENDED, Download.Status.SUSPENDED)
.put(Job.Status.RUNNING, Download.Status.RUNNING)
.put(Job.Status.KILLED, Download.Status.KILLED)
.put(Job.Status.RUNNINGWITHERROR, Download.Status.RUNNING)
.put(Job.Status.DONEWITHERROR, Download.Status.FAILED)
.put(Job.Status.FAILED, Download.Status.FAILED)
.put(Job.Status.PAUSED, Download.Status.RUNNING)
.put(Job.Status.PAUSEDWITHERROR, Download.Status.RUNNING)
.put(Job.Status.SUCCEEDED, Download.Status.SUCCEEDED)
.put(Job.Status.SUSPENDED, Download.Status.SUSPENDED)
.put(Job.Status.SUSPENDEDWITHERROR, Download.Status.SUSPENDED)
.put(Job.Status.IGNORED, Download.Status.FAILED).build();
private static final Counter SUCCESSFUL_DOWNLOADS = Metrics.newCounter(CallbackService.class, "successful_downloads");
private static final Counter FAILED_DOWNLOADS = Metrics.newCounter(CallbackService.class, "failed_downloads");
private static final Counter CANCELLED_DOWNLOADS = Metrics.newCounter(CallbackService.class, "cancelled_downloads");
private final OozieClient client;
private final String portalUrl;
private final String wsUrl;
private final File downloadMount;
private final OccurrenceDownloadService occurrenceDownloadService;
private final DownloadWorkflowParametersBuilder parametersBuilder;
private final OccurrenceEmailManager emailManager;
private final EmailSender emailSender;
private final DownloadLimitsService downloadLimitsService;
@Autowired
public DownloadRequestServiceImpl(OozieClient client,
@Qualifier("oozie.default_properties") Map<String, String> defaultProperties,
@Value("${occurrence.download.portal.url}") String portalUrl,
@Value("${occurrence.download.ws.url}") String wsUrl,
@Value("${occurrence.download.ws.mount}") String wsMountDir,
OccurrenceDownloadService occurrenceDownloadService,
DownloadLimitsService downloadLimitsService,
OccurrenceEmailManager emailManager,
EmailSender emailSender) {
this.client = client;
this.portalUrl = portalUrl;
this.wsUrl = wsUrl;
this.downloadMount = new File(wsMountDir);
this.occurrenceDownloadService = occurrenceDownloadService;
this.parametersBuilder = new DownloadWorkflowParametersBuilder(defaultProperties);
this.downloadLimitsService = downloadLimitsService;
this.emailManager = emailManager;
this.emailSender = emailSender;
}
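/**
 * Cancels the download if it is still in a running state: the registry entry is set to CANCELLED and the
 * corresponding Oozie workflow is killed.
 */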
@Override
public void cancel(String downloadKey) {
try {
Download download = occurrenceDownloadService.get(downloadKey);
if (download != null) {
if (RUNNING_STATUSES.contains(download.getStatus())) {
updateDownloadStatus(download, Download.Status.CANCELLED);
client.kill(DownloadUtils.downloadToWorkflowId(downloadKey));
LOG.info("Download {} cancelled", downloadKey);
}
} else {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, String.format("Download %s not found", downloadKey));
}
} catch (OozieClientException e) {
throw new ServiceUnavailableException("Failed to cancel download " + downloadKey, e);
}
}
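/**
 * Validates the request against download complexity and simultaneous download limits, submits the Oozie
 * workflow and persists the resulting download in the registry.
 */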
@Override
public String create(DownloadRequest request) {
LOG.debug("Trying to create download from request [{}]", request);
Preconditions.checkNotNull(request);
if (request instanceof PredicateDownloadRequest) {
PredicateValidator.validate(((PredicateDownloadRequest) request).getPredicate());
}
try {
String exceedComplexityLimit = downloadLimitsService.exceedsDownloadComplexity(request);
if (exceedComplexityLimit != null) {
LOG.info("Download request refused as it would exceed complexity limits");
throw new ResponseStatusException(HttpStatus.PAYLOAD_TOO_LARGE, "A download limitation is exceeded:\n" + exceedComplexityLimit + "\n");
}
String exceedSimultaneousLimit = downloadLimitsService.exceedsSimultaneousDownloadLimit(request.getCreator());
if (exceedSimultaneousLimit != null) {
LOG.info("Download request refused as it would exceed simultaneous limits");
// Keep HTTP 420 ("Enhance your calm") here.
throw new ResponseStatusException(HttpStatus.METHOD_FAILURE, "A download limitation is exceeded:\n" + exceedSimultaneousLimit + "\n");
}
String jobId = client.run(parametersBuilder.buildWorkflowParameters(request));
LOG.debug("Oozie job id is: [{}]", jobId);
String downloadId = DownloadUtils.workflowToDownloadId(jobId);
persistDownload(request, downloadId);
return downloadId;
} catch (OozieClientException e) {
LOG.error("Failed to create download job", e);
throw new ServiceUnavailableException("Failed to create download job", e);
}
}
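/**
 * Resolves the local file holding the result of the given download key; downloads tracked in the registry are
 * rejected if they do not exist, have been erased or are not yet available.
 */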
@Nullable
@Override
public File getResultFile(String downloadKey) {
String filename;
// avoid check for download in the registry if we have secret non download files with a magic prefix!
if (downloadKey == null || !downloadKey.toLowerCase().startsWith(NON_DOWNLOAD_PREFIX)) {
Download d = occurrenceDownloadService.get(downloadKey);
if (d == null) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Download " + downloadKey + " doesn't exist");
}
if (d.getStatus() == Download.Status.FILE_ERASED) {
throw new ResponseStatusException(HttpStatus.GONE, "Download " + downloadKey + " has been erased\n");
}
if (!d.isAvailable()) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Download " + downloadKey + " is not ready yet");
}
filename = getDownloadFilename(d);
} else {
filename = downloadKey + ".zip";
}
File localFile = new File(downloadMount, filename);
if (localFile.canRead()) {
return localFile;
} else {
throw new IllegalStateException(
"Unable to read download " + downloadKey + " from " + localFile.getAbsolutePath());
}
}
@Nullable
@Override
public InputStream getResult(String downloadKey) {
File localFile = getResultFile(downloadKey);
try {
return new FileInputStream(localFile);
} catch (IOException e) {
throw new IllegalStateException(
"Failed to read download " + downloadKey + " from " + localFile.getAbsolutePath(), e);
}
}
/**
* Processes a callback from Oozie and updates the download status accordingly.
*/
@Override
public void processCallback(String jobId, String status) {
Preconditions.checkArgument(!Strings.isNullOrEmpty(jobId), "<jobId> may not be null or empty");
Preconditions.checkArgument(!Strings.isNullOrEmpty(status), "<status> may not be null or empty");
Optional<Job.Status> opStatus = Enums.getIfPresent(Job.Status.class, status.toUpperCase());
Preconditions.checkArgument(opStatus.isPresent(), "<status> the requested status is not valid");
String downloadId = DownloadUtils.workflowToDownloadId(jobId);
LOG.debug("Processing callback for jobId [{}] with status [{}]", jobId, status);
Download download = occurrenceDownloadService.get(downloadId);
if (download == null) {
// Download can be null if Oozie reports the status before the download has been persisted
LOG.info("Download {} not found. [Oozie may be issuing callback before download persisted.]", downloadId);
return;
}
if (Download.Status.SUCCEEDED.equals(download.getStatus()) ||
Download.Status.FAILED.equals(download.getStatus()) ||
Download.Status.KILLED.equals(download.getStatus())) {
// Download has already completed, so perhaps callbacks in rapid succession have been processed out-of-order
LOG.warn("Download {} has finished, but Oozie has sent a RUNNING callback. Ignoring it.", downloadId);
return;
}
BaseEmailModel emailModel;
Download.Status newStatus = STATUSES_MAP.get(opStatus.get());
switch (newStatus) {
case KILLED:
// Keep a manually cancelled download status as opposed to a killed one
if (download.getStatus() == Download.Status.CANCELLED) {
CANCELLED_DOWNLOADS.inc();
return;
}
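// not cancelled by the user: fall through and treat the killed job as a failure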
case FAILED:
LOG.error(NOTIFY_ADMIN, "Got callback for failed query. JobId [{}], Status [{}]", jobId, status);
updateDownloadStatus(download, newStatus);
emailModel = emailManager.generateFailedDownloadEmailModel(download, portalUrl);
emailSender.send(emailModel);
FAILED_DOWNLOADS.inc();
break;
case SUCCEEDED:
SUCCESSFUL_DOWNLOADS.inc();
updateDownloadStatus(download, newStatus);
// notify about download
if (download.getRequest().getSendNotification()) {
emailModel = emailManager.generateSuccessfulDownloadEmailModel(download, portalUrl);
emailSender.send(emailModel);
}
break;
default:
updateDownloadStatus(download, newStatus);
break;
}
}
/**
* Returns the download size in bytes.
*/
@SneakyThrows
private Long getDownloadSize(Download download) {
File downloadFile = new File(downloadMount, getDownloadFilename(download));
if (Retry.decorateSupplier(DOWNLOAD_SIZE_RETRY, downloadFile::canRead).get()) {
long size = downloadFile.length();
// two-fold approach when the reported size is zero
if (size == 0) {
LOG.warn("Zero length reported by File.length(); reading the file via FileChannel to determine its size. File: {}", downloadFile);
try (FileChannel fileChannel = FileChannel.open(downloadFile.toPath())) {
return fileChannel.size();
}
}
}
LOG.warn("Download file not found {}", downloadFile.getName());
return 0L;
}
/**
* Persists the download information.
*/
private void persistDownload(DownloadRequest request, String downloadId) {
Download download = new Download();
download.setKey(downloadId);
download.setStatus(Download.Status.PREPARING);
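// make the download file eligible for erasure six months from now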
download.setEraseAfter(Date.from(OffsetDateTime.now(ZoneOffset.UTC).plusMonths(6).toInstant()));
download.setDownloadLink(downloadLink(wsUrl, downloadId, request.getType(), request.getFormat().getExtension()));
download.setRequest(request);
occurrenceDownloadService.create(download);
}
/**
* Updates the download status and file size.
*/
private void updateDownloadStatus(Download download, Download.Status newStatus) {
download.setStatus(newStatus);
download.setSize(getDownloadSize(download));
occurrenceDownloadService.update(download);
}
/**
* The download filename with extension.
*/
private String getDownloadFilename(Download download) {
return download.getKey() + download.getRequest().getFormat().getExtension();
}
}
|
occurrence-download-service/src/main/java/org/gbif/occurrence/download/service/DownloadRequestServiceImpl.java
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gbif.occurrence.download.service;
import org.gbif.api.exception.ServiceUnavailableException;
import org.gbif.api.model.occurrence.Download;
import org.gbif.api.model.occurrence.DownloadRequest;
import org.gbif.api.model.occurrence.PredicateDownloadRequest;
import org.gbif.api.service.occurrence.DownloadRequestService;
import org.gbif.api.service.registry.OccurrenceDownloadService;
import org.gbif.occurrence.common.download.DownloadUtils;
import org.gbif.occurrence.download.service.workflow.DownloadWorkflowParametersBuilder;
import org.gbif.occurrence.mail.BaseEmailModel;
import org.gbif.occurrence.mail.EmailSender;
import org.gbif.occurrence.mail.OccurrenceEmailManager;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.channels.FileChannel;
import java.time.Duration;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.Date;
import java.util.EnumSet;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
import org.apache.oozie.client.Job;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.OozieClientException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Component;
import org.springframework.web.server.ResponseStatusException;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Enums;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Counter;
import io.github.resilience4j.core.IntervalFunction;
import io.github.resilience4j.retry.Retry;
import io.github.resilience4j.retry.RetryConfig;
import lombok.SneakyThrows;
import static org.gbif.occurrence.common.download.DownloadUtils.downloadLink;
import static org.gbif.occurrence.download.service.Constants.NOTIFY_ADMIN;
@Component
public class DownloadRequestServiceImpl implements DownloadRequestService, CallbackService {
private static final Logger LOG = LoggerFactory.getLogger(DownloadRequestServiceImpl.class);
// magic prefix for download keys to indicate these aren't real download files
private static final String NON_DOWNLOAD_PREFIX = "dwca-";
protected static final Set<Download.Status> RUNNING_STATUSES = EnumSet.of(Download.Status.PREPARING,
Download.Status.RUNNING,
Download.Status.SUSPENDED);
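// retry the download-file availability check up to 3 times, with exponential backoff starting at 3 seconds, while it returns false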
private static final Retry DOWNLOAD_SIZE_RETRY =
Retry.of(
"downloadSizeCall",
RetryConfig.<Boolean>custom()
.maxAttempts(3)
.retryOnResult(result -> !result)
.intervalFunction(IntervalFunction.ofExponentialBackoff(Duration.ofSeconds(3)))
.build());
/**
* Map to provide conversions from oozie.Job.Status to Download.Status.
*/
@VisibleForTesting
protected static final ImmutableMap<Job.Status, Download.Status> STATUSES_MAP =
new ImmutableMap.Builder<Job.Status, Download.Status>()
.put(Job.Status.PREP, Download.Status.PREPARING)
.put(Job.Status.PREPPAUSED, Download.Status.PREPARING)
.put(Job.Status.PREMATER, Download.Status.PREPARING)
.put(Job.Status.PREPSUSPENDED, Download.Status.SUSPENDED)
.put(Job.Status.RUNNING, Download.Status.RUNNING)
.put(Job.Status.KILLED, Download.Status.KILLED)
.put(Job.Status.RUNNINGWITHERROR, Download.Status.RUNNING)
.put(Job.Status.DONEWITHERROR, Download.Status.FAILED)
.put(Job.Status.FAILED, Download.Status.FAILED)
.put(Job.Status.PAUSED, Download.Status.RUNNING)
.put(Job.Status.PAUSEDWITHERROR, Download.Status.RUNNING)
.put(Job.Status.SUCCEEDED, Download.Status.SUCCEEDED)
.put(Job.Status.SUSPENDED, Download.Status.SUSPENDED)
.put(Job.Status.SUSPENDEDWITHERROR, Download.Status.SUSPENDED)
.put(Job.Status.IGNORED, Download.Status.FAILED).build();
private static final Counter SUCCESSFUL_DOWNLOADS = Metrics.newCounter(CallbackService.class, "successful_downloads");
private static final Counter FAILED_DOWNLOADS = Metrics.newCounter(CallbackService.class, "failed_downloads");
private static final Counter CANCELLED_DOWNLOADS = Metrics.newCounter(CallbackService.class, "cancelled_downloads");
private final OozieClient client;
private final String portalUrl;
private final String wsUrl;
private final File downloadMount;
private final OccurrenceDownloadService occurrenceDownloadService;
private final DownloadWorkflowParametersBuilder parametersBuilder;
private final OccurrenceEmailManager emailManager;
private final EmailSender emailSender;
private final DownloadLimitsService downloadLimitsService;
@Autowired
public DownloadRequestServiceImpl(OozieClient client,
@Qualifier("oozie.default_properties") Map<String, String> defaultProperties,
@Value("${occurrence.download.portal.url}") String portalUrl,
@Value("${occurrence.download.ws.url}") String wsUrl,
@Value("${occurrence.download.ws.mount}") String wsMountDir,
OccurrenceDownloadService occurrenceDownloadService,
DownloadLimitsService downloadLimitsService,
OccurrenceEmailManager emailManager,
EmailSender emailSender) {
this.client = client;
this.portalUrl = portalUrl;
this.wsUrl = wsUrl;
this.downloadMount = new File(wsMountDir);
this.occurrenceDownloadService = occurrenceDownloadService;
this.parametersBuilder = new DownloadWorkflowParametersBuilder(defaultProperties);
this.downloadLimitsService = downloadLimitsService;
this.emailManager = emailManager;
this.emailSender = emailSender;
}
@Override
public void cancel(String downloadKey) {
try {
Download download = occurrenceDownloadService.get(downloadKey);
if (download != null) {
if (RUNNING_STATUSES.contains(download.getStatus())) {
updateDownloadStatus(download, Download.Status.CANCELLED);
client.kill(DownloadUtils.downloadToWorkflowId(downloadKey));
LOG.info("Download {} cancelled", downloadKey);
}
} else {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, String.format("Download %s not found", downloadKey));
}
} catch (OozieClientException e) {
throw new ServiceUnavailableException("Failed to cancel download " + downloadKey, e);
}
}
@Override
public String create(DownloadRequest request) {
LOG.debug("Trying to create download from request [{}]", request);
Preconditions.checkNotNull(request);
if (request instanceof PredicateDownloadRequest) {
PredicateValidator.validate(((PredicateDownloadRequest) request).getPredicate());
}
try {
String exceedComplexityLimit = downloadLimitsService.exceedsDownloadComplexity(request);
if (exceedComplexityLimit != null) {
LOG.info("Download request refused as it would exceed complexity limits");
throw new ResponseStatusException(HttpStatus.PAYLOAD_TOO_LARGE, "A download limitation is exceeded:\n" + exceedComplexityLimit + "\n");
}
String exceedSimultaneousLimit = downloadLimitsService.exceedsSimultaneousDownloadLimit(request.getCreator());
if (exceedSimultaneousLimit != null) {
LOG.info("Download request refused as it would exceed simultaneous limits");
// Keep HTTP 420 ("Enhance your calm") here.
throw new ResponseStatusException(HttpStatus.METHOD_FAILURE, "A download limitation is exceeded:\n" + exceedSimultaneousLimit + "\n");
}
String jobId = client.run(parametersBuilder.buildWorkflowParameters(request));
LOG.debug("Oozie job id is: [{}]", jobId);
String downloadId = DownloadUtils.workflowToDownloadId(jobId);
persistDownload(request, downloadId);
return downloadId;
} catch (OozieClientException e) {
LOG.error("Failed to create download job", e);
throw new ServiceUnavailableException("Failed to create download job", e);
}
}
@Nullable
@Override
public File getResultFile(String downloadKey) {
String filename;
// only look the download up in the registry when the key lacks the magic prefix used for non-download files
if (downloadKey == null || !downloadKey.toLowerCase().startsWith(NON_DOWNLOAD_PREFIX)) {
Download d = occurrenceDownloadService.get(downloadKey);
if (d == null) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Download " + downloadKey + " doesn't exist");
}
if (d.getStatus() == Download.Status.FILE_ERASED) {
throw new ResponseStatusException(HttpStatus.GONE, "Download " + downloadKey + " has been erased\n");
}
if (!d.isAvailable()) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Download " + downloadKey + " is not ready yet");
}
filename = getDownloadFilename(d);
} else {
filename = downloadKey + ".zip";
}
File localFile = new File(downloadMount, filename);
if (localFile.canRead()) {
return localFile;
} else {
throw new IllegalStateException(
"Unable to read download " + downloadKey + " from " + localFile.getAbsolutePath());
}
}
@Nullable
@Override
public InputStream getResult(String downloadKey) {
File localFile = getResultFile(downloadKey);
try {
return new FileInputStream(localFile);
} catch (IOException e) {
throw new IllegalStateException(
"Failed to read download " + downloadKey + " from " + localFile.getAbsolutePath(), e);
}
}
/**
* Processes a callback from Oozie and updates the download status accordingly.
*/
@Override
public void processCallback(String jobId, String status) {
Preconditions.checkArgument(!Strings.isNullOrEmpty(jobId), "<jobId> may not be null or empty");
Preconditions.checkArgument(!Strings.isNullOrEmpty(status), "<status> may not be null or empty");
Optional<Job.Status> opStatus = Enums.getIfPresent(Job.Status.class, status.toUpperCase());
Preconditions.checkArgument(opStatus.isPresent(), "<status> the requested status is not valid");
String downloadId = DownloadUtils.workflowToDownloadId(jobId);
LOG.debug("Processing callback for jobId [{}] with status [{}]", jobId, status);
Download download = occurrenceDownloadService.get(downloadId);
if (download == null) {
// Download can be null if Oozie reports the status before the download has been persisted
LOG.info("Download {} not found. [Oozie may be issuing callback before download persisted.]", downloadId);
return;
}
if (Download.Status.SUCCEEDED.equals(download.getStatus()) ||
Download.Status.FAILED.equals(download.getStatus()) ||
Download.Status.KILLED.equals(download.getStatus())) {
// Download has already completed, so perhaps callbacks in rapid succession have been processed out-of-order
LOG.warn("Download {} has finished, but Oozie has sent a RUNNING callback. Ignoring it.", downloadId);
return;
}
BaseEmailModel emailModel;
Download.Status newStatus = STATUSES_MAP.get(opStatus.get());
switch (newStatus) {
case KILLED:
// Keep a manually cancelled download status as opposed to a killed one
if (download.getStatus() == Download.Status.CANCELLED) {
CANCELLED_DOWNLOADS.inc();
return;
}
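// not cancelled by the user: fall through and treat the killed job as a failure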
case FAILED:
LOG.error(NOTIFY_ADMIN, "Got callback for failed query. JobId [{}], Status [{}]", jobId, status);
updateDownloadStatus(download, newStatus);
emailModel = emailManager.generateFailedDownloadEmailModel(download, portalUrl);
emailSender.send(emailModel);
FAILED_DOWNLOADS.inc();
break;
case SUCCEEDED:
SUCCESSFUL_DOWNLOADS.inc();
updateDownloadStatus(download, newStatus);
// notify about download
if (download.getRequest().getSendNotification()) {
emailModel = emailManager.generateSuccessfulDownloadEmailModel(download, portalUrl);
emailSender.send(emailModel);
}
break;
default:
updateDownloadStatus(download, newStatus);
break;
}
}
/**
* Returns the download size in bytes.
*/
@SneakyThrows
private Long getDownloadSize(Download download) {
File downloadFile = new File(downloadMount, getDownloadFilename(download));
if (Retry.decorateSupplier(DOWNLOAD_SIZE_RETRY, downloadFile::exists).get()) {
long size = downloadFile.length();
// two-fold approach when the reported size is zero
if (size == 0) {
LOG.warn("Zero length reported by File.length(); reading the file via FileChannel to determine its size. File: {}", downloadFile);
try (FileChannel fileChannel = FileChannel.open(downloadFile.toPath())) {
return fileChannel.size();
}
}
}
LOG.warn("Download file not found {}", downloadFile.getName());
return 0L;
}
/**
* Persists the download information.
*/
private void persistDownload(DownloadRequest request, String downloadId) {
Download download = new Download();
download.setKey(downloadId);
download.setStatus(Download.Status.PREPARING);
download.setEraseAfter(Date.from(OffsetDateTime.now(ZoneOffset.UTC).plusMonths(6).toInstant()));
download.setDownloadLink(downloadLink(wsUrl, downloadId, request.getType(), request.getFormat().getExtension()));
download.setRequest(request);
occurrenceDownloadService.create(download);
}
/**
* Updates the download status and file size.
*/
private void updateDownloadStatus(Download download, Download.Status newStatus) {
download.setStatus(newStatus);
download.setSize(getDownloadSize(download));
occurrenceDownloadService.update(download);
}
/**
* The download filename with extension.
*/
private String getDownloadFilename(Download download) {
return download.getKey() + download.getRequest().getFormat().getExtension();
}
}
|
changing exists by canRead
|
occurrence-download-service/src/main/java/org/gbif/occurrence/download/service/DownloadRequestServiceImpl.java
|
changing exists by canRead
|
|
Java
|
apache-2.0
|
f4701a1f97df18b394bbf3f3fcd8c23156d95510
| 0
|
charliemblack/geode,smgoller/geode,charliemblack/geode,masaki-yamakawa/geode,PurelyApplied/geode,davinash/geode,pdxrunner/geode,smanvi-pivotal/geode,jdeppe-pivotal/geode,jdeppe-pivotal/geode,jdeppe-pivotal/geode,charliemblack/geode,davebarnes97/geode,smanvi-pivotal/geode,masaki-yamakawa/geode,davinash/geode,masaki-yamakawa/geode,jdeppe-pivotal/geode,smgoller/geode,PurelyApplied/geode,PurelyApplied/geode,pivotal-amurmann/geode,pdxrunner/geode,deepakddixit/incubator-geode,masaki-yamakawa/geode,prasi-in/geode,deepakddixit/incubator-geode,davinash/geode,charliemblack/geode,prasi-in/geode,pdxrunner/geode,jdeppe-pivotal/geode,smgoller/geode,smanvi-pivotal/geode,jdeppe-pivotal/geode,davebarnes97/geode,smgoller/geode,pivotal-amurmann/geode,PurelyApplied/geode,davebarnes97/geode,shankarh/geode,smanvi-pivotal/geode,davinash/geode,deepakddixit/incubator-geode,davinash/geode,smanvi-pivotal/geode,shankarh/geode,davinash/geode,prasi-in/geode,PurelyApplied/geode,pdxrunner/geode,shankarh/geode,deepakddixit/incubator-geode,pivotal-amurmann/geode,masaki-yamakawa/geode,shankarh/geode,deepakddixit/incubator-geode,pdxrunner/geode,davebarnes97/geode,smgoller/geode,davebarnes97/geode,smgoller/geode,charliemblack/geode,prasi-in/geode,deepakddixit/incubator-geode,shankarh/geode,pivotal-amurmann/geode,davinash/geode,deepakddixit/incubator-geode,pivotal-amurmann/geode,davebarnes97/geode,jdeppe-pivotal/geode,PurelyApplied/geode,davebarnes97/geode,pdxrunner/geode,prasi-in/geode,masaki-yamakawa/geode,masaki-yamakawa/geode,PurelyApplied/geode,pdxrunner/geode,smgoller/geode
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.distributed.internal.membership.gms.membership;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.isA;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.awaitility.Awaitility;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.distributed.internal.DistributionConfig;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.distributed.internal.membership.NetView;
import org.apache.geode.distributed.internal.membership.gms.GMSMember;
import org.apache.geode.distributed.internal.membership.gms.ServiceConfig;
import org.apache.geode.distributed.internal.membership.gms.Services;
import org.apache.geode.distributed.internal.membership.gms.Services.Stopper;
import org.apache.geode.distributed.internal.membership.gms.interfaces.Authenticator;
import org.apache.geode.distributed.internal.membership.gms.interfaces.HealthMonitor;
import org.apache.geode.distributed.internal.membership.gms.interfaces.Manager;
import org.apache.geode.distributed.internal.membership.gms.interfaces.Messenger;
import org.apache.geode.distributed.internal.membership.gms.locator.FindCoordinatorRequest;
import org.apache.geode.distributed.internal.membership.gms.locator.FindCoordinatorResponse;
import org.apache.geode.distributed.internal.membership.gms.membership.GMSJoinLeave.SearchState;
import org.apache.geode.distributed.internal.membership.gms.membership.GMSJoinLeave.TcpClientWrapper;
import org.apache.geode.distributed.internal.membership.gms.membership.GMSJoinLeave.ViewCreator;
import org.apache.geode.distributed.internal.membership.gms.membership.GMSJoinLeave.ViewReplyProcessor;
import org.apache.geode.distributed.internal.membership.gms.messages.InstallViewMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.JoinRequestMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.JoinResponseMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.LeaveRequestMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.NetworkPartitionMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.RemoveMemberMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.ViewAckMessage;
import org.apache.geode.internal.Version;
import org.apache.geode.security.AuthenticationFailedException;
import org.apache.geode.test.junit.categories.FlakyTest;
import org.apache.geode.test.junit.categories.IntegrationTest;
import org.apache.geode.test.junit.categories.MembershipTest;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.internal.verification.Times;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.mockito.verification.Timeout;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.Timer;
import java.util.concurrent.TimeUnit;
@Category({IntegrationTest.class, MembershipTest.class})
public class GMSJoinLeaveJUnitTest {
private Services services;
private ServiceConfig mockConfig;
private DistributionConfig mockDistConfig;
private Authenticator authenticator;
private HealthMonitor healthMonitor;
private InternalDistributedMember gmsJoinLeaveMemberId;
private InternalDistributedMember[] mockMembers;
private InternalDistributedMember mockOldMember;
private Properties credentials = new Properties();
private Messenger messenger;
private GMSJoinLeave gmsJoinLeave;
private Manager manager;
private Stopper stopper;
private InternalDistributedMember removeMember = null;
private InternalDistributedMember leaveMember = null;
public void initMocks() throws IOException {
initMocks(false);
}
public void initMocks(boolean enableNetworkPartition) throws UnknownHostException {
initMocks(enableNetworkPartition, false);
}
public void initMocks(boolean enableNetworkPartition, boolean useTestGMSJoinLeave)
throws UnknownHostException {
mockDistConfig = mock(DistributionConfig.class);
when(mockDistConfig.getEnableNetworkPartitionDetection()).thenReturn(enableNetworkPartition);
when(mockDistConfig.getLocators()).thenReturn("localhost[8888]");
when(mockDistConfig.getSecurityUDPDHAlgo()).thenReturn("");
mockConfig = mock(ServiceConfig.class);
when(mockDistConfig.getStartLocator()).thenReturn("localhost[12345]");
when(mockConfig.getDistributionConfig()).thenReturn(mockDistConfig);
when(mockDistConfig.getLocators()).thenReturn("localhost[12345]");
when(mockDistConfig.getMcastPort()).thenReturn(0);
when(mockDistConfig.getMemberTimeout()).thenReturn(2000);
authenticator = mock(Authenticator.class);
gmsJoinLeaveMemberId = new InternalDistributedMember("localhost", 8887);
messenger = mock(Messenger.class);
when(messenger.getMemberID()).thenReturn(gmsJoinLeaveMemberId);
stopper = mock(Stopper.class);
when(stopper.isCancelInProgress()).thenReturn(false);
manager = mock(Manager.class);
healthMonitor = mock(HealthMonitor.class);
when(healthMonitor.getFailureDetectionPort()).thenReturn(Integer.valueOf(-1));
services = mock(Services.class);
when(services.getAuthenticator()).thenReturn(authenticator);
when(services.getConfig()).thenReturn(mockConfig);
when(services.getMessenger()).thenReturn(messenger);
when(services.getCancelCriterion()).thenReturn(stopper);
when(services.getManager()).thenReturn(manager);
when(services.getHealthMonitor()).thenReturn(healthMonitor);
Timer t = new Timer(true);
when(services.getTimer()).thenReturn(t);
mockMembers = new InternalDistributedMember[4];
for (int i = 0; i < mockMembers.length; i++) {
mockMembers[i] = new InternalDistributedMember("localhost", 8888 + i);
}
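// a member advertising an obsolete product version (Version.GFE_56), used to test rejection of old-version joins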
mockOldMember = new InternalDistributedMember("localhost", 8700, Version.GFE_56);
if (useTestGMSJoinLeave) {
gmsJoinLeave = new GMSJoinLeaveTest();
} else {
gmsJoinLeave = new GMSJoinLeave();
}
gmsJoinLeave.init(services);
gmsJoinLeave.start();
gmsJoinLeave.started();
}
@After
public void tearDown() throws Exception {
if (gmsJoinLeave != null) {
gmsJoinLeave.stop();
gmsJoinLeave.stopped();
}
}
@Test
public void testFindCoordinatorInView() throws Exception {
initMocks();
int viewId = 1;
List<InternalDistributedMember> mbrs = new LinkedList<>();
mbrs.add(mockMembers[0]);
mbrs.add(mockMembers[1]);
mbrs.add(mockMembers[2]);
when(services.getMessenger()).thenReturn(messenger);
// prepare the view
NetView netView = new NetView(mockMembers[0], viewId, mbrs);
SearchState state = gmsJoinLeave.searchState;
state.view = netView;
state.viewId = netView.getViewId();
InternalDistributedMember coordinator = mockMembers[2];
coordinator.setVmViewId(viewId);
// already tried joining using members 0 and 1
Set<InternalDistributedMember> set = new HashSet<>();
mockMembers[0].setVmViewId(viewId - 1);
set.add(mockMembers[0]);
mockMembers[1].setVmViewId(viewId - 1);
set.add(mockMembers[1]);
state.alreadyTried = set;
state.hasContactedAJoinedLocator = true;
// simulate a response being received
InternalDistributedMember sender = mockMembers[2];
FindCoordinatorResponse resp = new FindCoordinatorResponse(coordinator, sender, null, 0);
gmsJoinLeave.processMessage(resp);
// tell GMSJoinLeave that a unit test is running so it won't clear the
// responses collection
gmsJoinLeave.unitTesting.add("findCoordinatorFromView");
// now for the test
boolean result = gmsJoinLeave.findCoordinatorFromView();
assertTrue("should have found coordinator " + mockMembers[2], result);
assertTrue("should have found " + coordinator + " but found " + state.possibleCoordinator,
state.possibleCoordinator == coordinator);
}
@Test
public void testProcessJoinMessageRejectOldMemberVersion() throws IOException {
initMocks();
gmsJoinLeave.processMessage(new JoinRequestMessage(mockOldMember, mockOldMember, null, -1, 0));
assertTrue("JoinRequest should not have been added to view request",
gmsJoinLeave.getViewRequests().size() == 0);
verify(messenger).send(any(JoinResponseMessage.class));
}
@Test
public void testViewWithoutMemberInitiatesForcedDisconnect() throws Exception {
initMocks();
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
List<InternalDistributedMember> members = Arrays.asList(mockMembers);
NetView v = new NetView(mockMembers[0], 2, members);
InstallViewMessage message = getInstallViewMessage(v, null, false);
gmsJoinLeave.processMessage(message);
verify(manager).forceDisconnect(any(String.class));
}
@Test
public void testProcessJoinMessageWithBadAuthentication() throws IOException {
initMocks();
when(services.getAuthenticator()).thenReturn(authenticator);
when(authenticator.authenticate(mockMembers[0], credentials))
.thenThrow(new AuthenticationFailedException("we want to fail auth here"));
when(services.getMessenger()).thenReturn(messenger);
gmsJoinLeave
.processMessage(new JoinRequestMessage(mockMembers[0], mockMembers[0], credentials, -1, 0));
assertTrue("JoinRequest should not have been added to view request",
gmsJoinLeave.getViewRequests().size() == 0);
verify(messenger).send(any(JoinResponseMessage.class));
}
@Test
public void testProcessJoinMessageWithAuthenticationButNullCredentials() throws IOException {
initMocks();
when(services.getAuthenticator()).thenReturn(authenticator);
when(authenticator.authenticate(mockMembers[0], null))
.thenThrow(new AuthenticationFailedException("we want to fail auth here"));
when(services.getMessenger()).thenReturn(messenger);
gmsJoinLeave
.processMessage(new JoinRequestMessage(mockMembers[0], mockMembers[0], null, -1, 0));
assertTrue("JoinRequest should not have been added to view request",
gmsJoinLeave.getViewRequests().size() == 0);
verify(messenger).send(any(JoinResponseMessage.class));
}
// This test does not test the actual join process but rather that the join response gets logged
@Test
public void testProcessJoinResponseIsRecorded() throws IOException {
initMocks();
when(services.getAuthenticator()).thenReturn(authenticator);
when(authenticator.authenticate(mockMembers[0], null))
.thenThrow(new AuthenticationFailedException("we want to fail auth here"));
when(services.getMessenger()).thenReturn(messenger);
JoinResponseMessage[] joinResponse = gmsJoinLeave.getJoinResponseMessage();
JoinResponseMessage jrm = new JoinResponseMessage(mockMembers[0], new byte[9], 233);
gmsJoinLeave.processMessage(jrm);
// this one should NOT be recorded; it merely informs the member of a successful join
Assert.assertEquals(null, joinResponse[0]);
jrm = new JoinResponseMessage("rejected...", 0);
gmsJoinLeave.processMessage(jrm);
// this one should be recorded
Assert.assertEquals(jrm, joinResponse[0]);
gmsJoinLeave.setJoinResponseMessage(null);
jrm = new JoinResponseMessage(mockMembers[0], new NetView(), 0);
gmsJoinLeave.processMessage(jrm);
// this one should be recorded
Assert.assertEquals(jrm, joinResponse[0]);
}
/**
* Prepares and installs a view.
*
* @throws IOException
*/
private void prepareAndInstallView(InternalDistributedMember coordinator,
List<InternalDistributedMember> members) throws IOException {
int viewId = 1;
when(services.getMessenger()).thenReturn(messenger);
// prepare the view
NetView netView = new NetView(coordinator, viewId, members);
InstallViewMessage installViewMessage = getInstallViewMessage(netView, credentials, true);
gmsJoinLeave.processMessage(installViewMessage);
verify(messenger).send(any(ViewAckMessage.class));
// install the view
installViewMessage = getInstallViewMessage(netView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
Assert.assertEquals(netView, gmsJoinLeave.getView());
}
private List<InternalDistributedMember> createMemberList(InternalDistributedMember... members) {
List<InternalDistributedMember> memberList =
new ArrayList<InternalDistributedMember>(members.length);
for (InternalDistributedMember member : members) {
memberList.add(member);
}
return memberList;
}
@Category(FlakyTest.class) // GEODE-2653: flaky due to Thread.sleep
@Test
public void testRemoveMember() throws Exception {
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
MethodExecuted removeMessageSent = new MethodExecuted();
when(messenger.send(any(RemoveMemberMessage.class))).thenAnswer(removeMessageSent);
gmsJoinLeave.remove(mockMembers[0], "removing for test");
Thread.sleep(ServiceConfig.MEMBER_REQUEST_COLLECTION_INTERVAL * 2);
assertTrue(removeMessageSent.methodExecuted);
}
@Test
public void testIsMemberLeaving() throws Exception {
initMocks();
prepareAndInstallView(mockMembers[0],
createMemberList(mockMembers[0], mockMembers[1], gmsJoinLeaveMemberId));
MethodExecuted removeMessageSent = new MethodExecuted();
when(messenger.send(any(RemoveMemberMessage.class))).thenAnswer(removeMessageSent);
assertFalse(gmsJoinLeave.isMemberLeaving(mockMembers[0]));
assertFalse(gmsJoinLeave.isMemberLeaving(mockMembers[1]));
gmsJoinLeave.remove(mockMembers[0], "removing for test");
assertTrue(gmsJoinLeave.isMemberLeaving(mockMembers[0]));
LeaveRequestMessage msg =
new LeaveRequestMessage(gmsJoinLeave.getMemberID(), mockMembers[1], "leaving for test");
msg.setSender(mockMembers[1]);
gmsJoinLeave.processMessage(msg);
assertTrue(gmsJoinLeave.isMemberLeaving(mockMembers[1]));
}
@Test
public void testRemoveAndLeaveIsNotACrash() throws Exception {
// simultaneous leave & remove requests for a member
// should not result in its being seen as a crashed member
initMocks();
final int viewInstallationTime = 15000;
when(healthMonitor.checkIfAvailable(any(InternalDistributedMember.class), any(String.class),
any(Boolean.class))).thenReturn(true);
gmsJoinLeave.delayViewCreationForTest(5000); // ensures multiple requests are queued for a view
// change
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
NetView oldView = null;
long giveup = System.currentTimeMillis() + viewInstallationTime;
while (System.currentTimeMillis() < giveup && oldView == null) {
Thread.sleep(500);
oldView = gmsJoinLeave.getView();
}
assertTrue(oldView != null); // it should have become coordinator and installed a view
NetView newView = new NetView(oldView, oldView.getViewId() + 1);
newView.add(mockMembers[1]);
newView.add(mockMembers[2]);
gmsJoinLeave.installView(newView);
gmsJoinLeave.memberShutdown(mockMembers[1], "shutting down for test");
gmsJoinLeave.remove(mockMembers[1], "removing for test");
giveup = System.currentTimeMillis() + viewInstallationTime;
while (System.currentTimeMillis() < giveup
&& gmsJoinLeave.getView().getViewId() == newView.getViewId()) {
Thread.sleep(500);
}
assertTrue(gmsJoinLeave.getView().getViewId() > newView.getViewId());
assertFalse(gmsJoinLeave.getView().getCrashedMembers().contains(mockMembers[1]));
}
@Test
public void testRejectOlderView() throws IOException {
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
List<InternalDistributedMember> mbrs = new LinkedList<>();
mbrs.add(mockMembers[0]);
mbrs.add(mockMembers[1]);
// try to install an older view where viewId < currentView.viewId
NetView olderNetView = new NetView(mockMembers[0], 0, mbrs);
InstallViewMessage installViewMessage = getInstallViewMessage(olderNetView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
Assert.assertNotEquals(gmsJoinLeave.getView(), olderNetView);
}
@Test
public void testForceDisconnectedFromNewView() throws IOException {
initMocks(true);// enabledNetworkPartition;
Manager mockManager = mock(Manager.class);
when(services.getManager()).thenReturn(mockManager);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
int viewId = 2;
List<InternalDistributedMember> mbrs = new LinkedList<>();
mbrs.add(mockMembers[1]);
mbrs.add(mockMembers[2]);
mbrs.add(mockMembers[3]);
// install the view
NetView netView = new NetView(mockMembers[0], viewId, mbrs);
InstallViewMessage installViewMessage = getInstallViewMessage(netView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
Assert.assertNotEquals(netView, gmsJoinLeave.getView());
verify(mockManager).forceDisconnect(any(String.class));
}
@SuppressWarnings("rawtypes")
private class MethodExecuted implements Answer {
private boolean methodExecuted = false;
@Override
public Object answer(InvocationOnMock invocation) {
// do we only expect a join response on a failure?
methodExecuted = true;
return null;
}
}
@Test
public void testNonMemberCantRemoveMember() throws Exception {
String reason = "testing";
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
// test that a non-member can't remove another member
RemoveMemberMessage msg = new RemoveMemberMessage(mockMembers[0], mockMembers[1], reason);
msg.setSender(new InternalDistributedMember("localhost", 9000));
gmsJoinLeave.processMessage(msg);
assertTrue("RemoveMemberMessage should not have been added to view requests",
gmsJoinLeave.getViewRequests().size() == 0);
}
@Test
public void testDuplicateLeaveRequestDoesNotCauseNewView() throws Exception {
String reason = "testing";
initMocks();
gmsJoinLeave.unitTesting.add("noRandomViewChange");
prepareAndInstallView(gmsJoinLeaveMemberId,
createMemberList(gmsJoinLeaveMemberId, mockMembers[0]));
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
LeaveRequestMessage msg =
new LeaveRequestMessage(gmsJoinLeave.getMemberID(), mockMembers[0], reason);
msg.setSender(mockMembers[0]);
gmsJoinLeave.processMessage(msg);
msg = new LeaveRequestMessage(gmsJoinLeave.getMemberID(), mockMembers[0], reason);
msg.setSender(mockMembers[0]);
gmsJoinLeave.processMessage(msg);
waitForViewAndNoRequestsInProgress(7);
NetView view = gmsJoinLeave.getView();
assertTrue("expected member to be removed: " + mockMembers[0] + "; view: " + view,
!view.contains(mockMembers[0]));
assertTrue("expected member to be in shutdownMembers collection: " + mockMembers[0] + "; view: "
+ view, view.getShutdownMembers().contains(mockMembers[0]));
}
@Test
public void testDuplicateRemoveRequestDoesNotCauseNewView() throws Exception {
String reason = "testing";
initMocks();
prepareAndInstallView(gmsJoinLeaveMemberId,
createMemberList(gmsJoinLeaveMemberId, mockMembers[0]));
gmsJoinLeave.getView().add(mockMembers[1]);
gmsJoinLeave.unitTesting.add("noRandomViewChange");
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
RemoveMemberMessage msg =
new RemoveMemberMessage(gmsJoinLeave.getMemberID(), mockMembers[0], reason);
msg.setSender(mockMembers[0]);
gmsJoinLeave.processMessage(msg);
msg = new RemoveMemberMessage(gmsJoinLeave.getMemberID(), mockMembers[0], reason);
msg.setSender(mockMembers[0]);
gmsJoinLeave.processMessage(msg);
waitForViewAndNoRequestsInProgress(7);
NetView view = gmsJoinLeave.getView();
assertTrue("expected member to be removed: " + mockMembers[0] + "; view: " + view,
!view.contains(mockMembers[0]));
assertTrue(
"expected member to be in crashedMembers collection: " + mockMembers[0] + "; view: " + view,
view.getCrashedMembers().contains(mockMembers[0]));
}
// @Category(FlakyTest.class) // GEODE-2074: timed out waiting for view #7
@Test
public void testDuplicateJoinRequestDoesNotCauseNewView() throws Exception {
initMocks();
when(healthMonitor.checkIfAvailable(any(InternalDistributedMember.class), any(String.class),
any(Boolean.class))).thenReturn(true);
gmsJoinLeave.unitTesting.add("noRandomViewChange");
prepareAndInstallView(gmsJoinLeaveMemberId,
createMemberList(gmsJoinLeaveMemberId, mockMembers[0]));
gmsJoinLeave.getView().add(mockMembers[1]);
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
JoinRequestMessage msg =
new JoinRequestMessage(gmsJoinLeaveMemberId, mockMembers[2], null, -1, 0);
msg.setSender(mockMembers[2]);
gmsJoinLeave.processMessage(msg);
msg = new JoinRequestMessage(gmsJoinLeaveMemberId, mockMembers[2], null, -1, 0);
msg.setSender(mockMembers[2]);
gmsJoinLeave.processMessage(msg);
waitForViewAndNoRequestsInProgress(7);
NetView view = gmsJoinLeave.getView();
assertTrue("expected member to be added: " + mockMembers[2] + "; view: " + view,
view.contains(mockMembers[2]));
List<InternalDistributedMember> members = view.getMembers();
int occurrences = 0;
for (InternalDistributedMember mbr : members) {
if (mbr.equals(mockMembers[2])) {
occurrences += 1;
}
}
assertTrue("expected member to only be in the view once: " + mockMembers[2] + "; view: " + view,
occurrences == 1);
verify(healthMonitor, times(5)).checkIfAvailable(any(InternalDistributedMember.class),
any(String.class), any(Boolean.class));
}
private void waitForViewAndNoRequestsInProgress(int viewId) throws InterruptedException {
// wait for the view processing thread to collect and process the requests
int sleeps = 0;
while (!gmsJoinLeave.isStopping() && !gmsJoinLeave.getViewCreator().isWaiting()
&& (!gmsJoinLeave.getViewRequests().isEmpty()
|| gmsJoinLeave.getView().getViewId() != viewId)) {
if (sleeps++ > 20) {
throw new RuntimeException("timeout waiting for view #" + viewId + " current view: "
+ gmsJoinLeave.getView() + "; view requests: " + gmsJoinLeave.getViewRequests());
}
Thread.sleep(1000);
}
}
@Test
public void testRemoveCausesForcedDisconnect() throws Exception {
String reason = "testing";
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
gmsJoinLeave.getView().add(mockMembers[1]);
RemoveMemberMessage msg =
new RemoveMemberMessage(mockMembers[0], gmsJoinLeave.getMemberID(), reason);
msg.setSender(mockMembers[1]);
gmsJoinLeave.processMessage(msg);
verify(manager).forceDisconnect(reason);
}
@Test
public void testLeaveCausesForcedDisconnect() throws Exception {
String reason = "testing";
initMocks();
prepareAndInstallView(gmsJoinLeaveMemberId,
createMemberList(gmsJoinLeaveMemberId, mockMembers[0]));
gmsJoinLeave.getView().add(mockMembers[1]);
LeaveRequestMessage msg =
new LeaveRequestMessage(gmsJoinLeave.getMemberID(), gmsJoinLeave.getMemberID(), reason);
msg.setSender(mockMembers[1]);
gmsJoinLeave.processMessage(msg);
verify(manager).forceDisconnect(reason);
}
@Test
public void testLeaveOfNonMemberIsNoOp() throws Exception {
String reason = "testing";
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
mockMembers[1].setVmViewId(gmsJoinLeave.getView().getViewId() - 1);
LeaveRequestMessage msg =
new LeaveRequestMessage(gmsJoinLeave.getMemberID(), mockMembers[1], reason);
msg.setSender(mockMembers[1]);
gmsJoinLeave.processMessage(msg);
assertTrue("Expected leave request from non-member to be ignored",
gmsJoinLeave.getViewRequests().isEmpty());
}
@Test
public void testBecomeCoordinatorOnStartup() throws Exception {
initMocks();
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
long giveup = System.currentTimeMillis() + 20000;
while (System.currentTimeMillis() < giveup && !gmsJoinLeave.isCoordinator()) {
Thread.sleep(1000);
}
assertTrue(gmsJoinLeave.isCoordinator());
}
@Test
public void testBecomeCoordinator() throws Exception {
String reason = "testing";
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
NetView view = gmsJoinLeave.getView();
view.add(gmsJoinLeaveMemberId);
InternalDistributedMember creator = view.getCreator();
LeaveRequestMessage msg = new LeaveRequestMessage(creator, creator, reason);
msg.setSender(creator);
gmsJoinLeave.processMessage(msg);
assertTrue("Expected becomeCoordinator to be invoked", gmsJoinLeave.isCoordinator());
}
@Test
public void testBecomeCoordinatorThroughRemove() throws Exception {
String reason = "testing";
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
NetView view = gmsJoinLeave.getView();
view.add(gmsJoinLeaveMemberId);
InternalDistributedMember creator = view.getCreator();
RemoveMemberMessage msg = new RemoveMemberMessage(creator, creator, reason);
msg.setSender(creator);
gmsJoinLeave.processMessage(msg);
assertTrue("Expected becomeCoordinator to be invoked", gmsJoinLeave.isCoordinator());
}
@Test
public void testBecomeCoordinatorThroughViewChange() throws Exception {
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
NetView oldView = gmsJoinLeave.getView();
oldView.add(gmsJoinLeaveMemberId);
NetView view = new NetView(oldView, oldView.getViewId() + 1);
InternalDistributedMember creator = view.getCreator();
view.remove(creator);
InstallViewMessage msg = getInstallViewMessage(view, creator, false);
msg.setSender(creator);
gmsJoinLeave.processMessage(msg);
assertTrue("Expected it to become coordinator", gmsJoinLeave.isCoordinator());
}
@Test
public void testBecomeParticipantThroughViewChange() throws Exception {
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
NetView oldView = gmsJoinLeave.getView();
oldView.add(gmsJoinLeaveMemberId);
InternalDistributedMember creator = oldView.getCreator();
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
NetView view = new NetView(2, gmsJoinLeave.getView().getViewId() + 1);
view.setCreator(creator);
view.add(creator);
view.add(gmsJoinLeaveMemberId);
InstallViewMessage msg = getInstallViewMessage(view, creator, false);
msg.setSender(creator);
gmsJoinLeave.processMessage(msg);
assertTrue("Expected it to stop being coordinator", !gmsJoinLeave.isCoordinator());
}
private InstallViewMessage getInstallViewMessage(NetView view, Object credentials,
boolean preparing) {
InstallViewMessage installViewMessage = new InstallViewMessage(view, credentials, preparing);
installViewMessage.setSender(gmsJoinLeaveMemberId);
return installViewMessage;
}
@Test
public void testNetworkPartitionDetected() throws IOException {
initMocks(true);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
// set up a view with sufficient members, then create a new view
// where enough weight is lost to cause a network partition
List<InternalDistributedMember> mbrs = new LinkedList<>();
mbrs.add(mockMembers[0]);
mbrs.add(mockMembers[1]);
mbrs.add(mockMembers[2]);
mbrs.add(gmsJoinLeaveMemberId);
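// give this member extra weight so its loss (together with another member) removes enough weight to look like a partition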
((GMSMember) mockMembers[1].getNetMember()).setMemberWeight((byte) 20);
NetView newView = new NetView(mockMembers[0], gmsJoinLeave.getView().getViewId() + 1, mbrs);
InstallViewMessage installViewMessage = getInstallViewMessage(newView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
Set<InternalDistributedMember> crashes = new HashSet<>();
crashes.add(mockMembers[1]);
crashes.add(mockMembers[2]);
mbrs = new LinkedList<>(mbrs);
mbrs.remove(mockMembers[1]);
mbrs.remove(mockMembers[2]);
NetView partitionView =
new NetView(mockMembers[0], newView.getViewId() + 1, mbrs, Collections.emptySet(), crashes);
installViewMessage = getInstallViewMessage(partitionView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
verify(manager).forceDisconnect(isA(String.class));
verify(manager).quorumLost(crashes, newView);
}
// Possibly modify test to check for network partition message in the force disconnect
@Test
public void testNetworkPartitionMessageReceived() throws Exception {
initMocks();
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
NetworkPartitionMessage message = new NetworkPartitionMessage();
gmsJoinLeave.processMessage(message);
verify(manager).forceDisconnect(any(String.class));
}
@Test
public void testQuorumLossNotificationWithNetworkPartitionDetectionDisabled() throws IOException {
initMocks(false);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
// set up a view with sufficient members, then create a new view
// where enough weight is lost to cause a network partition
List<InternalDistributedMember> mbrs = new LinkedList<>();
Set<InternalDistributedMember> shutdowns = new HashSet<>();
Set<InternalDistributedMember> crashes = new HashSet<>();
mbrs.add(mockMembers[0]);
mbrs.add(mockMembers[1]);
mbrs.add(mockMembers[2]);
mbrs.add(gmsJoinLeaveMemberId);
((GMSMember) mockMembers[1].getNetMember()).setMemberWeight((byte) 20);
NetView newView = new NetView(mockMembers[0], gmsJoinLeave.getView().getViewId() + 1, mbrs,
shutdowns, crashes);
InstallViewMessage installViewMessage = getInstallViewMessage(newView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
crashes = new HashSet<>(crashes);
crashes.add(mockMembers[1]);
crashes.add(mockMembers[2]);
mbrs = new LinkedList<>(mbrs);
mbrs.remove(mockMembers[1]);
mbrs.remove(mockMembers[2]);
NetView partitionView =
new NetView(mockMembers[0], newView.getViewId() + 1, mbrs, shutdowns, crashes);
installViewMessage = getInstallViewMessage(partitionView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
verify(manager, never()).forceDisconnect(any(String.class));
verify(manager).quorumLost(crashes, newView);
}
@Test
public void testConflictingPrepare() throws Exception {
initMocks(true);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
NetView gmsView = gmsJoinLeave.getView();
NetView newView = new NetView(gmsView, gmsView.getViewId() + 6);
InstallViewMessage msg = getInstallViewMessage(newView, null, true);
gmsJoinLeave.processMessage(msg);
NetView alternateView = new NetView(gmsView, gmsView.getViewId() + 1);
msg = getInstallViewMessage(alternateView, null, true);
gmsJoinLeave.processMessage(msg);
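// the prepare carrying the higher view id should remain the prepared view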
assertTrue(gmsJoinLeave.getPreparedView().equals(newView));
}
@Test
public void testNoViewAckCausesRemovalMessage() throws Exception {
initMocks(true);
when(healthMonitor.checkIfAvailable(any(InternalDistributedMember.class), any(String.class),
any(Boolean.class))).thenReturn(false);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
NetView oldView = gmsJoinLeave.getView();
NetView newView = new NetView(oldView, oldView.getViewId() + 1);
// the new view will remove the old coordinator (normal shutdown) and add a new member
// who will not ack the view. This should cause it to be removed from the system
// with a RemoveMemberMessage
newView.add(mockMembers[2]);
newView.remove(mockMembers[0]);
InstallViewMessage installViewMessage = getInstallViewMessage(newView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
long giveup = System.currentTimeMillis() + (2000 * 3); // this test's member-timeout * 3
while (System.currentTimeMillis() < giveup
&& gmsJoinLeave.getView().getViewId() == oldView.getViewId()) {
Thread.sleep(1000);
}
assertTrue(gmsJoinLeave.isCoordinator());
// wait for suspect processing
Thread.sleep(10000);
verify(healthMonitor, atLeast(1)).checkIfAvailable(isA(DistributedMember.class),
isA(String.class), isA(Boolean.class));
// verify(messenger, atLeast(1)).send(isA(RemoveMemberMessage.class));
}
/**
* This tests a member shutdown using the memberShutdown call (simulating the call from
* DistributionManager). The gmsJoinLeaveMemberId is not the coordinator but should become the
* coordinator after the shutdown.
*/
@Test
public void testCoordinatorShutsdownAndWeBecomeCoordinatorAndSendOutCorrectView()
throws Exception {
initMocks(false);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId,
mockMembers[1], mockMembers[2], mockMembers[3]));
Assert.assertFalse(gmsJoinLeave.isCoordinator());
// The coordinator shuts down
gmsJoinLeave.memberShutdown(mockMembers[0], "Shutdown");
NetView nextView = gmsJoinLeave.getViewCreator().initialView;
assertTrue(gmsJoinLeave.isCoordinator());
assertTrue(nextView.getCoordinator().equals(gmsJoinLeaveMemberId));
assertTrue(nextView.getMembers().contains(mockMembers[1]));
assertTrue(nextView.getMembers().contains(mockMembers[2]));
assertTrue(nextView.getMembers().contains(mockMembers[3]));
}
/**
* This tests a member shutdown using the memberShutdown call (simulating the call from
* DistributionManager). The gmsJoinLeaveMemberId is not the coordinator but should become the
* coordinator and remove all members that sent leave requests before it became the
* coordinator.
*/
@Test
public void testCoordinatorAndOthersShutdownAndWeBecomeCoordinatorProcessQueuedUpLeaveMessages()
throws Exception {
initMocks(false);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], mockMembers[1],
mockMembers[2], gmsJoinLeaveMemberId, mockMembers[3]));
Assert.assertFalse(gmsJoinLeave.isCoordinator());
// The coordinator and other members shutdown
gmsJoinLeave.memberShutdown(mockMembers[1], "Shutdown");
gmsJoinLeave.memberShutdown(mockMembers[2], "Shutdown");
gmsJoinLeave.memberShutdown(mockMembers[0], "Shutdown");
NetView nextView = gmsJoinLeave.getViewCreator().initialView;
assertTrue(gmsJoinLeave.isCoordinator());
assertTrue(nextView.getCoordinator().equals(gmsJoinLeaveMemberId));
Assert.assertFalse(nextView.getMembers().contains(mockMembers[1]));
Assert.assertFalse(nextView.getMembers().contains(mockMembers[2]));
assertTrue(nextView.getMembers().contains(mockMembers[3]));
}
/**
* When a member leaves at the same time as a view is installed, the departed member should be
* recorded on all members. If the coordinator also happens to leave, the new coordinator should
* still be able to process the new view correctly.
*/
@Test
public void testTimingWhereInstallViewComeAndDoesNotClearOutLeftMembersList() throws Exception {
initMocks(false);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], mockMembers[1],
mockMembers[2], gmsJoinLeaveMemberId, mockMembers[3]));
Assert.assertFalse(gmsJoinLeave.isCoordinator());
// The coordinator and other members shutdown
gmsJoinLeave.memberShutdown(mockMembers[1], "Shutdown");
gmsJoinLeave.memberShutdown(mockMembers[2], "Shutdown");
// Install a view that still contains one of the departed members (as if, say, a new member
// triggered a new view before the coordinator left)
NetView netView = new NetView(mockMembers[0], 3/* new view id */,
createMemberList(mockMembers[0], gmsJoinLeaveMemberId, mockMembers[1], mockMembers[3]));
InstallViewMessage installViewMessage = getInstallViewMessage(netView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
// Now coordinator leaves
gmsJoinLeave.memberShutdown(mockMembers[0], "Shutdown");
NetView nextView = gmsJoinLeave.getViewCreator().initialView;
assertTrue(gmsJoinLeave.isCoordinator());
assertTrue(nextView.getCoordinator().equals(gmsJoinLeaveMemberId));
Assert.assertFalse(nextView.getMembers().contains(mockMembers[1]));
Assert.assertFalse(nextView.getMembers().contains(mockMembers[2]));
assertTrue(nextView.getMembers().contains(mockMembers[3]));
}
@Test
public void testViewBroadcaster() throws Exception {
initMocks();
List<InternalDistributedMember> members = new ArrayList<>(Arrays.asList(mockMembers));
gmsJoinLeaveMemberId.setVmViewId(1);
members.add(gmsJoinLeaveMemberId);
prepareAndInstallView(gmsJoinLeaveMemberId, members);
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
GMSJoinLeave.ViewBroadcaster b = gmsJoinLeave.new ViewBroadcaster();
b.run();
verify(messenger).sendUnreliably(isA(InstallViewMessage.class));
}
private void installView(int viewId, InternalDistributedMember coordinator,
List<InternalDistributedMember> members) throws IOException {
// prepare the view
NetView netView = new NetView(coordinator, viewId, members);
InstallViewMessage installViewMessage = getInstallViewMessage(netView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
// verify(messenger).send(any(ViewAckMessage.class));
}
@Test
public void testIgnoreoldView() throws Exception {
initMocks(false);
installView(3, mockMembers[0], createMemberList(mockMembers[0], mockMembers[1], mockMembers[2],
gmsJoinLeaveMemberId, mockMembers[3]));
// now try to install an old view
installView(1, mockMembers[0], createMemberList(mockMembers[0], mockMembers[1], mockMembers[2],
gmsJoinLeaveMemberId, mockMembers[3]));
assertFalse("Expected view id is 3 but found " + gmsJoinLeave.getView().getViewId(),
gmsJoinLeave.getView().getViewId() == 1);
}
@Test
public void testClearViewRequests() throws Exception {
try {
initMocks(false);
System.setProperty(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY, "true");
gmsJoinLeave.join();
gmsJoinLeave.processMessage(
new JoinRequestMessage(mockMembers[0], mockMembers[0], credentials, -1, 0));
int viewRequests = gmsJoinLeave.getViewRequests().size();
assertTrue("There should be 1 viewRequest but found " + viewRequests, viewRequests == 1);
Thread.sleep(2 * ServiceConfig.MEMBER_REQUEST_COLLECTION_INTERVAL);
viewRequests = gmsJoinLeave.getViewRequests().size();
assertEquals("Found view requests: " + gmsJoinLeave.getViewRequests(), 0, viewRequests);
} finally {
System.getProperties().remove(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY);
}
}
/***
* validating ViewReplyProcessor's memberSuspected, processLeaveRequest, processRemoveRequest,
* processViewResponse methods
*/
@Test
public void testViewReplyProcessor() throws Exception {
try {
initMocks(false);
System.setProperty(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY, "true");
gmsJoinLeave.join();
Set<InternalDistributedMember> recips = new HashSet<>();
recips.add(mockMembers[0]);
recips.add(mockMembers[1]);
recips.add(mockMembers[2]);
recips.add(mockMembers[3]);
ViewReplyProcessor prepareProcessor = gmsJoinLeave.getPrepareViewReplyProcessor();
prepareProcessor.initialize(1, recips);
assertTrue("Prepare processor should be waiting ",
gmsJoinLeave.testPrepareProcessorWaiting());
prepareProcessor.memberSuspected(mockMembers[0]);
prepareProcessor.processLeaveRequest(mockMembers[1]);
prepareProcessor.processRemoveRequest(mockMembers[2]);
prepareProcessor.processViewResponse(1, mockMembers[3], null);
assertFalse("Prepare processor should not be waiting ",
gmsJoinLeave.testPrepareProcessorWaiting());
} finally {
System.getProperties().remove(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY);
}
}
/***
* validating ViewReplyProcessor's processPendingRequests method
*/
@Test
public void testViewReplyProcessor2() throws Exception {
try {
initMocks(false);
System.setProperty(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY, "true");
gmsJoinLeave.join();
Set<InternalDistributedMember> recips = new HashSet<>();
recips.add(mockMembers[0]);
recips.add(mockMembers[1]);
recips.add(mockMembers[2]);
recips.add(mockMembers[3]);
ViewReplyProcessor prepareProcessor = gmsJoinLeave.getPrepareViewReplyProcessor();
prepareProcessor.initialize(1, recips);
assertTrue("Prepare processor should be waiting ",
gmsJoinLeave.testPrepareProcessorWaiting());
Set<InternalDistributedMember> pendingLeaves = new HashSet<>();
pendingLeaves.add(mockMembers[0]);
Set<InternalDistributedMember> pendingRemovals = new HashSet<>();
pendingRemovals.add(mockMembers[1]);
prepareProcessor.processPendingRequests(pendingLeaves, pendingRemovals);
prepareProcessor.processViewResponse(1, mockMembers[2], null);
prepareProcessor.processViewResponse(1, mockMembers[3], null);
assertFalse("Prepare processor should not be waiting ",
gmsJoinLeave.testPrepareProcessorWaiting());
} finally {
System.getProperties().remove(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY);
}
}
// With the removal of the JoinResponse message from GMSJoinLeave.processJoinRequest (GEODE-870),
// this test now seems to be invalid.
// @Test
// public void testJoinResponseMsgWithBecomeCoordinator() throws Exception {
// initMocks(false);
// gmsJoinLeaveMemberId.getNetMember().setPreferredForCoordinator(false);
// JoinRequestMessage reqMsg = new JoinRequestMessage(gmsJoinLeaveMemberId, mockMembers[0], null,
// 56734);
// InternalDistributedMember ids = new InternalDistributedMember("localhost", 97898);
// ids.getNetMember().setPreferredForCoordinator(true);
// gmsJoinLeave.processMessage(reqMsg);
// ArgumentCaptor<JoinResponseMessage> ac = ArgumentCaptor.forClass(JoinResponseMessage.class);
// verify(messenger).send(ac.capture());
//
// assertTrue("Should have asked for becoming a coordinator",
// ac.getValue().getBecomeCoordinator());
// }
@Test
public void testNetworkPartionMessage() throws Exception {
try {
initMocks(true);
System.setProperty(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY, "true");
gmsJoinLeave.join();
installView(1, gmsJoinLeaveMemberId, createMemberList(mockMembers[0], mockMembers[1],
mockMembers[2], gmsJoinLeaveMemberId, mockMembers[3]));
for (int i = 1; i < 4; i++) {
RemoveMemberMessage msg =
new RemoveMemberMessage(gmsJoinLeaveMemberId, mockMembers[i], "crashed");
msg.setSender(gmsJoinLeaveMemberId);
gmsJoinLeave.processMessage(msg);
}
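// Mockito verification with a timeout: wait up to three request-collection intervals for exactly
// one NetworkPartitionMessage; the three "crashed" removals above should drop the surviving
// weight below quorum since network partition detection is enabled for this test.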
Timeout to = new Timeout(3 * ServiceConfig.MEMBER_REQUEST_COLLECTION_INTERVAL, new Times(1));
verify(messenger, to).send(isA(NetworkPartitionMessage.class));
} finally {
System.getProperties().remove(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY);
}
}
@Test
public void testViewIgnoredAfterShutdown() throws Exception {
try {
initMocks(true);
System.setProperty(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY, "true");
gmsJoinLeave.join();
installView(1, gmsJoinLeaveMemberId, createMemberList(mockMembers[0], mockMembers[1],
mockMembers[2], gmsJoinLeaveMemberId, mockMembers[3]));
gmsJoinLeave.stop();
for (int i = 1; i < 4; i++) {
RemoveMemberMessage msg =
new RemoveMemberMessage(gmsJoinLeaveMemberId, mockMembers[i], "crashed");
msg.setSender(gmsJoinLeaveMemberId);
gmsJoinLeave.processMessage(msg);
}
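// the service was stopped above, so the queued removals should be ignored and no
// NetworkPartitionMessage sent even after the collection interval elapses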
Timeout to = new Timeout(2 * ServiceConfig.MEMBER_REQUEST_COLLECTION_INTERVAL, never());
verify(messenger, to).send(isA(NetworkPartitionMessage.class));
} finally {
System.getProperties().remove(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY);
}
}
@Test
public void testViewNotSentWhenShuttingDown() throws Exception {
try {
initMocks(false);
System.setProperty(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY, "true");
gmsJoinLeave.join();
installView(1, gmsJoinLeaveMemberId, createMemberList(mockMembers[0], mockMembers[1],
mockMembers[2], gmsJoinLeaveMemberId, mockMembers[3]));
assertTrue(gmsJoinLeave.getViewCreator().isAlive());
when(manager.shutdownInProgress()).thenReturn(Boolean.TRUE);
for (int i = 1; i < 4; i++) {
RemoveMemberMessage msg =
new RemoveMemberMessage(gmsJoinLeaveMemberId, mockMembers[i], "crashed");
msg.setSender(gmsJoinLeaveMemberId);
gmsJoinLeave.processMessage(msg);
}
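// with shutdownInProgress() now true, the view creator should exit without installing a new view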
Awaitility.await("waiting for view creator to stop").atMost(5000, TimeUnit.MILLISECONDS)
.until(() -> !gmsJoinLeave.getViewCreator().isAlive());
assertEquals(1, gmsJoinLeave.getView().getViewId());
} finally {
System.getProperties().remove(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY);
}
}
@Test
public void testPreparedViewFoundDuringBecomeCoordinator() throws Exception {
initMocks(false);
prepareAndInstallView(gmsJoinLeaveMemberId,
createMemberList(gmsJoinLeaveMemberId, mockMembers[0]));
// a new member is joining
NetView preparedView =
new NetView(gmsJoinLeave.getView(), gmsJoinLeave.getView().getViewId() + 5);
mockMembers[1].setVmViewId(preparedView.getViewId());
preparedView.add(mockMembers[1]);
InstallViewMessage msg = getInstallViewMessage(preparedView, null, true);
gmsJoinLeave.processMessage(msg);
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
Thread.sleep(2000);
ViewCreator vc = gmsJoinLeave.getViewCreator();
int viewId = 0;
if (gmsJoinLeave.getPreparedView() == null) {
viewId = gmsJoinLeave.getView().getViewId();
} else {
viewId = gmsJoinLeave.getPreparedView().getViewId();
}
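// ack the pending (prepared or current) view from every member so the coordinator can finish
// the prepare phase started by becomeCoordinator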
ViewAckMessage vack = new ViewAckMessage(gmsJoinLeaveMemberId, viewId, true);
vack.setSender(mockMembers[0]);
gmsJoinLeave.processMessage(vack);
vack = new ViewAckMessage(gmsJoinLeaveMemberId, viewId, true);
vack.setSender(mockMembers[1]);
gmsJoinLeave.processMessage(vack);
vack = new ViewAckMessage(gmsJoinLeaveMemberId, viewId, true);
vack.setSender(gmsJoinLeaveMemberId);
gmsJoinLeave.processMessage(vack);
int tries = 0;
while (!vc.waiting) {
if (tries > 30) {
Assert.fail("view creator never finished");
}
tries++;
Thread.sleep(1000);
}
NetView newView = gmsJoinLeave.getView();
System.out.println("new view is " + newView);
assertTrue(newView.contains(mockMembers[1]));
assertTrue(newView.getViewId() > preparedView.getViewId());
}
private NetView createView() {
List<InternalDistributedMember> mbrs = new LinkedList<>();
Set<InternalDistributedMember> shutdowns = new HashSet<>();
Set<InternalDistributedMember> crashes = new HashSet<>();
mbrs.add(mockMembers[0]);
mbrs.add(mockMembers[1]);
mbrs.add(mockMembers[2]);
mbrs.add(gmsJoinLeaveMemberId);
// prepare the view
NetView netView = new NetView(mockMembers[0], 1, mbrs, shutdowns, crashes);
return netView;
}
@Test
public void testCoordinatorFindRequestSuccess() throws Exception {
try {
initMocks(false);
HashSet<InternalDistributedMember> registrants = new HashSet<>();
registrants.add(mockMembers[0]);
FindCoordinatorResponse fcr = new FindCoordinatorResponse(mockMembers[0], mockMembers[0],
false, null, registrants, false, true, null);
NetView view = createView();
JoinResponseMessage jrm = new JoinResponseMessage(mockMembers[0], view, 0);
TcpClientWrapper tcpClientWrapper = mock(TcpClientWrapper.class);
gmsJoinLeave.setTcpClientWrapper(tcpClientWrapper);
FindCoordinatorRequest fcreq =
new FindCoordinatorRequest(gmsJoinLeaveMemberId, new HashSet<>(), -1, null, 0, "");
int connectTimeout = (int) services.getConfig().getMemberTimeout() * 2;
when(tcpClientWrapper.sendCoordinatorFindRequest(new InetSocketAddress("localhost", 12345),
fcreq, connectTimeout)).thenReturn(fcr);
callAsync(() -> {
gmsJoinLeave.installView(view);
});
assertTrue("Should be able to join ", gmsJoinLeave.join());
} finally {
}
}
// runs the task on a separate thread so the test thread can block in join()
private void callAsync(Runnable run) {
Thread th = new Thread(run);
th.start();
}
@Test
public void testCoordinatorFindRequestFailure() throws Exception {
try {
initMocks(false);
HashSet<InternalDistributedMember> registrants = new HashSet<>();
registrants.add(mockMembers[0]);
FindCoordinatorResponse fcr = new FindCoordinatorResponse(mockMembers[0], mockMembers[0],
false, null, registrants, false, true, null);
NetView view = createView();
JoinResponseMessage jrm = new JoinResponseMessage(mockMembers[0], view, 0);
gmsJoinLeave.setJoinResponseMessage(jrm);
TcpClientWrapper tcpClientWrapper = mock(TcpClientWrapper.class);
gmsJoinLeave.setTcpClientWrapper(tcpClientWrapper);
FindCoordinatorRequest fcreq =
new FindCoordinatorRequest(gmsJoinLeaveMemberId, new HashSet<>(), -1, null, 0, "");
int connectTimeout = (int) services.getConfig().getMemberTimeout() * 2;
// passing the wrong port here, so it will fail
when(tcpClientWrapper.sendCoordinatorFindRequest(new InetSocketAddress("localhost", 12346),
fcreq, connectTimeout)).thenReturn(fcr);
assertFalse("Should not be able to join ", gmsJoinLeave.join());
} finally {
}
}
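// Polls until a view newer than viewId is installed or the service starts stopping; gives up
// after roughly 20 seconds.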
private void waitForViewAndFinalCheckInProgress(int viewId) throws InterruptedException {
// wait for the view processing thread to collect and process the requests
int sleeps = 0;
while (!gmsJoinLeave.isStopping() && (gmsJoinLeave.getView().getViewId() == viewId)) {
if (sleeps++ > 20) {
System.out.println("view requests: " + gmsJoinLeave.getViewRequests());
System.out.println("current view: " + gmsJoinLeave.getView());
throw new RuntimeException("timeout waiting for view #" + viewId);
}
Thread.sleep(1000);
System.out.println("Empty sleeps " + sleeps + " stopping: " + gmsJoinLeave.isStopping());
}
}
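// Test double: overrides checkIfAvailable so that a remove or leave request can be injected
// while the coordinator is performing its final availability check, then parks the checking
// thread (used by the two tests that follow this class).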
class GMSJoinLeaveTest extends GMSJoinLeave {
public GMSJoinLeaveTest() {
super();
}
@Override
boolean checkIfAvailable(InternalDistributedMember fmbr) {
if (removeMember != null) {
try {
if (removeMember.equals(fmbr)) {
GMSJoinLeaveJUnitTest.this.processRemoveMessage(fmbr);
Thread.sleep(1000000);
}
} catch (InterruptedException e) {
// ignore - fall through and report the member as available
}
return true;
} else if (leaveMember != null) {
try {
if (leaveMember.equals(fmbr)) {
GMSJoinLeaveJUnitTest.this.processLeaveMessage(fmbr);
Thread.sleep(1000000);
}
} catch (InterruptedException e) {
// ignore - fall through and report the member as available
}
return true;
} else {
return super.checkIfAvailable(fmbr);
}
}
}
@Test
public void testRemoveRequestWhileWaitingForFinalResponse() throws Exception {
initMocks(true, true);
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
installView();
int viewId = gmsJoinLeave.getView().getViewId();
System.out.println("Current viewid " + viewId);
this.removeMember = mockMembers[0];
processJoinMessage(gmsJoinLeave.getMemberID(), mockMembers[2], 98989);
waitForViewAndFinalCheckInProgress(viewId);
this.removeMember = null;
assertTrue("testFlagForRemovalRequest should be true",
gmsJoinLeave.getViewCreator().getTestFlageForRemovalRequest());
}
@Test
public void testLeaveRequestWhileWaitingForFinalResponse() throws Exception {
initMocks(true, true);
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
installView();
int viewId = gmsJoinLeave.getView().getViewId();
System.out.println("Current viewid " + viewId);
this.leaveMember = mockMembers[0];
processJoinMessage(gmsJoinLeave.getMemberID(), mockMembers[2], 98989);
waitForViewAndFinalCheckInProgress(viewId);
this.leaveMember = null;
assertTrue("testFlagForRemovalRequest should be true",
gmsJoinLeave.getViewCreator().getTestFlageForRemovalRequest());
}
private void installView() throws Exception {
final int viewInstallationTime = 15000;
NetView oldView = null;
long giveup = System.currentTimeMillis() + viewInstallationTime;
while (System.currentTimeMillis() < giveup && oldView == null) {
Thread.sleep(500);
oldView = gmsJoinLeave.getView();
}
assertTrue(oldView != null); // it should have become coordinator and installed a view
NetView newView = new NetView(oldView, oldView.getViewId() + 1);
newView.add(mockMembers[0]);
newView.add(mockMembers[1]);
gmsJoinLeave.installView(newView);
}
private void processJoinMessage(InternalDistributedMember coordinator,
InternalDistributedMember newMember, int port) {
JoinRequestMessage reqMsg = new JoinRequestMessage(coordinator, newMember, null, port, 0);
gmsJoinLeave.processMessage(reqMsg);
}
private void processRemoveMessage(InternalDistributedMember rMember) {
RemoveMemberMessage msg =
new RemoveMemberMessage(gmsJoinLeave.getMemberID(), rMember, "testing");
msg.setSender(gmsJoinLeave.getMemberID());
gmsJoinLeave.processMessage(msg);
}
private void processLeaveMessage(InternalDistributedMember rMember) {
LeaveRequestMessage msg =
new LeaveRequestMessage(gmsJoinLeave.getMemberID(), rMember, "testing");
msg.setSender(rMember);
gmsJoinLeave.processMessage(msg);
}
}
|
geode-core/src/test/java/org/apache/geode/distributed/internal/membership/gms/membership/GMSJoinLeaveJUnitTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.distributed.internal.membership.gms.membership;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.isA;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.awaitility.Awaitility;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.distributed.internal.DistributionConfig;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.distributed.internal.membership.NetView;
import org.apache.geode.distributed.internal.membership.gms.GMSMember;
import org.apache.geode.distributed.internal.membership.gms.ServiceConfig;
import org.apache.geode.distributed.internal.membership.gms.Services;
import org.apache.geode.distributed.internal.membership.gms.Services.Stopper;
import org.apache.geode.distributed.internal.membership.gms.interfaces.Authenticator;
import org.apache.geode.distributed.internal.membership.gms.interfaces.HealthMonitor;
import org.apache.geode.distributed.internal.membership.gms.interfaces.Manager;
import org.apache.geode.distributed.internal.membership.gms.interfaces.Messenger;
import org.apache.geode.distributed.internal.membership.gms.locator.FindCoordinatorRequest;
import org.apache.geode.distributed.internal.membership.gms.locator.FindCoordinatorResponse;
import org.apache.geode.distributed.internal.membership.gms.membership.GMSJoinLeave.SearchState;
import org.apache.geode.distributed.internal.membership.gms.membership.GMSJoinLeave.TcpClientWrapper;
import org.apache.geode.distributed.internal.membership.gms.membership.GMSJoinLeave.ViewCreator;
import org.apache.geode.distributed.internal.membership.gms.membership.GMSJoinLeave.ViewReplyProcessor;
import org.apache.geode.distributed.internal.membership.gms.messages.InstallViewMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.JoinRequestMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.JoinResponseMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.LeaveRequestMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.NetworkPartitionMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.RemoveMemberMessage;
import org.apache.geode.distributed.internal.membership.gms.messages.ViewAckMessage;
import org.apache.geode.internal.Version;
import org.apache.geode.security.AuthenticationFailedException;
import org.apache.geode.test.junit.categories.IntegrationTest;
import org.apache.geode.test.junit.categories.MembershipTest;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.internal.verification.Times;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.mockito.verification.Timeout;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.Timer;
import java.util.concurrent.TimeUnit;
@Category({IntegrationTest.class, MembershipTest.class})
public class GMSJoinLeaveJUnitTest {
private Services services;
private ServiceConfig mockConfig;
private DistributionConfig mockDistConfig;
private Authenticator authenticator;
private HealthMonitor healthMonitor;
private InternalDistributedMember gmsJoinLeaveMemberId;
private InternalDistributedMember[] mockMembers;
private InternalDistributedMember mockOldMember;
private Properties credentials = new Properties();
private Messenger messenger;
private GMSJoinLeave gmsJoinLeave;
private Manager manager;
private Stopper stopper;
private InternalDistributedMember removeMember = null;
private InternalDistributedMember leaveMember = null;
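// set by tests that use the GMSJoinLeaveTest subclass (defined near the end of this file) to
// inject a remove or leave request during the coordinator's availability check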
public void initMocks() throws IOException {
initMocks(false);
}
public void initMocks(boolean enableNetworkPartition) throws UnknownHostException {
initMocks(enableNetworkPartition, false);
}
public void initMocks(boolean enableNetworkPartition, boolean useTestGMSJoinLeave)
throws UnknownHostException {
mockDistConfig = mock(DistributionConfig.class);
when(mockDistConfig.getEnableNetworkPartitionDetection()).thenReturn(enableNetworkPartition);
when(mockDistConfig.getLocators()).thenReturn("localhost[8888]");
when(mockDistConfig.getSecurityUDPDHAlgo()).thenReturn("");
mockConfig = mock(ServiceConfig.class);
when(mockDistConfig.getStartLocator()).thenReturn("localhost[12345]");
when(mockConfig.getDistributionConfig()).thenReturn(mockDistConfig);
when(mockDistConfig.getLocators()).thenReturn("localhost[12345]");
when(mockDistConfig.getMcastPort()).thenReturn(0);
when(mockDistConfig.getMemberTimeout()).thenReturn(2000);
authenticator = mock(Authenticator.class);
gmsJoinLeaveMemberId = new InternalDistributedMember("localhost", 8887);
messenger = mock(Messenger.class);
when(messenger.getMemberID()).thenReturn(gmsJoinLeaveMemberId);
stopper = mock(Stopper.class);
when(stopper.isCancelInProgress()).thenReturn(false);
manager = mock(Manager.class);
healthMonitor = mock(HealthMonitor.class);
when(healthMonitor.getFailureDetectionPort()).thenReturn(Integer.valueOf(-1));
services = mock(Services.class);
when(services.getAuthenticator()).thenReturn(authenticator);
when(services.getConfig()).thenReturn(mockConfig);
when(services.getMessenger()).thenReturn(messenger);
when(services.getCancelCriterion()).thenReturn(stopper);
when(services.getManager()).thenReturn(manager);
when(services.getHealthMonitor()).thenReturn(healthMonitor);
Timer t = new Timer(true);
when(services.getTimer()).thenReturn(t);
mockMembers = new InternalDistributedMember[4];
for (int i = 0; i < mockMembers.length; i++) {
mockMembers[i] = new InternalDistributedMember("localhost", 8888 + i);
}
mockOldMember = new InternalDistributedMember("localhost", 8700, Version.GFE_56);
if (useTestGMSJoinLeave) {
gmsJoinLeave = new GMSJoinLeaveTest();
} else {
gmsJoinLeave = new GMSJoinLeave();
}
gmsJoinLeave.init(services);
gmsJoinLeave.start();
gmsJoinLeave.started();
}
@After
public void tearDown() throws Exception {
if (gmsJoinLeave != null) {
gmsJoinLeave.stop();
gmsJoinLeave.stopped();
}
}
@Test
public void testFindCoordinatorInView() throws Exception {
initMocks();
int viewId = 1;
List<InternalDistributedMember> mbrs = new LinkedList<>();
mbrs.add(mockMembers[0]);
mbrs.add(mockMembers[1]);
mbrs.add(mockMembers[2]);
when(services.getMessenger()).thenReturn(messenger);
// prepare the view
NetView netView = new NetView(mockMembers[0], viewId, mbrs);
SearchState state = gmsJoinLeave.searchState;
state.view = netView;
state.viewId = netView.getViewId();
InternalDistributedMember coordinator = mockMembers[2];
coordinator.setVmViewId(viewId);
// already tried joining using members 0 and 1
Set<InternalDistributedMember> set = new HashSet<>();
mockMembers[0].setVmViewId(viewId - 1);
set.add(mockMembers[0]);
mockMembers[1].setVmViewId(viewId - 1);
set.add(mockMembers[1]);
state.alreadyTried = set;
state.hasContactedAJoinedLocator = true;
// simulate a response being received
InternalDistributedMember sender = mockMembers[2];
FindCoordinatorResponse resp = new FindCoordinatorResponse(coordinator, sender, null, 0);
gmsJoinLeave.processMessage(resp);
// tell GMSJoinLeave that a unit test is running so it won't clear the
// responses collection
gmsJoinLeave.unitTesting.add("findCoordinatorFromView");
// now for the test
boolean result = gmsJoinLeave.findCoordinatorFromView();
assertTrue("should have found coordinator " + mockMembers[2], result);
assertTrue("should have found " + coordinator + " but found " + state.possibleCoordinator,
state.possibleCoordinator == coordinator);
}
@Test
public void testProcessJoinMessageRejectOldMemberVersion() throws IOException {
initMocks();
gmsJoinLeave.processMessage(new JoinRequestMessage(mockOldMember, mockOldMember, null, -1, 0));
assertTrue("JoinRequest should not have been added to view request",
gmsJoinLeave.getViewRequests().size() == 0);
verify(messenger).send(any(JoinResponseMessage.class));
}
@Test
public void testViewWithoutMemberInitiatesForcedDisconnect() throws Exception {
initMocks();
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
List<InternalDistributedMember> members = Arrays.asList(mockMembers);
NetView v = new NetView(mockMembers[0], 2, members);
InstallViewMessage message = getInstallViewMessage(v, null, false);
gmsJoinLeave.processMessage(message);
verify(manager).forceDisconnect(any(String.class));
}
@Test
public void testProcessJoinMessageWithBadAuthentication() throws IOException {
initMocks();
when(services.getAuthenticator()).thenReturn(authenticator);
when(authenticator.authenticate(mockMembers[0], credentials))
.thenThrow(new AuthenticationFailedException("we want to fail auth here"));
when(services.getMessenger()).thenReturn(messenger);
gmsJoinLeave
.processMessage(new JoinRequestMessage(mockMembers[0], mockMembers[0], credentials, -1, 0));
assertTrue("JoinRequest should not have been added to view request",
gmsJoinLeave.getViewRequests().size() == 0);
verify(messenger).send(any(JoinResponseMessage.class));
}
@Test
public void testProcessJoinMessageWithAuthenticationButNullCredentials() throws IOException {
initMocks();
when(services.getAuthenticator()).thenReturn(authenticator);
when(authenticator.authenticate(mockMembers[0], null))
.thenThrow(new AuthenticationFailedException("we want to fail auth here"));
when(services.getMessenger()).thenReturn(messenger);
gmsJoinLeave
.processMessage(new JoinRequestMessage(mockMembers[0], mockMembers[0], null, -1, 0));
assertTrue("JoinRequest should not have been added to view request",
gmsJoinLeave.getViewRequests().size() == 0);
verify(messenger).send(any(JoinResponseMessage.class));
}
// This test does not test the actual join process but rather that the join response gets logged
@Test
public void testProcessJoinResponseIsRecorded() throws IOException {
initMocks();
when(services.getAuthenticator()).thenReturn(authenticator);
when(authenticator.authenticate(mockMembers[0], null))
.thenThrow(new AuthenticationFailedException("we want to fail auth here"));
when(services.getMessenger()).thenReturn(messenger);
JoinResponseMessage[] joinResponse = gmsJoinLeave.getJoinResponseMessage();
JoinResponseMessage jrm = new JoinResponseMessage(mockMembers[0], new byte[9], 233);
gmsJoinLeave.processMessage(jrm);
// this should NOT be logged; it just informs the member of a successful join
Assert.assertEquals(null, joinResponse[0]);
jrm = new JoinResponseMessage("rejected...", 0);
gmsJoinLeave.processMessage(jrm);
// this should log..
Assert.assertEquals(jrm, joinResponse[0]);
gmsJoinLeave.setJoinResponseMessage(null);
jrm = new JoinResponseMessage(mockMembers[0], new NetView(), 0);
gmsJoinLeave.processMessage(jrm);
// this should log..
Assert.assertEquals(jrm, joinResponse[0]);
}
/**
* prepares and installs a view
*
* @throws IOException
*/
private void prepareAndInstallView(InternalDistributedMember coordinator,
List<InternalDistributedMember> members) throws IOException {
int viewId = 1;
when(services.getMessenger()).thenReturn(messenger);
// prepare the view
NetView netView = new NetView(coordinator, viewId, members);
InstallViewMessage installViewMessage = getInstallViewMessage(netView, credentials, true);
gmsJoinLeave.processMessage(installViewMessage);
verify(messenger).send(any(ViewAckMessage.class));
// install the view
installViewMessage = getInstallViewMessage(netView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
Assert.assertEquals(netView, gmsJoinLeave.getView());
}
private List<InternalDistributedMember> createMemberList(InternalDistributedMember... members) {
List<InternalDistributedMember> memberList =
new ArrayList<InternalDistributedMember>(members.length);
for (InternalDistributedMember member : members) {
memberList.add(member);
}
return memberList;
}
@Test
public void testRemoveMember() throws Exception {
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
MethodExecuted removeMessageSent = new MethodExecuted();
when(messenger.send(any(RemoveMemberMessage.class))).thenAnswer(removeMessageSent);
gmsJoinLeave.remove(mockMembers[0], "removing for test");
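// give the view-creation thread time to collect the queued remove request and send the
// RemoveMemberMessage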
Thread.sleep(ServiceConfig.MEMBER_REQUEST_COLLECTION_INTERVAL * 2);
assertTrue(removeMessageSent.methodExecuted);
}
@Test
public void testIsMemberLeaving() throws Exception {
initMocks();
prepareAndInstallView(mockMembers[0],
createMemberList(mockMembers[0], mockMembers[1], gmsJoinLeaveMemberId));
MethodExecuted removeMessageSent = new MethodExecuted();
when(messenger.send(any(RemoveMemberMessage.class))).thenAnswer(removeMessageSent);
assertFalse(gmsJoinLeave.isMemberLeaving(mockMembers[0]));
assertFalse(gmsJoinLeave.isMemberLeaving(mockMembers[1]));
gmsJoinLeave.remove(mockMembers[0], "removing for test");
assertTrue(gmsJoinLeave.isMemberLeaving(mockMembers[0]));
LeaveRequestMessage msg =
new LeaveRequestMessage(gmsJoinLeave.getMemberID(), mockMembers[1], "leaving for test");
msg.setSender(mockMembers[1]);
gmsJoinLeave.processMessage(msg);
assertTrue(gmsJoinLeave.isMemberLeaving(mockMembers[1]));
}
@Test
public void testRemoveAndLeaveIsNotACrash() throws Exception {
// simultaneous leave & remove requests for a member
// should not result in it being seen as a crashed member
initMocks();
final int viewInstallationTime = 15000;
when(healthMonitor.checkIfAvailable(any(InternalDistributedMember.class), any(String.class),
any(Boolean.class))).thenReturn(true);
gmsJoinLeave.delayViewCreationForTest(5000); // ensures multiple requests are queued for a view change
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
NetView oldView = null;
long giveup = System.currentTimeMillis() + viewInstallationTime;
while (System.currentTimeMillis() < giveup && oldView == null) {
Thread.sleep(500);
oldView = gmsJoinLeave.getView();
}
assertTrue(oldView != null); // it should have become coordinator and installed a view
NetView newView = new NetView(oldView, oldView.getViewId() + 1);
newView.add(mockMembers[1]);
newView.add(mockMembers[2]);
gmsJoinLeave.installView(newView);
gmsJoinLeave.memberShutdown(mockMembers[1], "shutting down for test");
gmsJoinLeave.remove(mockMembers[1], "removing for test");
giveup = System.currentTimeMillis() + viewInstallationTime;
while (System.currentTimeMillis() < giveup
&& gmsJoinLeave.getView().getViewId() == newView.getViewId()) {
Thread.sleep(500);
}
assertTrue(gmsJoinLeave.getView().getViewId() > newView.getViewId());
assertFalse(gmsJoinLeave.getView().getCrashedMembers().contains(mockMembers[1]));
}
@Test
public void testRejectOlderView() throws IOException {
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
List<InternalDistributedMember> mbrs = new LinkedList<>();
mbrs.add(mockMembers[0]);
mbrs.add(mockMembers[1]);
// try to install an older view where viewId < currentView.viewId
NetView olderNetView = new NetView(mockMembers[0], 0, mbrs);
InstallViewMessage installViewMessage = getInstallViewMessage(olderNetView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
Assert.assertNotEquals(gmsJoinLeave.getView(), olderNetView);
}
@Test
public void testForceDisconnectedFromNewView() throws IOException {
initMocks(true);// enabledNetworkPartition;
Manager mockManager = mock(Manager.class);
when(services.getManager()).thenReturn(mockManager);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
int viewId = 2;
List<InternalDistributedMember> mbrs = new LinkedList<>();
mbrs.add(mockMembers[1]);
mbrs.add(mockMembers[2]);
mbrs.add(mockMembers[3]);
// install the view
NetView netView = new NetView(mockMembers[0], viewId, mbrs);
InstallViewMessage installViewMessage = getInstallViewMessage(netView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
Assert.assertNotEquals(netView, gmsJoinLeave.getView());
verify(mockManager).forceDisconnect(any(String.class));
}
@SuppressWarnings("rawtypes")
private class MethodExecuted implements Answer {
private boolean methodExecuted = false;
@Override
public Object answer(InvocationOnMock invocation) {
// record that the mocked send() was invoked
methodExecuted = true;
return null;
}
}
@Test
public void testNonMemberCantRemoveMember() throws Exception {
String reason = "testing";
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
// test that a non-member can't remove another member
RemoveMemberMessage msg = new RemoveMemberMessage(mockMembers[0], mockMembers[1], reason);
msg.setSender(new InternalDistributedMember("localhost", 9000));
gmsJoinLeave.processMessage(msg);
assertTrue("RemoveMemberMessage should not have been added to view requests",
gmsJoinLeave.getViewRequests().size() == 0);
}
@Test
public void testDuplicateLeaveRequestDoesNotCauseNewView() throws Exception {
String reason = "testing";
initMocks();
gmsJoinLeave.unitTesting.add("noRandomViewChange");
prepareAndInstallView(gmsJoinLeaveMemberId,
createMemberList(gmsJoinLeaveMemberId, mockMembers[0]));
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
LeaveRequestMessage msg =
new LeaveRequestMessage(gmsJoinLeave.getMemberID(), mockMembers[0], reason);
msg.setSender(mockMembers[0]);
gmsJoinLeave.processMessage(msg);
msg = new LeaveRequestMessage(gmsJoinLeave.getMemberID(), mockMembers[0], reason);
msg.setSender(mockMembers[0]);
gmsJoinLeave.processMessage(msg);
waitForViewAndNoRequestsInProgress(7);
NetView view = gmsJoinLeave.getView();
assertTrue("expected member to be removed: " + mockMembers[0] + "; view: " + view,
!view.contains(mockMembers[0]));
assertTrue("expected member to be in shutdownMembers collection: " + mockMembers[0] + "; view: "
+ view, view.getShutdownMembers().contains(mockMembers[0]));
}
@Test
public void testDuplicateRemoveRequestDoesNotCauseNewView() throws Exception {
String reason = "testing";
initMocks();
prepareAndInstallView(gmsJoinLeaveMemberId,
createMemberList(gmsJoinLeaveMemberId, mockMembers[0]));
gmsJoinLeave.getView().add(mockMembers[1]);
gmsJoinLeave.unitTesting.add("noRandomViewChange");
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
RemoveMemberMessage msg =
new RemoveMemberMessage(gmsJoinLeave.getMemberID(), mockMembers[0], reason);
msg.setSender(mockMembers[0]);
gmsJoinLeave.processMessage(msg);
msg = new RemoveMemberMessage(gmsJoinLeave.getMemberID(), mockMembers[0], reason);
msg.setSender(mockMembers[0]);
gmsJoinLeave.processMessage(msg);
waitForViewAndNoRequestsInProgress(7);
NetView view = gmsJoinLeave.getView();
assertTrue("expected member to be removed: " + mockMembers[0] + "; view: " + view,
!view.contains(mockMembers[0]));
assertTrue(
"expected member to be in crashedMembers collection: " + mockMembers[0] + "; view: " + view,
view.getCrashedMembers().contains(mockMembers[0]));
}
// @Category(FlakyTest.class) // GEODE-2074: timed out waiting for view #7
@Test
public void testDuplicateJoinRequestDoesNotCauseNewView() throws Exception {
initMocks();
when(healthMonitor.checkIfAvailable(any(InternalDistributedMember.class), any(String.class),
any(Boolean.class))).thenReturn(true);
gmsJoinLeave.unitTesting.add("noRandomViewChange");
prepareAndInstallView(gmsJoinLeaveMemberId,
createMemberList(gmsJoinLeaveMemberId, mockMembers[0]));
gmsJoinLeave.getView().add(mockMembers[1]);
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
JoinRequestMessage msg =
new JoinRequestMessage(gmsJoinLeaveMemberId, mockMembers[2], null, -1, 0);
msg.setSender(mockMembers[2]);
gmsJoinLeave.processMessage(msg);
msg = new JoinRequestMessage(gmsJoinLeaveMemberId, mockMembers[2], null, -1, 0);
msg.setSender(mockMembers[2]);
gmsJoinLeave.processMessage(msg);
waitForViewAndNoRequestsInProgress(7);
NetView view = gmsJoinLeave.getView();
assertTrue("expected member to be added: " + mockMembers[2] + "; view: " + view,
view.contains(mockMembers[2]));
List<InternalDistributedMember> members = view.getMembers();
int occurrences = 0;
for (InternalDistributedMember mbr : members) {
if (mbr.equals(mockMembers[2])) {
occurrences += 1;
}
}
assertTrue("expected member to only be in the view once: " + mockMembers[2] + "; view: " + view,
occurrences == 1);
verify(healthMonitor, times(5)).checkIfAvailable(any(InternalDistributedMember.class),
any(String.class), any(Boolean.class));
}
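// Polls until the view creator is idle, the request queue is drained, and the expected view id
// has been installed; times out after roughly 20 seconds.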
private void waitForViewAndNoRequestsInProgress(int viewId) throws InterruptedException {
// wait for the view processing thread to collect and process the requests
int sleeps = 0;
while (!gmsJoinLeave.isStopping() && !gmsJoinLeave.getViewCreator().isWaiting()
&& (!gmsJoinLeave.getViewRequests().isEmpty()
|| gmsJoinLeave.getView().getViewId() != viewId)) {
if (sleeps++ > 20) {
throw new RuntimeException("timeout waiting for view #" + viewId + " current view: "
+ gmsJoinLeave.getView() + "; view requests: " + gmsJoinLeave.getViewRequests());
}
Thread.sleep(1000);
}
}
@Test
public void testRemoveCausesForcedDisconnect() throws Exception {
String reason = "testing";
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
gmsJoinLeave.getView().add(mockMembers[1]);
RemoveMemberMessage msg =
new RemoveMemberMessage(mockMembers[0], gmsJoinLeave.getMemberID(), reason);
msg.setSender(mockMembers[1]);
gmsJoinLeave.processMessage(msg);
verify(manager).forceDisconnect(reason);
}
@Test
public void testLeaveCausesForcedDisconnect() throws Exception {
String reason = "testing";
initMocks();
prepareAndInstallView(gmsJoinLeaveMemberId,
createMemberList(gmsJoinLeaveMemberId, mockMembers[0]));
gmsJoinLeave.getView().add(mockMembers[1]);
LeaveRequestMessage msg =
new LeaveRequestMessage(gmsJoinLeave.getMemberID(), gmsJoinLeave.getMemberID(), reason);
msg.setSender(mockMembers[1]);
gmsJoinLeave.processMessage(msg);
verify(manager).forceDisconnect(reason);
}
@Test
public void testLeaveOfNonMemberIsNoOp() throws Exception {
String reason = "testing";
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
mockMembers[1].setVmViewId(gmsJoinLeave.getView().getViewId() - 1);
LeaveRequestMessage msg =
new LeaveRequestMessage(gmsJoinLeave.getMemberID(), mockMembers[1], reason);
msg.setSender(mockMembers[1]);
gmsJoinLeave.processMessage(msg);
assertTrue("Expected leave request from non-member to be ignored",
gmsJoinLeave.getViewRequests().isEmpty());
}
@Test
public void testBecomeCoordinatorOnStartup() throws Exception {
initMocks();
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
long giveup = System.currentTimeMillis() + 20000;
while (System.currentTimeMillis() < giveup && !gmsJoinLeave.isCoordinator()) {
Thread.sleep(1000);
}
assertTrue(gmsJoinLeave.isCoordinator());
}
@Test
public void testBecomeCoordinator() throws Exception {
String reason = "testing";
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
NetView view = gmsJoinLeave.getView();
view.add(gmsJoinLeaveMemberId);
InternalDistributedMember creator = view.getCreator();
LeaveRequestMessage msg = new LeaveRequestMessage(creator, creator, reason);
msg.setSender(creator);
gmsJoinLeave.processMessage(msg);
assertTrue("Expected becomeCoordinator to be invoked", gmsJoinLeave.isCoordinator());
}
@Test
public void testBecomeCoordinatorThroughRemove() throws Exception {
String reason = "testing";
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
NetView view = gmsJoinLeave.getView();
view.add(gmsJoinLeaveMemberId);
InternalDistributedMember creator = view.getCreator();
RemoveMemberMessage msg = new RemoveMemberMessage(creator, creator, reason);
msg.setSender(creator);
gmsJoinLeave.processMessage(msg);
assertTrue("Expected becomeCoordinator to be invoked", gmsJoinLeave.isCoordinator());
}
@Test
public void testBecomeCoordinatorThroughViewChange() throws Exception {
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
NetView oldView = gmsJoinLeave.getView();
oldView.add(gmsJoinLeaveMemberId);
NetView view = new NetView(oldView, oldView.getViewId() + 1);
InternalDistributedMember creator = view.getCreator();
view.remove(creator);
InstallViewMessage msg = getInstallViewMessage(view, creator, false);
msg.setSender(creator);
gmsJoinLeave.processMessage(msg);
assertTrue("Expected it to become coordinator", gmsJoinLeave.isCoordinator());
}
@Test
public void testBecomeParticipantThroughViewChange() throws Exception {
initMocks();
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
NetView oldView = gmsJoinLeave.getView();
oldView.add(gmsJoinLeaveMemberId);
InternalDistributedMember creator = oldView.getCreator();
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
NetView view = new NetView(2, gmsJoinLeave.getView().getViewId() + 1);
view.setCreator(creator);
view.add(creator);
view.add(gmsJoinLeaveMemberId);
InstallViewMessage msg = getInstallViewMessage(view, creator, false);
msg.setSender(creator);
gmsJoinLeave.processMessage(msg);
assertTrue("Expected it to stop being coordinator", !gmsJoinLeave.isCoordinator());
}
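// helper: wraps a view in an InstallViewMessage (prepare or install phase) with this member as
// the sender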
private InstallViewMessage getInstallViewMessage(NetView view, Object credentials,
boolean preparing) {
InstallViewMessage installViewMessage = new InstallViewMessage(view, credentials, preparing);
installViewMessage.setSender(gmsJoinLeaveMemberId);
return installViewMessage;
}
@Test
public void testNetworkPartitionDetected() throws IOException {
initMocks(true);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
// set up a view with sufficient members, then create a new view
// where enough weight is lost to cause a network partition
List<InternalDistributedMember> mbrs = new LinkedList<>();
mbrs.add(mockMembers[0]);
mbrs.add(mockMembers[1]);
mbrs.add(mockMembers[2]);
mbrs.add(gmsJoinLeaveMemberId);
((GMSMember) mockMembers[1].getNetMember()).setMemberWeight((byte) 20);
NetView newView = new NetView(mockMembers[0], gmsJoinLeave.getView().getViewId() + 1, mbrs);
InstallViewMessage installViewMessage = getInstallViewMessage(newView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
Set<InternalDistributedMember> crashes = new HashSet<>();
crashes.add(mockMembers[1]);
crashes.add(mockMembers[2]);
mbrs = new LinkedList<>(mbrs);
mbrs.remove(mockMembers[1]);
mbrs.remove(mockMembers[2]);
NetView partitionView =
new NetView(mockMembers[0], newView.getViewId() + 1, mbrs, Collections.emptySet(), crashes);
installViewMessage = getInstallViewMessage(partitionView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
verify(manager).forceDisconnect(isA(String.class));
verify(manager).quorumLost(crashes, newView);
}
// Possibly modify test to check for network partition message in the force disconnect
@Test
public void testNetworkPartitionMessageReceived() throws Exception {
initMocks();
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
NetworkPartitionMessage message = new NetworkPartitionMessage();
gmsJoinLeave.processMessage(message);
verify(manager).forceDisconnect(any(String.class));
}
@Test
public void testQuorumLossNotificationWithNetworkPartitionDetectionDisabled() throws IOException {
initMocks(false);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
// set up a view with sufficient members, then create a new view
// where enough weight is lost to cause a network partition
List<InternalDistributedMember> mbrs = new LinkedList<>();
Set<InternalDistributedMember> shutdowns = new HashSet<>();
Set<InternalDistributedMember> crashes = new HashSet<>();
mbrs.add(mockMembers[0]);
mbrs.add(mockMembers[1]);
mbrs.add(mockMembers[2]);
mbrs.add(gmsJoinLeaveMemberId);
((GMSMember) mockMembers[1].getNetMember()).setMemberWeight((byte) 20);
NetView newView = new NetView(mockMembers[0], gmsJoinLeave.getView().getViewId() + 1, mbrs,
shutdowns, crashes);
InstallViewMessage installViewMessage = getInstallViewMessage(newView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
crashes = new HashSet<>(crashes);
crashes.add(mockMembers[1]);
crashes.add(mockMembers[2]);
mbrs = new LinkedList<>(mbrs);
mbrs.remove(mockMembers[1]);
mbrs.remove(mockMembers[2]);
NetView partitionView =
new NetView(mockMembers[0], newView.getViewId() + 1, mbrs, shutdowns, crashes);
installViewMessage = getInstallViewMessage(partitionView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
verify(manager, never()).forceDisconnect(any(String.class));
verify(manager).quorumLost(crashes, newView);
}
@Test
public void testConflictingPrepare() throws Exception {
initMocks(true);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
NetView gmsView = gmsJoinLeave.getView();
NetView newView = new NetView(gmsView, gmsView.getViewId() + 6);
InstallViewMessage msg = getInstallViewMessage(newView, null, true);
gmsJoinLeave.processMessage(msg);
NetView alternateView = new NetView(gmsView, gmsView.getViewId() + 1);
msg = getInstallViewMessage(alternateView, null, true);
gmsJoinLeave.processMessage(msg);
assertTrue(gmsJoinLeave.getPreparedView().equals(newView));
}
@Test
public void testNoViewAckCausesRemovalMessage() throws Exception {
initMocks(true);
when(healthMonitor.checkIfAvailable(any(InternalDistributedMember.class), any(String.class),
any(Boolean.class))).thenReturn(false);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId));
NetView oldView = gmsJoinLeave.getView();
NetView newView = new NetView(oldView, oldView.getViewId() + 1);
// the new view will remove the old coordinator (normal shutdown) and add a new member
// who will not ack the view. This should cause it to be removed from the system
// with a RemoveMemberMessage
newView.add(mockMembers[2]);
newView.remove(mockMembers[0]);
InstallViewMessage installViewMessage = getInstallViewMessage(newView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
long giveup = System.currentTimeMillis() + (2000 * 3); // this test's member-timeout * 3
while (System.currentTimeMillis() < giveup
&& gmsJoinLeave.getView().getViewId() == oldView.getViewId()) {
Thread.sleep(1000);
}
assertTrue(gmsJoinLeave.isCoordinator());
// wait for suspect processing
Thread.sleep(10000);
verify(healthMonitor, atLeast(1)).checkIfAvailable(isA(DistributedMember.class),
isA(String.class), isA(Boolean.class));
// verify(messenger, atLeast(1)).send(isA(RemoveMemberMessage.class));
}
/**
* This tests a member shutdown using the memberShutdown call (simulating the call from
* DistributionManager). The gmsJoinLeaveMemberId is not the coordinator but should now become
* the coordinator.
*/
@Test
public void testCoordinatorShutsdownAndWeBecomeCoordinatorAndSendOutCorrectView()
throws Exception {
initMocks(false);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], gmsJoinLeaveMemberId,
mockMembers[1], mockMembers[2], mockMembers[3]));
Assert.assertFalse(gmsJoinLeave.isCoordinator());
// The coordinator shuts down
gmsJoinLeave.memberShutdown(mockMembers[0], "Shutdown");
NetView nextView = gmsJoinLeave.getViewCreator().initialView;
assertTrue(gmsJoinLeave.isCoordinator());
assertTrue(nextView.getCoordinator().equals(gmsJoinLeaveMemberId));
assertTrue(nextView.getMembers().contains(mockMembers[1]));
assertTrue(nextView.getMembers().contains(mockMembers[2]));
assertTrue(nextView.getMembers().contains(mockMembers[3]));
}
/**
* This tests a member shutdown using the memberShutdown call (simulating the call from
* DistributionManager). The gmsJoinLeaveMemberId is not the coordinator but should now become
* the coordinator and remove all members that sent us leave requests prior to our becoming
* coordinator.
*/
@Test
public void testCoordinatorAndOthersShutdownAndWeBecomeCoordinatorProcessQueuedUpLeaveMessages()
throws Exception {
initMocks(false);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], mockMembers[1],
mockMembers[2], gmsJoinLeaveMemberId, mockMembers[3]));
Assert.assertFalse(gmsJoinLeave.isCoordinator());
// The coordinator and other members shutdown
gmsJoinLeave.memberShutdown(mockMembers[1], "Shutdown");
gmsJoinLeave.memberShutdown(mockMembers[2], "Shutdown");
gmsJoinLeave.memberShutdown(mockMembers[0], "Shutdown");
NetView nextView = gmsJoinLeave.getViewCreator().initialView;
assertTrue(gmsJoinLeave.isCoordinator());
assertTrue(nextView.getCoordinator().equals(gmsJoinLeaveMemberId));
Assert.assertFalse(nextView.getMembers().contains(mockMembers[1]));
Assert.assertFalse(nextView.getMembers().contains(mockMembers[2]));
assertTrue(nextView.getMembers().contains(mockMembers[3]));
}
/**
* In a scenario where a member leaves at the same time as a view install, the member that left
* should be recorded on all members. If the coordinator also happens to leave, the new
* coordinator should be able to process the new view correctly.
*/
@Test
public void testTimingWhereInstallViewComeAndDoesNotClearOutLeftMembersList() throws Exception {
initMocks(false);
prepareAndInstallView(mockMembers[0], createMemberList(mockMembers[0], mockMembers[1],
mockMembers[2], gmsJoinLeaveMemberId, mockMembers[3]));
Assert.assertFalse(gmsJoinLeave.isCoordinator());
// The coordinator and other members shutdown
gmsJoinLeave.memberShutdown(mockMembers[1], "Shutdown");
gmsJoinLeave.memberShutdown(mockMembers[2], "Shutdown");
// Install a view that still contains one of the departed members (as if something, such as a new
// member joining, triggered a new view before the coordinator left)
NetView netView = new NetView(mockMembers[0], 3/* new view id */,
createMemberList(mockMembers[0], gmsJoinLeaveMemberId, mockMembers[1], mockMembers[3]));
InstallViewMessage installViewMessage = getInstallViewMessage(netView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
// Now coordinator leaves
gmsJoinLeave.memberShutdown(mockMembers[0], "Shutdown");
NetView nextView = gmsJoinLeave.getViewCreator().initialView;
assertTrue(gmsJoinLeave.isCoordinator());
assertTrue(nextView.getCoordinator().equals(gmsJoinLeaveMemberId));
Assert.assertFalse(nextView.getMembers().contains(mockMembers[1]));
Assert.assertFalse(nextView.getMembers().contains(mockMembers[2]));
assertTrue(nextView.getMembers().contains(mockMembers[3]));
}
@Test
public void testViewBroadcaster() throws Exception {
initMocks();
List<InternalDistributedMember> members = new ArrayList<>(Arrays.asList(mockMembers));
gmsJoinLeaveMemberId.setVmViewId(1);
members.add(gmsJoinLeaveMemberId);
prepareAndInstallView(gmsJoinLeaveMemberId, members);
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
GMSJoinLeave.ViewBroadcaster b = gmsJoinLeave.new ViewBroadcaster();
b.run();
verify(messenger).sendUnreliably(isA(InstallViewMessage.class));
}
private void installView(int viewId, InternalDistributedMember coordinator,
List<InternalDistributedMember> members) throws IOException {
// prepare the view
NetView netView = new NetView(coordinator, viewId, members);
InstallViewMessage installViewMessage = getInstallViewMessage(netView, credentials, false);
gmsJoinLeave.processMessage(installViewMessage);
// verify(messenger).send(any(ViewAckMessage.class));
}
@Test
public void testIgnoreoldView() throws Exception {
initMocks(false);
installView(3, mockMembers[0], createMemberList(mockMembers[0], mockMembers[1], mockMembers[2],
gmsJoinLeaveMemberId, mockMembers[3]));
// now try to install an old view
installView(1, mockMembers[0], createMemberList(mockMembers[0], mockMembers[1], mockMembers[2],
gmsJoinLeaveMemberId, mockMembers[3]));
assertFalse("Expected view id is 3 but found " + gmsJoinLeave.getView().getViewId(),
gmsJoinLeave.getView().getViewId() == 1);
}
@Test
public void testClearViewRequests() throws Exception {
try {
initMocks(false);
System.setProperty(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY, "true");
gmsJoinLeave.join();
gmsJoinLeave.processMessage(
new JoinRequestMessage(mockMembers[0], mockMembers[0], credentials, -1, 0));
int viewRequests = gmsJoinLeave.getViewRequests().size();
assertTrue("There should be 1 viewRequest but found " + viewRequests, viewRequests == 1);
Thread.sleep(2 * ServiceConfig.MEMBER_REQUEST_COLLECTION_INTERVAL);
viewRequests = gmsJoinLeave.getViewRequests().size();
assertEquals("Found view requests: " + gmsJoinLeave.getViewRequests(), 0, viewRequests);
} finally {
System.getProperties().remove(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY);
}
}
/***
* validating ViewReplyProcessor's memberSuspected, processLeaveRequest, processRemoveRequest,
* processViewResponse methods
*/
@Test
public void testViewReplyProcessor() throws Exception {
try {
initMocks(false);
System.setProperty(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY, "true");
gmsJoinLeave.join();
Set<InternalDistributedMember> recips = new HashSet<>();
recips.add(mockMembers[0]);
recips.add(mockMembers[1]);
recips.add(mockMembers[2]);
recips.add(mockMembers[3]);
ViewReplyProcessor prepareProcessor = gmsJoinLeave.getPrepareViewReplyProcessor();
prepareProcessor.initialize(1, recips);
assertTrue("Prepare processor should be waiting ",
gmsJoinLeave.testPrepareProcessorWaiting());
prepareProcessor.memberSuspected(mockMembers[0]);
prepareProcessor.processLeaveRequest(mockMembers[1]);
prepareProcessor.processRemoveRequest(mockMembers[2]);
prepareProcessor.processViewResponse(1, mockMembers[3], null);
assertFalse("Prepare processor should not be waiting ",
gmsJoinLeave.testPrepareProcessorWaiting());
} finally {
System.getProperties().remove(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY);
}
}
/***
* validating ViewReplyProcessor's processPendingRequests method
*/
@Test
public void testViewReplyProcessor2() throws Exception {
try {
initMocks(false);
System.setProperty(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY, "true");
gmsJoinLeave.join();
Set<InternalDistributedMember> recips = new HashSet<>();
recips.add(mockMembers[0]);
recips.add(mockMembers[1]);
recips.add(mockMembers[2]);
recips.add(mockMembers[3]);
ViewReplyProcessor prepareProcessor = gmsJoinLeave.getPrepareViewReplyProcessor();
prepareProcessor.initialize(1, recips);
assertTrue("Prepare processor should be waiting ",
gmsJoinLeave.testPrepareProcessorWaiting());
Set<InternalDistributedMember> pendingLeaves = new HashSet<>();
pendingLeaves.add(mockMembers[0]);
Set<InternalDistributedMember> pendingRemovals = new HashSet<>();
pendingRemovals.add(mockMembers[1]);
prepareProcessor.processPendingRequests(pendingLeaves, pendingRemovals);
prepareProcessor.processViewResponse(1, mockMembers[2], null);
prepareProcessor.processViewResponse(1, mockMembers[3], null);
assertFalse("Prepare processor should not be waiting ",
gmsJoinLeave.testPrepareProcessorWaiting());
} finally {
System.getProperties().remove(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY);
}
}
// With the removal of the JoinResponse message from GMSJoinLeave.processJoinRequest (GEODE-870),
// this test now seems to be invalid.
// @Test
// public void testJoinResponseMsgWithBecomeCoordinator() throws Exception {
// initMocks(false);
// gmsJoinLeaveMemberId.getNetMember().setPreferredForCoordinator(false);
// JoinRequestMessage reqMsg = new JoinRequestMessage(gmsJoinLeaveMemberId, mockMembers[0], null,
// 56734);
// InternalDistributedMember ids = new InternalDistributedMember("localhost", 97898);
// ids.getNetMember().setPreferredForCoordinator(true);
// gmsJoinLeave.processMessage(reqMsg);
// ArgumentCaptor<JoinResponseMessage> ac = ArgumentCaptor.forClass(JoinResponseMessage.class);
// verify(messenger).send(ac.capture());
//
// assertTrue("Should have asked for becoming a coordinator",
// ac.getValue().getBecomeCoordinator());
// }
@Test
public void testNetworkPartionMessage() throws Exception {
try {
initMocks(true);
System.setProperty(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY, "true");
gmsJoinLeave.join();
installView(1, gmsJoinLeaveMemberId, createMemberList(mockMembers[0], mockMembers[1],
mockMembers[2], gmsJoinLeaveMemberId, mockMembers[3]));
for (int i = 1; i < 4; i++) {
RemoveMemberMessage msg =
new RemoveMemberMessage(gmsJoinLeaveMemberId, mockMembers[i], "crashed");
msg.setSender(gmsJoinLeaveMemberId);
gmsJoinLeave.processMessage(msg);
}
Timeout to = new Timeout(3 * ServiceConfig.MEMBER_REQUEST_COLLECTION_INTERVAL, new Times(1));
verify(messenger, to).send(isA(NetworkPartitionMessage.class));
} finally {
System.getProperties().remove(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY);
}
}
@Test
public void testViewIgnoredAfterShutdown() throws Exception {
try {
initMocks(true);
System.setProperty(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY, "true");
gmsJoinLeave.join();
installView(1, gmsJoinLeaveMemberId, createMemberList(mockMembers[0], mockMembers[1],
mockMembers[2], gmsJoinLeaveMemberId, mockMembers[3]));
gmsJoinLeave.stop();
for (int i = 1; i < 4; i++) {
RemoveMemberMessage msg =
new RemoveMemberMessage(gmsJoinLeaveMemberId, mockMembers[i], "crashed");
msg.setSender(gmsJoinLeaveMemberId);
gmsJoinLeave.processMessage(msg);
}
Timeout to = new Timeout(2 * ServiceConfig.MEMBER_REQUEST_COLLECTION_INTERVAL, never());
verify(messenger, to).send(isA(NetworkPartitionMessage.class));
} finally {
System.getProperties().remove(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY);
}
}
@Test
public void testViewNotSentWhenShuttingDown() throws Exception {
try {
initMocks(false);
System.setProperty(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY, "true");
gmsJoinLeave.join();
installView(1, gmsJoinLeaveMemberId, createMemberList(mockMembers[0], mockMembers[1],
mockMembers[2], gmsJoinLeaveMemberId, mockMembers[3]));
assertTrue(gmsJoinLeave.getViewCreator().isAlive());
when(manager.shutdownInProgress()).thenReturn(Boolean.TRUE);
for (int i = 1; i < 4; i++) {
RemoveMemberMessage msg =
new RemoveMemberMessage(gmsJoinLeaveMemberId, mockMembers[i], "crashed");
msg.setSender(gmsJoinLeaveMemberId);
gmsJoinLeave.processMessage(msg);
}
Awaitility.await("waiting for view creator to stop").atMost(5000, TimeUnit.MILLISECONDS)
.until(() -> !gmsJoinLeave.getViewCreator().isAlive());
assertEquals(1, gmsJoinLeave.getView().getViewId());
} finally {
System.getProperties().remove(GMSJoinLeave.BYPASS_DISCOVERY_PROPERTY);
}
}
@Test
public void testPreparedViewFoundDuringBecomeCoordinator() throws Exception {
initMocks(false);
prepareAndInstallView(gmsJoinLeaveMemberId,
createMemberList(gmsJoinLeaveMemberId, mockMembers[0]));
// a new member is joining
NetView preparedView =
new NetView(gmsJoinLeave.getView(), gmsJoinLeave.getView().getViewId() + 5);
mockMembers[1].setVmViewId(preparedView.getViewId());
preparedView.add(mockMembers[1]);
InstallViewMessage msg = getInstallViewMessage(preparedView, null, true);
gmsJoinLeave.processMessage(msg);
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
Thread.sleep(2000);
ViewCreator vc = gmsJoinLeave.getViewCreator();
int viewId = 0;
if (gmsJoinLeave.getPreparedView() == null) {
viewId = gmsJoinLeave.getView().getViewId();
} else {
viewId = gmsJoinLeave.getPreparedView().getViewId();
}
ViewAckMessage vack = new ViewAckMessage(gmsJoinLeaveMemberId, viewId, true);
vack.setSender(mockMembers[0]);
gmsJoinLeave.processMessage(vack);
vack = new ViewAckMessage(gmsJoinLeaveMemberId, viewId, true);
vack.setSender(mockMembers[1]);
gmsJoinLeave.processMessage(vack);
vack = new ViewAckMessage(gmsJoinLeaveMemberId, viewId, true);
vack.setSender(gmsJoinLeaveMemberId);
gmsJoinLeave.processMessage(vack);
int tries = 0;
while (!vc.waiting) {
if (tries > 30) {
Assert.fail("view creator never finished");
}
tries++;
Thread.sleep(1000);
}
NetView newView = gmsJoinLeave.getView();
System.out.println("new view is " + newView);
assertTrue(newView.contains(mockMembers[1]));
assertTrue(newView.getViewId() > preparedView.getViewId());
}
private NetView createView() {
List<InternalDistributedMember> mbrs = new LinkedList<>();
Set<InternalDistributedMember> shutdowns = new HashSet<>();
Set<InternalDistributedMember> crashes = new HashSet<>();
mbrs.add(mockMembers[0]);
mbrs.add(mockMembers[1]);
mbrs.add(mockMembers[2]);
mbrs.add(gmsJoinLeaveMemberId);
// prepare the view
NetView netView = new NetView(mockMembers[0], 1, mbrs, shutdowns, crashes);
return netView;
}
@Test
public void testCoordinatorFindRequestSuccess() throws Exception {
try {
initMocks(false);
HashSet<InternalDistributedMember> registrants = new HashSet<>();
registrants.add(mockMembers[0]);
FindCoordinatorResponse fcr = new FindCoordinatorResponse(mockMembers[0], mockMembers[0],
false, null, registrants, false, true, null);
NetView view = createView();
JoinResponseMessage jrm = new JoinResponseMessage(mockMembers[0], view, 0);
TcpClientWrapper tcpClientWrapper = mock(TcpClientWrapper.class);
gmsJoinLeave.setTcpClientWrapper(tcpClientWrapper);
FindCoordinatorRequest fcreq =
new FindCoordinatorRequest(gmsJoinLeaveMemberId, new HashSet<>(), -1, null, 0, "");
int connectTimeout = (int) services.getConfig().getMemberTimeout() * 2;
when(tcpClientWrapper.sendCoordinatorFindRequest(new InetSocketAddress("localhost", 12345),
fcreq, connectTimeout)).thenReturn(fcr);
callAsync(() -> {
gmsJoinLeave.installView(view);
});
assertTrue("Should be able to join ", gmsJoinLeave.join());
} finally {
}
}
private void callAsync(Runnable run) {
Thread th = new Thread(run);
th.start();
}
@Test
public void testCoordinatorFindRequestFailure() throws Exception {
try {
initMocks(false);
HashSet<InternalDistributedMember> registrants = new HashSet<>();
registrants.add(mockMembers[0]);
FindCoordinatorResponse fcr = new FindCoordinatorResponse(mockMembers[0], mockMembers[0],
false, null, registrants, false, true, null);
NetView view = createView();
JoinResponseMessage jrm = new JoinResponseMessage(mockMembers[0], view, 0);
gmsJoinLeave.setJoinResponseMessage(jrm);
TcpClientWrapper tcpClientWrapper = mock(TcpClientWrapper.class);
gmsJoinLeave.setTcpClientWrapper(tcpClientWrapper);
FindCoordinatorRequest fcreq =
new FindCoordinatorRequest(gmsJoinLeaveMemberId, new HashSet<>(), -1, null, 0, "");
int connectTimeout = (int) services.getConfig().getMemberTimeout() * 2;
// passing the wrong port here, so it will fail
when(tcpClientWrapper.sendCoordinatorFindRequest(new InetSocketAddress("localhost", 12346),
fcreq, connectTimeout)).thenReturn(fcr);
assertFalse("Should not be able to join ", gmsJoinLeave.join());
} finally {
}
}
private void waitForViewAndFinalCheckInProgress(int viewId) throws InterruptedException {
// wait for the view processing thread to collect and process the requests
int sleeps = 0;
while (!gmsJoinLeave.isStopping() && (gmsJoinLeave.getView().getViewId() == viewId)) {
if (sleeps++ > 20) {
System.out.println("view requests: " + gmsJoinLeave.getViewRequests());
System.out.println("current view: " + gmsJoinLeave.getView());
throw new RuntimeException("timeout waiting for view #" + viewId);
}
Thread.sleep(1000);
System.out.println("Empty sleeps " + sleeps + " stopping: " + gmsJoinLeave.isStopping());
}
}
class GMSJoinLeaveTest extends GMSJoinLeave {
public GMSJoinLeaveTest() {
super();
}
@Override
boolean checkIfAvailable(InternalDistributedMember fmbr) {
if (removeMember != null) {
try {
if (removeMember.equals(fmbr)) {
GMSJoinLeaveJUnitTest.this.processRemoveMessage(fmbr);
Thread.sleep(1000000);
}
} catch (InterruptedException e) {
}
return true;
} else if (leaveMember != null) {
try {
if (leaveMember.equals(fmbr)) {
GMSJoinLeaveJUnitTest.this.processLeaveMessage(fmbr);
Thread.sleep(1000000);
}
} catch (InterruptedException e) {
}
return true;
} else {
return super.checkIfAvailable(fmbr);
}
}
}
@Test
public void testRemoveRequestWhileWaitingForFinalResponse() throws Exception {
initMocks(true, true);
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
installView();
int viewId = gmsJoinLeave.getView().getViewId();
System.out.println("Current viewid " + viewId);
this.removeMember = mockMembers[0];
processJoinMessage(gmsJoinLeave.getMemberID(), mockMembers[2], 98989);
waitForViewAndFinalCheckInProgress(viewId);
this.removeMember = null;
assertTrue("testFlagForRemovalRequest should be true",
gmsJoinLeave.getViewCreator().getTestFlageForRemovalRequest());
}
@Test
public void testLeaveRequestWhileWaitingForFinalResponse() throws Exception {
initMocks(true, true);
GMSJoinLeaveTestHelper.becomeCoordinatorForTest(gmsJoinLeave);
installView();
int viewId = gmsJoinLeave.getView().getViewId();
System.out.println("Current viewid " + viewId);
this.leaveMember = mockMembers[0];
processJoinMessage(gmsJoinLeave.getMemberID(), mockMembers[2], 98989);
waitForViewAndFinalCheckInProgress(viewId);
this.leaveMember = null;
assertTrue("testFlagForRemovalRequest should be true",
gmsJoinLeave.getViewCreator().getTestFlageForRemovalRequest());
}
private void installView() throws Exception {
final int viewInstallationTime = 15000;
NetView oldView = null;
long giveup = System.currentTimeMillis() + viewInstallationTime;
while (System.currentTimeMillis() < giveup && oldView == null) {
Thread.sleep(500);
oldView = gmsJoinLeave.getView();
}
assertTrue(oldView != null); // it should have become coordinator and installed a view
NetView newView = new NetView(oldView, oldView.getViewId() + 1);
newView.add(mockMembers[0]);
newView.add(mockMembers[1]);
gmsJoinLeave.installView(newView);
}
private void processJoinMessage(InternalDistributedMember coordinator,
InternalDistributedMember newMember, int port) {
JoinRequestMessage reqMsg = new JoinRequestMessage(coordinator, newMember, null, port, 0);
gmsJoinLeave.processMessage(reqMsg);
}
private void processRemoveMessage(InternalDistributedMember rMember) {
RemoveMemberMessage msg =
new RemoveMemberMessage(gmsJoinLeave.getMemberID(), rMember, "testing");
msg.setSender(gmsJoinLeave.getMemberID());
gmsJoinLeave.processMessage(msg);
}
private void processLeaveMessage(InternalDistributedMember rMember) {
LeaveRequestMessage msg =
new LeaveRequestMessage(gmsJoinLeave.getMemberID(), rMember, "testing");
msg.setSender(rMember);
gmsJoinLeave.processMessage(msg);
}
}
|
GEODE-2653: Add FlakyTest category to test with Thread.sleep
|
geode-core/src/test/java/org/apache/geode/distributed/internal/membership/gms/membership/GMSJoinLeaveJUnitTest.java
|
GEODE-2653: Add FlakyTest category to test with Thread.sleep
|
|
Java
|
apache-2.0
|
ebe025c6238880a2932cac987fa9029dca81aeed
| 0
|
suxinde2009/Android-Jigsaw-Puzzle,RudraNilBasu/Android-Jigsaw-Puzzle,prashant31191/Android-Jigsaw-Puzzle,Drooids/Android-Jigsaw-Puzzle,sergio11/Android-Jigsaw-Puzzle,julesbond007/Android-Jigsaw-Puzzle,julesbond007/Android-Jigsaw-Puzzle
|
package com.jigdraw.draw.views;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.MotionEvent;
import android.view.View;
import com.jigdraw.draw.R;
/**
* Custom view representing the drawing canvas the user uses to draw.
* <p/>
* Created by Jay Paulynice
*/
public class DrawingView extends View {
/**
* the draw path
*/
private Path drawPath;
/**
* draw and canvas paint
*/
private Paint drawPaint, canvasPaint;
/**
* default color
*/
private int paintColor = 0xFF660000;
/**
* the drawing canvas
*/
private Canvas drawCanvas;
/**
* the canvas bitmap
*/
private Bitmap canvasBitmap;
/**
* current and last brush size
*/
private float brushSize, lastBrushSize;
/**
* whether erase is set
*/
private boolean erase = false;
/**
* Create new drawing view with context and attributes and
* setting up the default parameters.
*
* @param context the context
* @param attrs the attributes
*/
public DrawingView(Context context, AttributeSet attrs) {
super(context, attrs);
setupDrawing();
}
/**
* Init parameters
*/
private void setupDrawing() {
//prepare for drawing and setup paint stroke properties
brushSize = getResources().getInteger(R.integer.medium_size);
lastBrushSize = brushSize;
drawPath = new Path();
drawPaint = new Paint();
drawPaint.setColor(paintColor);
drawPaint.setAntiAlias(true);
drawPaint.setStrokeWidth(brushSize);
drawPaint.setStyle(Paint.Style.STROKE);
drawPaint.setStrokeJoin(Paint.Join.ROUND);
drawPaint.setStrokeCap(Paint.Cap.ROUND);
canvasPaint = new Paint(Paint.DITHER_FLAG);
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
canvasBitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
drawCanvas = new Canvas(canvasBitmap);
}
@Override
protected void onDraw(Canvas canvas) {
canvas.drawBitmap(canvasBitmap, 0, 0, canvasPaint);
canvas.drawPath(drawPath, drawPaint);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
float touchX = event.getX();
float touchY = event.getY();
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
drawPath.moveTo(touchX, touchY);
break;
case MotionEvent.ACTION_MOVE:
drawPath.lineTo(touchX, touchY);
break;
case MotionEvent.ACTION_UP:
drawPath.lineTo(touchX, touchY);
drawCanvas.drawPath(drawPath, drawPaint);
drawPath.reset();
break;
default:
return false;
}
//redraw
invalidate();
return true;
}
/**
* Set new color for drawing
*
* @param newColor the new color
*/
public void setColor(String newColor) {
invalidate();
paintColor = Color.parseColor(newColor);
drawPaint.setColor(paintColor);
}
/**
* Set new brush size for drawing
*
* @param newSize the new brush size
*/
public void setBrushSize(float newSize) {
float pixelAmount = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP,
newSize, getResources().getDisplayMetrics());
brushSize = pixelAmount;
drawPaint.setStrokeWidth(brushSize);
}
/**
* @return last brush size
*/
public float getLastBrushSize() {
return lastBrushSize;
}
/**
* Set last brush size
*
* @param lastSize the brush size
*/
public void setLastBrushSize(float lastSize) {
lastBrushSize = lastSize;
}
/**
* Set erase to true when the erase button is clicked.
*
* @param isErase {@code true} if erase is clicked, {@code false} otherwise
*/
public void setErase(boolean isErase) {
erase = isErase;
if (erase) drawPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR));
else drawPaint.setXfermode(null);
}
/**
* Create new canvas
*/
public void startNew() {
drawCanvas.drawColor(0, PorterDuff.Mode.CLEAR);
invalidate();
}
}
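A minimal usage sketch follows, assuming a host Activity; the layout resource, view id, and the color/size values are illustrative assumptions and not part of the original file.
// Hypothetical Activity wiring for DrawingView (sketch only; the layout id and
// view id are assumed names, not taken from the original project).
public class DrawActivitySketch extends android.app.Activity {
private DrawingView drawView;
@Override
protected void onCreate(android.os.Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_draw); // assumed layout containing a DrawingView
drawView = (DrawingView) findViewById(R.id.drawing); // assumed view id
drawView.setColor("#FF660000"); // same default color the view starts with
drawView.setBrushSize(getResources().getInteger(R.integer.medium_size)); // reuse the medium size
drawView.setErase(false); // start in draw mode; setErase(true) switches to eraser
}
}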
|
app/src/main/java/com/jigdraw/draw/views/DrawingView.java
|
package com.jigdraw.draw.views;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.MotionEvent;
import android.view.View;
import com.jigdraw.draw.R;
/**
* Custom view representing the drawing canvas the user uses to draw.
*
* Created by Jay Paulynice
*/
public class DrawingView extends View {
private Path drawPath;
private Paint drawPaint, canvasPaint;
private int paintColor = 0xFF660000;
private Canvas drawCanvas;
private Bitmap canvasBitmap;
private float brushSize, lastBrushSize;
private boolean erase = false;
public DrawingView(Context context, AttributeSet attrs) {
super(context, attrs);
setupDrawing();
}
private void setupDrawing() {
//prepare for drawing and setup paint stroke properties
brushSize = getResources().getInteger(R.integer.medium_size);
lastBrushSize = brushSize;
drawPath = new Path();
drawPaint = new Paint();
drawPaint.setColor(paintColor);
drawPaint.setAntiAlias(true);
drawPaint.setStrokeWidth(brushSize);
drawPaint.setStyle(Paint.Style.STROKE);
drawPaint.setStrokeJoin(Paint.Join.ROUND);
drawPaint.setStrokeCap(Paint.Cap.ROUND);
canvasPaint = new Paint(Paint.DITHER_FLAG);
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
canvasBitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
drawCanvas = new Canvas(canvasBitmap);
}
@Override
protected void onDraw(Canvas canvas) {
canvas.drawBitmap(canvasBitmap, 0, 0, canvasPaint);
canvas.drawPath(drawPath, drawPaint);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
float touchX = event.getX();
float touchY = event.getY();
//respond to down, move and up events
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
drawPath.moveTo(touchX, touchY);
break;
case MotionEvent.ACTION_MOVE:
drawPath.lineTo(touchX, touchY);
break;
case MotionEvent.ACTION_UP:
drawPath.lineTo(touchX, touchY);
drawCanvas.drawPath(drawPath, drawPaint);
drawPath.reset();
break;
default:
return false;
}
//redraw
invalidate();
return true;
}
public void setColor(String newColor) {
invalidate();
paintColor = Color.parseColor(newColor);
drawPaint.setColor(paintColor);
}
public void setBrushSize(float newSize) {
float pixelAmount = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP,
newSize, getResources().getDisplayMetrics());
brushSize = pixelAmount;
drawPaint.setStrokeWidth(brushSize);
}
public float getLastBrushSize() {
return lastBrushSize;
}
public void setLastBrushSize(float lastSize) {
lastBrushSize = lastSize;
}
public void setErase(boolean isErase) {
erase = isErase;
if (erase) drawPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR));
else drawPaint.setXfermode(null);
}
public void startNew() {
drawCanvas.drawColor(0, PorterDuff.Mode.CLEAR);
invalidate();
}
}
|
changes
|
app/src/main/java/com/jigdraw/draw/views/DrawingView.java
|
changes
|
|
Java
|
apache-2.0
|
4e9133241b61150338f70fac42de5b19488e1b52
| 0
|
apache/incubator-shardingsphere,apache/incubator-shardingsphere,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc
|
/*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.orchestration.internal.yaml.representer;
import org.yaml.snakeyaml.introspector.Property;
import org.yaml.snakeyaml.nodes.CollectionNode;
import org.yaml.snakeyaml.nodes.MappingNode;
import org.yaml.snakeyaml.nodes.Node;
import org.yaml.snakeyaml.nodes.NodeTuple;
import org.yaml.snakeyaml.nodes.ScalarNode;
import org.yaml.snakeyaml.nodes.SequenceNode;
import org.yaml.snakeyaml.nodes.Tag;
import org.yaml.snakeyaml.representer.Represent;
import org.yaml.snakeyaml.representer.Representer;
import java.util.Collection;
import java.util.HashSet;
/**
* Datasource parameter representer.
*
* @author panjuan
*/
public class DataSourceParameterRepresenter extends Representer {
private static Collection<String> reservedNodeNames = new HashSet<>();
static {
reservedNodeNames.add("password");
}
public DataSourceParameterRepresenter() {
super();
nullRepresenter = new DataSourceParameterRepresenter.NullRepresent();
}
@Override
protected NodeTuple representJavaBeanProperty(final Object javaBean, final Property property, final Object propertyValue, final Tag customTag) {
NodeTuple tuple = super.representJavaBeanProperty(javaBean, property, propertyValue, customTag);
Node valueNode = tuple.getValueNode();
Node keyNode = tuple.getKeyNode();
if (keyNode instanceof ScalarNode && ((ScalarNode) keyNode).getValue().equals("password")) {
return tuple;
}
if (Tag.NULL.equals(valueNode.getTag())) {
return null;
}
if (valueNode instanceof CollectionNode) {
if (Tag.SEQ.equals(valueNode.getTag()) && ((SequenceNode) valueNode).getValue().isEmpty()) {
return null;
}
if (Tag.MAP.equals(valueNode.getTag()) && ((MappingNode) valueNode).getValue().isEmpty()) {
return null;
}
}
return tuple;
}
private boolean isNullNode(final Node valueNode) {
return Tag.NULL.equals(valueNode.getTag());
}
private boolean isEmptyCollectionNode(final Node valueNode) {
return valueNode instanceof CollectionNode && (isEmptySequenceNode(valueNode) || isEmptyMappingNode(valueNode));
}
private boolean isEmptySequenceNode(final Node valueNode) {
return Tag.SEQ.equals(valueNode.getTag()) && ((SequenceNode) valueNode).getValue().isEmpty();
}
private boolean isEmptyMappingNode(final Node valueNode) {
return Tag.MAP.equals(valueNode.getTag()) && ((MappingNode) valueNode).getValue().isEmpty();
}
private class NullRepresent implements Represent {
public Node representData(final Object data) {
return representScalar(Tag.NULL, "");
}
}
}
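As a rough illustration of how this representer is meant to be used, here is a minimal sketch that plugs it into SnakeYAML's Yaml; SomeBean is a made-up bean for demonstration only and not part of the original project.
// Sketch only: dumping a bean through the representer so null properties are skipped.
// SomeBean is a hypothetical bean defined just for this example.
import org.yaml.snakeyaml.Yaml;
public final class RepresenterUsageSketch {
public static class SomeBean {
private String url = "jdbc:mysql://localhost:3306/demo";
private String username; // left null on purpose; the representer drops it
public String getUrl() { return url; }
public void setUrl(final String url) { this.url = url; }
public String getUsername() { return username; }
public void setUsername(final String username) { this.username = username; }
}
public static void main(final String[] args) {
Yaml yaml = new Yaml(new DataSourceParameterRepresenter());
System.out.println(yaml.dumpAsMap(new SomeBean())); // prints only the non-null url property
}
}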
|
sharding-orchestration/sharding-orchestration-core/src/main/java/io/shardingsphere/orchestration/internal/yaml/representer/DataSourceParameterRepresenter.java
|
/*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.orchestration.internal.yaml.representer;
import org.yaml.snakeyaml.introspector.Property;
import org.yaml.snakeyaml.nodes.CollectionNode;
import org.yaml.snakeyaml.nodes.MappingNode;
import org.yaml.snakeyaml.nodes.Node;
import org.yaml.snakeyaml.nodes.NodeTuple;
import org.yaml.snakeyaml.nodes.ScalarNode;
import org.yaml.snakeyaml.nodes.SequenceNode;
import org.yaml.snakeyaml.nodes.Tag;
import org.yaml.snakeyaml.representer.Represent;
import org.yaml.snakeyaml.representer.Representer;
import java.util.Collection;
import java.util.HashSet;
/**
* Datasource parameter representer.
*
* @author panjuan
*/
public class DataSourceParameterRepresenter extends Representer {
private static Collection<String> reservedNodeNames = new HashSet<>();
static {
reservedNodeNames.add("password");
}
public DataSourceParameterRepresenter() {
super();
nullRepresenter = new DataSourceParameterRepresenter.NullRepresent();
}
@Override
protected NodeTuple representJavaBeanProperty(final Object javaBean, final Property property, final Object propertyValue, final Tag customTag) {
NodeTuple tuple = super.representJavaBeanProperty(javaBean, property, propertyValue, customTag);
Node valueNode = tuple.getValueNode();
Node keyNode = tuple.getKeyNode();
if (keyNode instanceof ScalarNode && ((ScalarNode) keyNode).getValue().equals("password")) {
return tuple;
}
if (Tag.NULL.equals(valueNode.getTag())) {
return null;
}
if (valueNode instanceof CollectionNode) {
if (Tag.SEQ.equals(valueNode.getTag()) && ((SequenceNode) valueNode).getValue().isEmpty()) {
return null;
}
if (Tag.MAP.equals(valueNode.getTag()) && ((MappingNode) valueNode).getValue().isEmpty()) {
return null;
}
}
return tuple;
}
private class NullRepresent implements Represent {
public Node representData(final Object data) {
return representScalar(Tag.NULL, "");
}
}
}
|
isNullNode()
|
sharding-orchestration/sharding-orchestration-core/src/main/java/io/shardingsphere/orchestration/internal/yaml/representer/DataSourceParameterRepresenter.java
|
isNullNode()
|
|
Java
|
apache-2.0
|
3947474a62b295d82cfebf6fe2152499be4b3efe
| 0
|
maheshika/carbon-governance,isuruwan/carbon-governance,thushara35/carbon-governance,jranabahu/carbon-governance,jranabahu/carbon-governance,cnapagoda/carbon-governance,Rajith90/carbon-governance,isuruwan/carbon-governance,wso2/carbon-governance,denuwanthi/carbon-governance,prasa7/carbon-governance,thushara35/carbon-governance,sameerak/carbon-governance,thushara35/carbon-governance,isuruwan/carbon-governance,laki88/carbon-governance,denuwanthi/carbon-governance,prasa7/carbon-governance,denuwanthi/carbon-governance,isuruwan/carbon-governance,wso2/carbon-governance,daneshk/carbon-governance,denuwanthi/carbon-governance,sameerak/carbon-governance,laki88/carbon-governance,wso2/carbon-governance,Rajith90/carbon-governance,wso2/carbon-governance,laki88/carbon-governance,daneshk/carbon-governance,jranabahu/carbon-governance,Rajith90/carbon-governance,cnapagoda/carbon-governance,daneshk/carbon-governance,prasa7/carbon-governance,maheshika/carbon-governance,Rajith90/carbon-governance,prasa7/carbon-governance,maheshika/carbon-governance,sameerak/carbon-governance,sameerak/carbon-governance,laki88/carbon-governance,maheshika/carbon-governance,thushara35/carbon-governance,jranabahu/carbon-governance,cnapagoda/carbon-governance,cnapagoda/carbon-governance,daneshk/carbon-governance
|
/*
* Copyright (c) 2005-2009, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.carbon.governance.registry.extensions.validators;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifact;
import org.wso2.carbon.governance.api.exception.GovernanceException;
import org.wso2.carbon.governance.api.util.GovernanceUtils;
import org.wso2.carbon.governance.registry.extensions.interfaces.CustomValidations;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.jdbc.handlers.RequestContext;
import org.wso2.carbon.registry.core.session.UserRegistry;
import java.util.Map;
public class AttributeExistenceValidator implements CustomValidations {
private static final Log log = LogFactory.getLog(AttributeExistenceValidator.class);
private String[] attributes = new String[0];
public void init(Map parameterMap) {
if (parameterMap != null) {
String temp = (String) parameterMap.get("attributes");
if (temp != null) {
attributes = temp.split(",");
}
}
}
public boolean validate(RequestContext context) {
if (attributes.length == 0) {
return true;
}
String resourcePath = context.getResourcePath().getPath();
int index = resourcePath.indexOf(RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH);
if (index < 0) {
log.warn("Unable to use Validator For Resource Path: " + resourcePath);
return false;
}
index += RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH.length();
if (resourcePath.length() <= index) {
log.warn("Unable to use Validator For Resource Path: " + resourcePath);
return false;
}
resourcePath = resourcePath.substring(index);
try {
UserRegistry registry = ((UserRegistry) context.getSystemRegistry());
if (!registry.resourceExists(resourcePath)) {
registry = ((UserRegistry) context.getSystemRegistry())
.getChrootedRegistry(RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH);
}
GovernanceArtifact governanceArtifact =
GovernanceUtils.retrieveGovernanceArtifactByPath(registry, resourcePath);
for (String attribute : attributes) {
if (!validateAttribute(governanceArtifact, attribute)) {
return false;
}
}
} catch (RegistryException e) {
log.error("Unable to obtain registry instance", e);
}
return true;
}
protected boolean validateAttribute(GovernanceArtifact governanceArtifact, String attribute)
throws GovernanceException {
return (governanceArtifact.getAttribute(attribute) != null);
}
}
|
components/governance/org.wso2.carbon.governance.registry.extensions/src/org/wso2/carbon/governance/registry/extensions/validators/AttributeExistenceValidator.java
|
/*
* Copyright (c) 2005-2009, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.carbon.governance.registry.extensions.validators;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.governance.api.common.dataobjects.GovernanceArtifact;
import org.wso2.carbon.governance.api.exception.GovernanceException;
import org.wso2.carbon.governance.api.util.GovernanceUtils;
import org.wso2.carbon.governance.registry.extensions.interfaces.CustomValidations;
import org.wso2.carbon.registry.core.RegistryConstants;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.jdbc.handlers.RequestContext;
import org.wso2.carbon.registry.core.session.UserRegistry;
import java.util.Map;
public class AttributeExistenceValidator implements CustomValidations {
private static final Log log = LogFactory.getLog(AttributeExistenceValidator.class);
private String[] attributes = new String[0];
public void init(Map parameterMap) {
if (parameterMap != null) {
String temp = (String) parameterMap.get("attributes");
if (temp != null) {
attributes = temp.split(",");
}
}
}
public boolean validate(RequestContext context) {
if (attributes.length == 0) {
return true;
}
String resourcePath = context.getResourcePath().getPath();
int index = resourcePath.indexOf(RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH);
if (index < 0) {
log.warn("Unable to use Validator For Resource Path: " + resourcePath);
return false;
}
index += RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH.length();
if (resourcePath.length() <= index) {
log.warn("Unable to use Validator For Resource Path: " + resourcePath);
return false;
}
resourcePath = resourcePath.substring(index);
try {
UserRegistry registry = ((UserRegistry) context.getSystemRegistry())
.getChrootedRegistry(RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH);
GovernanceArtifact governanceArtifact =
GovernanceUtils.retrieveGovernanceArtifactByPath(registry, resourcePath);
for (String attribute : attributes) {
if (!validateAttribute(governanceArtifact, attribute)) {
return false;
}
}
} catch (RegistryException e) {
log.error("Unable to obtain registry instance", e);
}
return true;
}
protected boolean validateAttribute(GovernanceArtifact governanceArtifact, String attribute)
throws GovernanceException {
return (governanceArtifact.getAttribute(attribute) != null);
}
}
|
Updating AttributeExistenceValidator registry retrieval mechanism to check for registry type before changing registry root
|
components/governance/org.wso2.carbon.governance.registry.extensions/src/org/wso2/carbon/governance/registry/extensions/validators/AttributeExistenceValidator.java
|
Updating AttributeExistenceValidator registry retrieval mechanism to check for registry type before changing registry root
|
|
Java
|
apache-2.0
|
ebd98939d24a3d333770fff708f5d8f7a1c251e9
| 0
|
MichaelNedzelsky/intellij-community,xfournet/intellij-community,clumsy/intellij-community,semonte/intellij-community,SerCeMan/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,kool79/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,MichaelNedzelsky/intellij-community,hurricup/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,slisson/intellij-community,da1z/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,Distrotech/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,diorcety/intellij-community,ahb0327/intellij-community,ivan-fedorov/intellij-community,mglukhikh/intellij-community,petteyg/intellij-community,ryano144/intellij-community,signed/intellij-community,kool79/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,izonder/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,orekyuu/intellij-community,petteyg/intellij-community,holmes/intellij-community,jexp/idea2,Lekanich/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,consulo/consulo,samthor/intellij-community,joewalnes/idea-community,allotria/intellij-community,tmpgit/intellij-community,ThiagoGarciaAlves/intellij-community,Lekanich/intellij-community,samthor/intellij-community,apixandru/intellij-community,signed/intellij-community,fnouama/intellij-community,allotria/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,fitermay/intellij-community,signed/intellij-community,Distrotech/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,kdwink/intellij-community,ryano144/intellij-community,clumsy/intellij-community,ernestp/consulo,salguarnieri/intellij-community,vvv1559/intellij-community,allotria/intellij-community,caot/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,adedayo/intellij-community,dslomov/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,nicolargo/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,allotria/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,jexp/idea2,petteyg/intellij-community,joewalnes/idea-community,blademainer/intellij-community,Distrotech/intellij-community,kool79/intellij-community,clumsy/intellij-community,fnouama/intellij-community,FHannes/intellij-community,ibinti/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,kool79/intellij-community,semonte/intellij-community,akosyakov/intellij-community,signed/intellij-community,slisson/intellij-community,vladmm/intellij-community,da1z/intellij-community,supersven/intellij-community,clumsy/intellij-community,slisson/intellij-community,izonder/intellij-community,suncycheng/intellij-community,tmpgit/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,ivan-fedorov/intellij-community,kool79/in
tellij-community,gnuhub/intellij-community,Lekanich/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,caot/intellij-community,vladmm/intellij-community,asedunov/intellij-community,ibinti/intellij-community,alphafoobar/intellij-community,mglukhikh/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,salguarnieri/intellij-community,izonder/intellij-community,da1z/intellij-community,semonte/intellij-community,FHannes/intellij-community,samthor/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,asedunov/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,da1z/intellij-community,izonder/intellij-community,joewalnes/idea-community,ahb0327/intellij-community,kdwink/intellij-community,fnouama/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,amith01994/intellij-community,caot/intellij-community,kdwink/intellij-community,diorcety/intellij-community,michaelgallacher/intellij-community,samthor/intellij-community,fitermay/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,orekyuu/intellij-community,semonte/intellij-community,apixandru/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,ryano144/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,kool79/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,amith01994/intellij-community,FHannes/intellij-community,xfournet/intellij-community,allotria/intellij-community,holmes/intellij-community,vvv1559/intellij-community,kool79/intellij-community,jexp/idea2,izonder/intellij-community,slisson/intellij-community,orekyuu/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,gnuhub/intellij-community,Distrotech/intellij-community,joewalnes/idea-community,diorcety/intellij-community,petteyg/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,lucafavatella/intellij-community,akosyakov/intellij-community,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,diorcety/intellij-community,blademainer/intellij-community,akosyakov/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,ernestp/consulo,fnouama/intellij-community,apixandru/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,ftomassetti/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,caot/intellij-community,ivan-fedorov/intellij-community,muntasirsyed/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,alphafoobar/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,wreckJ/intellij-community,FHannes/intellij-community,slisson/intellij-community,FHannes/intellij-community,slisson/intellij-community,signed/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,youdonghai/intellij-community,holmes/intellij-community,ahb0327/intellij-community,alphafoobar/intellij-community,
lucafavatella/intellij-community,FHannes/intellij-community,Lekanich/intellij-community,nicolargo/intellij-community,joewalnes/idea-community,ftomassetti/intellij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,wreckJ/intellij-community,slisson/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,robovm/robovm-studio,MER-GROUP/intellij-community,fnouama/intellij-community,caot/intellij-community,pwoodworth/intellij-community,muntasirsyed/intellij-community,MER-GROUP/intellij-community,Distrotech/intellij-community,caot/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,MER-GROUP/intellij-community,jagguli/intellij-community,ryano144/intellij-community,blademainer/intellij-community,xfournet/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,akosyakov/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,SerCeMan/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,dslomov/intellij-community,retomerz/intellij-community,clumsy/intellij-community,retomerz/intellij-community,da1z/intellij-community,consulo/consulo,da1z/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,petteyg/intellij-community,robovm/robovm-studio,supersven/intellij-community,fitermay/intellij-community,gnuhub/intellij-community,lucafavatella/intellij-community,ernestp/consulo,tmpgit/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,vladmm/intellij-community,kdwink/intellij-community,izonder/intellij-community,petteyg/intellij-community,ibinti/intellij-community,MER-GROUP/intellij-community,hurricup/intellij-community,allotria/intellij-community,blademainer/intellij-community,kool79/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,vladmm/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,supersven/intellij-community,retomerz/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,gnuhub/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,hurricup/intellij-community,adedayo/intellij-community,blademainer/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,xfournet/intellij-community,hurricup/intellij-community,Lekanich/intellij-community,da1z/intellij-community,fitermay/intellij-community,slisson/intellij-community,fengbaicanhe/intellij-community,signed/intellij-community,asedunov/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,fnouama/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,ftomassetti/intellij-community,signed/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,caot/intellij-community,akosyakov/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,holmes/intellij-community,Lekanich/intellij-community,izonder/intellij-community,muntasirsyed/intellij-community,MER-GROUP/intellij-community,fengbaicanhe/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,SerCeMan/intellij-community,MichaelNedzelsky/intellij-community,jexp/idea2,wreckJ/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,dslomov/intellij-community,amith01994/intellij-community,adeday
o/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,supersven/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,asedunov/intellij-community,MichaelNedzelsky/intellij-community,apixandru/intellij-community,jagguli/intellij-community,apixandru/intellij-community,diorcety/intellij-community,allotria/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,ol-loginov/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,retomerz/intellij-community,slisson/intellij-community,supersven/intellij-community,nicolargo/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,consulo/consulo,nicolargo/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,fengbaicanhe/intellij-community,ahb0327/intellij-community,ol-loginov/intellij-community,fnouama/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,dslomov/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,MichaelNedzelsky/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,TangHao1987/intellij-community,akosyakov/intellij-community,pwoodworth/intellij-community,idea4bsd/idea4bsd,tmpgit/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,orekyuu/intellij-community,da1z/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,adedayo/intellij-community,ryano144/intellij-community,diorcety/intellij-community,hurricup/intellij-community,diorcety/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,supersven/intellij-community,signed/intellij-community,suncycheng/intellij-community,supersven/intellij-community,mglukhikh/intellij-community,akosyakov/intellij-community,kdwink/intellij-community,ernestp/consulo,michaelgallacher/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,vladmm/intellij-community,retomerz/intellij-community,ftomassetti/intellij-community,kdwink/intellij-community,ol-loginov/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,gnuhub/intellij-community,semonte/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,apixandru/intellij-community,kool79/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,nicolargo/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,jagguli/intellij-community,MichaelNedzelsky/intellij-community,mglukhikh/intellij-community,ernestp/consulo,supersven/intellij-community,xfournet/intellij-community,adedayo/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,hurricup/intellij-community,fengbaicanhe/intellij-community,clumsy/intellij-community,xfournet/intellij-community,samthor/intellij-community,SerCeMan/intellij-community,joewalnes/idea-community,akosyakov/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,kdwink/intellij-c
ommunity,nicolargo/intellij-community,MER-GROUP/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,jexp/idea2,blademainer/intellij-community,SerCeMan/intellij-community,fengbaicanhe/intellij-community,ernestp/consulo,semonte/intellij-community,fnouama/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,ol-loginov/intellij-community,robovm/robovm-studio,blademainer/intellij-community,orekyuu/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,jexp/idea2,samthor/intellij-community,dslomov/intellij-community,mglukhikh/intellij-community,holmes/intellij-community,ibinti/intellij-community,jexp/idea2,slisson/intellij-community,nicolargo/intellij-community,da1z/intellij-community,TangHao1987/intellij-community,jagguli/intellij-community,asedunov/intellij-community,blademainer/intellij-community,jagguli/intellij-community,SerCeMan/intellij-community,suncycheng/intellij-community,petteyg/intellij-community,TangHao1987/intellij-community,signed/intellij-community,amith01994/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,jexp/idea2,jagguli/intellij-community,adedayo/intellij-community,kdwink/intellij-community,FHannes/intellij-community,kdwink/intellij-community,SerCeMan/intellij-community,fnouama/intellij-community,clumsy/intellij-community,samthor/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,ibinti/intellij-community,retomerz/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,kool79/intellij-community,ryano144/intellij-community,diorcety/intellij-community,dslomov/intellij-community,ol-loginov/intellij-community,blademainer/intellij-community,amith01994/intellij-community,ol-loginov/intellij-community,amith01994/intellij-community,robovm/robovm-studio,vvv1559/intellij-community,MER-GROUP/intellij-community,hurricup/intellij-community,fengbaicanhe/intellij-community,holmes/intellij-community,petteyg/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,dslomov/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,vvv1559/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,adedayo/intellij-community,consulo/consulo,pwoodworth/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,dslomov/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,pwoodworth/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,slisson/intellij-community,adedayo/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,fitermay/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,holmes/intellij-community,ryano144/intellij-community,allotria/intellij-community,youdonghai/intellij-community,caot/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,vladmm/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,asedunov/intellij-community,vvv1559/intellij-c
ommunity,ibinti/intellij-community,ol-loginov/intellij-community,ahb0327/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,apixandru/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,izonder/intellij-community,hurricup/intellij-community,dslomov/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,clumsy/intellij-community,kool79/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,allotria/intellij-community,izonder/intellij-community,vladmm/intellij-community,slisson/intellij-community,fitermay/intellij-community,tmpgit/intellij-community,semonte/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,samthor/intellij-community,amith01994/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,kdwink/intellij-community,ThiagoGarciaAlves/intellij-community,robovm/robovm-studio,signed/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,retomerz/intellij-community,clumsy/intellij-community,MichaelNedzelsky/intellij-community,petteyg/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,retomerz/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,suncycheng/intellij-community,joewalnes/idea-community,FHannes/intellij-community,semonte/intellij-community,robovm/robovm-studio,gnuhub/intellij-community,gnuhub/intellij-community,supersven/intellij-community,hurricup/intellij-community,jagguli/intellij-community,retomerz/intellij-community,holmes/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,da1z/intellij-community,apixandru/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,robovm/robovm-studio,kool79/intellij-community,semonte/intellij-community,dslomov/intellij-community,da1z/intellij-community,TangHao1987/intellij-community,samthor/intellij-community,robovm/robovm-studio,diorcety/intellij-community,muntasirsyed/intellij-community,joewalnes/idea-community,tmpgit/intellij-community,lucafavatella/intellij-community,mglukhikh/intellij-community,gnuhub/intellij-community,supersven/intellij-community,fitermay/intellij-community,fitermay/intellij-community,FHannes/intellij-community,consulo/consulo,semonte/intellij-community,lucafavatella/intellij-community,signed/intellij-community,wreckJ/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,MER-GROUP/intellij-community,joewalnes/idea-community,youdonghai/intellij-community,dslomov/intellij-community,supersven/intellij-community,consulo/consulo,ivan-fedorov/intellij-community,ryano144/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,ftomassetti/intellij-community,Lekanich/intellij-community,holmes/intellij-community,apixandru/intellij-community,akosyakov/intellij-comm
unity,youdonghai/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,ivan-fedorov/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,nicolargo/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,holmes/intellij-community
|
package com.intellij.refactoring.util;
import com.intellij.ant.PsiAntElement;
import com.intellij.codeInsight.ChangeContextUtil;
import com.intellij.codeInsight.ExpectedTypeInfo;
import com.intellij.codeInsight.ExpectedTypesProvider;
import com.intellij.codeInsight.daemon.impl.analysis.HighlightControlFlowUtil;
import com.intellij.codeInsight.highlighting.HighlightManager;
import com.intellij.codeInspection.redundantCast.RedundantCastUtil;
import com.intellij.lang.StdLanguages;
import com.intellij.lang.properties.psi.Property;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.impl.ModuleUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.controlFlow.ControlFlowUtil;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.javadoc.PsiDocTag;
import com.intellij.psi.jsp.WebDirectoryElement;
import com.intellij.psi.search.*;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlAttributeValue;
import com.intellij.psi.xml.XmlElementDecl;
import com.intellij.psi.xml.XmlTag;
import com.intellij.refactoring.PackageWrapper;
import com.intellij.refactoring.RefactoringSettings;
import com.intellij.refactoring.ui.InfoDialog;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.Processor;
import com.intellij.util.containers.HashMap;
import gnu.trove.THashMap;
import java.io.File;
import java.util.*;
public class RefactoringUtil {
private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.util.RefactoringUtil");
public static final int EXPR_COPY_SAFE = 0;
public static final int EXPR_COPY_UNSAFE = 1;
public static final int EXPR_COPY_PROHIBITED = 2;
public static void showInfoDialog(String info, Project project) {
RefactoringSettings settings = RefactoringSettings.getInstance();
if (settings.IS_SHOW_ACTION_INFO) {
InfoDialog usagesWarning = new InfoDialog(info, project);
usagesWarning.show();
settings.IS_SHOW_ACTION_INFO = usagesWarning.isToShowInFuture();
}
}
public static boolean isSourceRoot(final PsiDirectory directory) {
if (directory.getManager() == null) return false;
final Project project = directory.getProject();
if (project == null) return false;
final VirtualFile virtualFile = directory.getVirtualFile();
final VirtualFile sourceRootForFile = ProjectRootManager.getInstance(project).getFileIndex().getSourceRootForFile(virtualFile);
return Comparing.equal(virtualFile, sourceRootForFile);
}
public static boolean isInStaticContext(PsiElement element) {
return PsiUtil.getEnclosingStaticElement(element, null) != null;
}
public static boolean isResolvableType(PsiType type) {
return type.accept(new PsiTypeVisitor<Boolean>() {
public Boolean visitPrimitiveType(PsiPrimitiveType primitiveType) {
return Boolean.TRUE;
}
public Boolean visitArrayType(PsiArrayType arrayType) {
return arrayType.getComponentType().accept(this);
}
public Boolean visitClassType(PsiClassType classType) {
if (classType.resolve() == null) return Boolean.FALSE;
PsiType[] parameters = classType.getParameters();
for (PsiType parameter : parameters) {
if (parameter != null && !parameter.accept(this).booleanValue()) return Boolean.FALSE;
}
return Boolean.TRUE;
}
public Boolean visitWildcardType(PsiWildcardType wildcardType) {
if (wildcardType.getBound() != null) return wildcardType.getBound().accept(this);
return Boolean.TRUE;
}
}).booleanValue();
}
public static PsiElement replaceOccurenceWithFieldRef(PsiExpression occurrence, PsiField newField, PsiClass destinationClass)
throws IncorrectOperationException {
final PsiManager manager = occurrence.getManager();
final String fieldName = newField.getName();
final PsiVariable psiVariable = manager.getResolveHelper().resolveReferencedVariable(fieldName, occurrence);
final PsiElementFactory factory = manager.getElementFactory();
if (psiVariable != null && psiVariable.equals(newField)) {
return occurrence.replace(factory.createExpressionFromText(fieldName, null));
}
else {
final PsiReferenceExpression ref = (PsiReferenceExpression)factory.createExpressionFromText("this." + fieldName, null);
if (newField.hasModifierProperty(PsiModifier.STATIC)) {
final PsiReferenceExpression referenceExpression =
factory.createReferenceExpression(destinationClass);
ref.getQualifierExpression().replace(referenceExpression);
}
return occurrence.replace(ref);
}
}
/**
* @see com.intellij.psi.codeStyle.CodeStyleManager#suggestUniqueVariableName(String, com.intellij.psi.PsiElement, boolean)
* Cannot use method from code style manager: a collision with fieldToReplace is not a collision
*/
public static String suggestUniqueVariableName(String baseName, PsiElement place, PsiField fieldToReplace) {
int index = 0;
while (true) {
final String name = index > 0 ? baseName + index : baseName;
index++;
final PsiManager manager = place.getManager();
PsiResolveHelper helper = manager.getResolveHelper();
PsiVariable refVar = helper.resolveReferencedVariable(name, place);
if (refVar != null && !manager.areElementsEquivalent(refVar, fieldToReplace)) continue;
class Cancel extends RuntimeException {
}
try {
place.accept(new PsiRecursiveElementVisitor() {
public void visitClass(PsiClass aClass) {
}
public void visitVariable(PsiVariable variable) {
if (name.equals(variable.getName())) {
throw new Cancel();
}
}
});
}
catch (Cancel e) {
continue;
}
return name;
}
}
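  // For example (illustrative only): with baseName "count" and an unrelated local "count" already
  // visible at place, the loop falls through to "count1". If the only clash is fieldToReplace itself
  // (say a field "count" that is being inlined), the plain name "count" is accepted, which is exactly
  // the case the CodeStyleManager variant would reject.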
public static boolean isValidName(final Project project, final PsiElement psiElement, final String newName) {
if (newName == null) {
return false;
}
if (psiElement instanceof PsiAntElement) {
return newName.trim().matches("[\\d\\w\\_\\.\\-]*");
}
if (psiElement instanceof PsiFile || psiElement instanceof PsiDirectory) {
return newName.indexOf(File.separatorChar) < 0 && newName.indexOf('/') < 0;
}
if (psiElement instanceof WebDirectoryElement) {
return newName.indexOf('/') < 0;
}
if (psiElement instanceof XmlTag ||
psiElement instanceof XmlAttribute ||
psiElement instanceof XmlElementDecl
) {
return newName.trim().matches("([\\d\\w\\_\\.\\-]+:)?[\\d\\w\\_\\.\\-]+");
}
if (psiElement instanceof XmlAttributeValue) {
return true; // ask meta data
}
if (psiElement instanceof Property) {
return true;
}
return psiElement.getLanguage().getNamesValidator().isIdentifier(newName.trim(), project);
}
  //order of usages across different files is irrelevant
public static void sortDepthFirstRightLeftOrder(final UsageInfo[] usages) {
Arrays.sort(usages, new Comparator<UsageInfo>() {
public int compare(final UsageInfo usage1, final UsageInfo usage2) {
final PsiElement element1 = usage1.getElement();
final PsiElement element2 = usage2.getElement();
if (element1 == null || element2 == null) return 0;
return element2.getTextRange().getStartOffset() - element1.getTextRange().getStartOffset();
}
});
}
public static interface UsageInfoFactory {
UsageInfo createUsageInfo(PsiElement usage, int startOffset, int endOffset);
}
public static void addUsagesInStringsAndComments(PsiElement element, String stringToSearch, List<UsageInfo> results,
UsageInfoFactory factory) {
PsiManager manager = element.getManager();
PsiSearchHelper helper = manager.getSearchHelper();
SearchScope scope = element.getUseScope();
scope = scope.intersectWith(GlobalSearchScope.projectScope(manager.getProject()));
int index = stringToSearch.lastIndexOf('.');
String identifierToSearch = index >= 0 ? stringToSearch.substring(index + 1) : stringToSearch;
PsiLiteralExpression[] literals = helper.findStringLiteralsContainingIdentifier(identifierToSearch, scope);
for (PsiLiteralExpression literal : literals) {
processStringOrComment(literal, stringToSearch, results, factory);
}
PsiElement[] comments = helper.findCommentsContainingIdentifier(identifierToSearch, scope);
for (PsiElement comment : comments) {
processStringOrComment(comment, stringToSearch, results, factory);
}
}
public static boolean isSearchTextOccurencesEnabled(PsiElement element) {
return element instanceof PsiPackage || (element instanceof PsiClass && ((PsiClass)element).getQualifiedName() != null) ||
(element instanceof PsiFile && !StdLanguages.JAVA.equals(element.getLanguage()));
}
public static PsiElement getVariableScope(PsiLocalVariable localVar) {
if (!(localVar instanceof ImplicitVariable)) {
return localVar.getParent().getParent();
}
else {
return ((ImplicitVariable)localVar).getDeclarationScope();
}
}
public static void addTextOccurences(PsiElement element, String stringToSearch, GlobalSearchScope searchScope,
final List<UsageInfo> results, final UsageInfoFactory factory) {
processTextOccurences(element, stringToSearch, searchScope, new Processor<UsageInfo>() {
public boolean process(UsageInfo t) {
results.add(t);
return true;
}
}, factory);
}
public static void processTextOccurences(PsiElement element, String stringToSearch, GlobalSearchScope searchScope,
final Processor<UsageInfo> processor, final UsageInfoFactory factory) {
PsiSearchHelper helper = element.getManager().getSearchHelper();
helper.processUsagesInNonJavaFiles(element, stringToSearch,
new PsiNonJavaFileReferenceProcessor() {
public boolean process(PsiFile psiFile, int startOffset, int endOffset) {
UsageInfo usageInfo = factory.createUsageInfo(psiFile, startOffset, endOffset);
if (usageInfo != null) {
if (!processor.process(usageInfo)) return false;
}
return true;
}
},
searchScope);
}
private static void processStringOrComment(PsiElement element, String stringToSearch, List<UsageInfo> results,
UsageInfoFactory factory) {
String elementText = element.getText();
for (int index = 0; index < elementText.length(); index++) {
index = elementText.indexOf(stringToSearch, index);
if (index < 0) break;
final PsiReference referenceAt = element.findReferenceAt(index);
if (referenceAt != null && referenceAt.resolve() != null) continue;
if (index > 0) {
char c = elementText.charAt(index - 1);
if (Character.isJavaIdentifierPart(c) && c != '$') {
continue;
}
}
if (index + stringToSearch.length() < elementText.length()) {
char c = elementText.charAt(index + stringToSearch.length());
if (Character.isJavaIdentifierPart(c) && c != '$') {
continue;
}
}
UsageInfo usageInfo = factory.createUsageInfo(element, index, index + stringToSearch.length());
if (usageInfo != null) {
results.add(usageInfo);
}
index += stringToSearch.length();
}
}
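  // For example (illustrative only): searching the comment "see MyClass and MyClassFactory" for
  // "MyClass" yields a single usage. The second match is rejected because it is followed by the
  // identifier character 'F', and matches that already resolve as real references are skipped as well.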
public static void renameNonCodeUsages(final Project project, final UsageInfo[] usages) {
PsiDocumentManager.getInstance(project).commitAllDocuments();
HashMap<PsiFile,ArrayList<UsageOffset>> filesToOffsetsMap = new HashMap<PsiFile, ArrayList<UsageOffset>>();
for (UsageInfo usage : usages) {
final PsiElement element = usage.getElement();
if (element == null || !element.isValid()) continue;
if (usage instanceof NonCodeUsageInfo) {
final PsiFile containingFile = element.getContainingFile();
int fileOffset = element.getTextRange().getStartOffset() + usage.startOffset;
ArrayList<UsageOffset> list = filesToOffsetsMap.get(containingFile);
if (list == null) {
list = new ArrayList<UsageOffset>();
filesToOffsetsMap.put(containingFile, list);
}
list.add(new UsageOffset(fileOffset, fileOffset + usage.endOffset - usage.startOffset,
((NonCodeUsageInfo)usage).newText));
}
}
for (PsiFile file : filesToOffsetsMap.keySet()) {
final Document editorDocument = PsiDocumentManager.getInstance(project).getDocument(file);
ArrayList<UsageOffset> list = filesToOffsetsMap.get(file);
UsageOffset[] offsets = list.toArray(new UsageOffset[list.size()]);
Arrays.sort(offsets);
for (int i = offsets.length - 1; i >= 0; i--) {
UsageOffset usageOffset = offsets[i];
editorDocument.replaceString(usageOffset.startOffset, usageOffset.endOffset, usageOffset.newText);
}
PsiDocumentManager.getInstance(project).commitDocument(editorDocument);
}
PsiDocumentManager.getInstance(project).commitAllDocuments();
}
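  // The replacements above are applied per document from the highest offset downwards so that the
  // remaining offsets stay valid. For example (illustrative only): renaming occurrences at offsets
  // 10 and 40 of the same file must patch offset 40 first; patching offset 10 first would shift the
  // text and make the edit at 40 land in the wrong place whenever the new text has a different length.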
private static class UsageOffset implements Comparable {
final int startOffset;
final int endOffset;
final String newText;
public UsageOffset(int startOffset, int endOffset, String newText) {
this.startOffset = startOffset;
this.endOffset = endOffset;
this.newText = newText;
}
public int compareTo(Object o) {
return startOffset - ((UsageOffset)o).startOffset;
}
}
public static PsiReturnStatement[] findReturnStatements(PsiMethod method) {
ArrayList<PsiElement> vector = new ArrayList<PsiElement>();
PsiCodeBlock body = method.getBody();
if (body != null) {
addReturnStatements(vector, body);
}
return vector.toArray(new PsiReturnStatement[vector.size()]);
}
private static void addReturnStatements(ArrayList<PsiElement> vector, PsiElement element) {
if (element instanceof PsiReturnStatement) {
vector.add(element);
}
else if (element instanceof PsiClass) {
return;
}
else {
PsiElement[] children = element.getChildren();
for (PsiElement child : children) {
addReturnStatements(vector, child);
}
}
}
public static PsiElement getParentStatement(PsiElement place, boolean skipScopingStatements) {
PsiElement parent = place;
while (true) {
if (parent instanceof PsiStatement) break;
parent = parent.getParent();
if (parent == null) return null;
}
PsiElement parentStatement = parent;
parent = parentStatement instanceof PsiStatement ? parentStatement : parentStatement.getParent();
while (parent instanceof PsiStatement) {
if (!skipScopingStatements &&
((parent instanceof PsiForStatement && parentStatement == ((PsiForStatement)parent).getBody())
|| (parent instanceof PsiForeachStatement && parentStatement == ((PsiForeachStatement)parent).getBody())
|| (parent instanceof PsiWhileStatement && parentStatement == ((PsiWhileStatement)parent).getBody())
|| (parent instanceof PsiIfStatement &&
(parentStatement == ((PsiIfStatement)parent).getThenBranch() || parentStatement == ((PsiIfStatement)parent).getElseBranch())))
) {
return parentStatement;
}
parentStatement = parent;
parent = parent.getParent();
}
return parentStatement;
}
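  // For example (illustrative only): for an expression inside "if (cond) doIt();",
  // skipScopingStatements = false anchors at the "doIt();" statement itself (the then branch),
  // while skipScopingStatements = true keeps climbing and anchors at the enclosing if statement.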
public static PsiElement getParentExpressionAnchorElement(PsiElement place) {
PsiElement parent = place;
while (true) {
if (isExpressionAnchorElement(parent)) break;
parent = parent.getParent();
if (parent == null) return null;
}
PsiElement parentStatement = parent;
parent = parentStatement.getParent();
while (parent instanceof PsiStatement) {
parentStatement = parent;
parent = parent.getParent();
}
return parentStatement;
}
public static boolean isExpressionAnchorElement(PsiElement element) {
return element instanceof PsiStatement || element instanceof PsiClassInitializer
|| element instanceof PsiField || element instanceof PsiMethod;
}
  /**
   * @param expression
   * @return the enclosing loop statement if the expression is part of some loop's condition or of a for loop's update part;
   *         null otherwise
   */
public static PsiElement getLoopForLoopCondition(PsiExpression expression) {
PsiExpression outermost = expression;
while (outermost.getParent() instanceof PsiExpression) {
outermost = (PsiExpression)outermost.getParent();
}
if (outermost.getParent() instanceof PsiForStatement) {
final PsiForStatement forStatement = (PsiForStatement)outermost.getParent();
if (forStatement.getCondition() == outermost) {
return forStatement;
}
else {
return null;
}
}
if (outermost.getParent() instanceof PsiExpressionStatement && outermost.getParent().getParent() instanceof PsiForStatement) {
final PsiForStatement forStatement = (PsiForStatement)outermost.getParent().getParent();
if (forStatement.getUpdate() == outermost.getParent()) {
return forStatement;
}
else {
return null;
}
}
if (outermost.getParent() instanceof PsiWhileStatement) {
return outermost.getParent();
}
if (outermost.getParent() instanceof PsiDoWhileStatement) {
return outermost.getParent();
}
return null;
}
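  // For example (illustrative only): in "for (int i = 0; i < limit(); i++) { total += i; }" the
  // expressions of "i < limit()" and of "i++" both map to the enclosing PsiForStatement, the condition
  // of a while or do-while maps to that loop, and an expression taken from the loop body maps to null.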
public static PsiClass getThisClass(PsiElement place) {
PsiElement parent = place.getContext();
if (parent == null) return null;
PsiElement prev = null;
while (true) {
if (parent instanceof PsiClass) {
if (!(parent instanceof PsiAnonymousClass && ((PsiAnonymousClass)parent).getArgumentList() == prev)) {
return (PsiClass)parent;
}
}
prev = parent;
parent = parent.getContext();
if (parent == null) return null;
}
}
public static PsiClass getThisResolveClass(final PsiReferenceExpression place) {
final JavaResolveResult resolveResult = place.advancedResolve(false);
final PsiElement scope = resolveResult.getCurrentFileResolveScope();
if (scope instanceof PsiClass) {
return (PsiClass)scope;
}
return null;
/*
PsiElement parent = place.getContext();
PsiElement prev = null;
while (true) {
if (parent instanceof PsiClass) {
if (!(parent instanceof PsiAnonymousClass && ((PsiAnonymousClass)parent).getArgumentList() == prev))
return (PsiClass)parent;
}
prev = parent;
parent = parent.getContext();
if (parent == null) return null;
}
*/
}
public static PsiCall getEnclosingConstructorCall (PsiJavaCodeReferenceElement ref) {
PsiElement parent = ref.getParent();
if (ref instanceof PsiReferenceExpression && parent instanceof PsiMethodCallExpression) return (PsiCall)parent;
if (parent instanceof PsiAnonymousClass) {
parent = parent.getParent();
}
return parent instanceof PsiNewExpression ? (PsiNewExpression)parent : null;
}
public static final PsiMethod getEnclosingMethod (PsiElement element) {
final PsiElement container = PsiTreeUtil.getParentOfType(element, PsiMethod.class, PsiClass.class);
return container instanceof PsiMethod ? ((PsiMethod)container) : null;
}
public static void renameVariableReferences(PsiVariable variable, String newName, SearchScope scope)
throws IncorrectOperationException {
PsiManager manager = variable.getManager();
PsiSearchHelper helper = manager.getSearchHelper();
PsiReference[] refs = helper.findReferences(variable, scope, false);
for (PsiReference reference : refs) {
if (reference != null) {
reference.handleElementRename(newName);
}
}
}
public static boolean canBeDeclaredFinal(PsiVariable variable) {
LOG.assertTrue(variable instanceof PsiLocalVariable || variable instanceof PsiParameter);
final boolean isReassigned = HighlightControlFlowUtil.isReassigned(variable, new THashMap<PsiElement, Collection<ControlFlowUtil.VariableInfo>>(), new THashMap<PsiParameter, Boolean>());
return !isReassigned;
}
public static PsiExpression inlineVariable(PsiLocalVariable variable, PsiExpression initializer,
PsiJavaCodeReferenceElement ref) throws IncorrectOperationException {
PsiManager manager = initializer.getManager();
PsiClass variableParent = RefactoringUtil.getThisClass(initializer);
PsiClass refParent = RefactoringUtil.getThisClass(ref);
initializer = convertInitializerToNormalExpression(initializer, variable.getType());
ChangeContextUtil.encodeContextInfo(initializer, false);
PsiExpression expr = (PsiExpression)ref.replace(initializer);
PsiType exprType = expr.getType();
if (exprType != null && !variable.getType().equals(exprType)) {
PsiTypeCastExpression cast = (PsiTypeCastExpression)manager.getElementFactory().createExpressionFromText("(t)a", null);
cast.getCastType().replace(variable.getTypeElement());
cast.getOperand().replace(expr);
PsiExpression exprCopy = (PsiExpression)expr.copy();
cast = (PsiTypeCastExpression)expr.replace(cast);
if (!RedundantCastUtil.isCastRedundant(cast)) {
expr = cast;
} else {
PsiElement toReplace = cast;
while (toReplace.getParent() instanceof PsiParenthesizedExpression) {
toReplace = toReplace.getParent();
}
expr = (PsiExpression)toReplace.replace(exprCopy);
}
}
ChangeContextUtil.clearContextInfo(initializer);
PsiClass thisClass = variableParent;
PsiThisExpression thisAccessExpr = null;
if (Comparing.equal(variableParent, refParent)) {
thisAccessExpr = createThisExpression(manager, null);
}
else {
if (!(thisClass instanceof PsiAnonymousClass)) {
thisAccessExpr = createThisExpression(manager, thisClass);
}
}
return (PsiExpression)ChangeContextUtil.decodeContextInfo(expr, thisClass, thisAccessExpr);
}
public static PsiThisExpression createThisExpression(PsiManager manager, PsiClass qualifierClass)
throws IncorrectOperationException {
PsiElementFactory factory = manager.getElementFactory();
if (qualifierClass != null) {
PsiThisExpression qualifiedThis = (PsiThisExpression)factory.createExpressionFromText("q.this", null);
qualifiedThis = (PsiThisExpression)CodeStyleManager.getInstance(manager.getProject()).reformat(qualifiedThis);
qualifiedThis.getQualifier().bindToElement(qualifierClass);
return qualifiedThis;
}
else {
return (PsiThisExpression)factory.createExpressionFromText("this", null);
}
}
  /**
   * Removes the reference to the specified class from the given reference list.
   *
   * @return a copy of the removed reference, or null if the reference list contained no reference to this class
   */
public static PsiJavaCodeReferenceElement removeFromReferenceList(PsiReferenceList refList, PsiClass aClass)
throws IncorrectOperationException {
PsiJavaCodeReferenceElement[] refs = refList.getReferenceElements();
for (PsiJavaCodeReferenceElement ref : refs) {
if (ref.isReferenceTo(aClass)) {
PsiJavaCodeReferenceElement refCopy = (PsiJavaCodeReferenceElement)ref.copy();
ref.delete();
return refCopy;
}
}
return null;
}
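  // For example (illustrative only): when "implements Cloneable" is pulled up from a subclass, the
  // copy returned here can be added to the superclass's implements list afterwards, so the original
  // reference text and qualifier survive instead of being rebuilt from the class name.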
public static PsiJavaCodeReferenceElement findReferenceToClass(PsiReferenceList refList, PsiClass aClass) {
PsiJavaCodeReferenceElement[] refs = refList.getReferenceElements();
for (PsiJavaCodeReferenceElement ref : refs) {
if (ref.isReferenceTo(aClass)) {
return ref;
}
}
return null;
}
public static PsiType getTypeByExpressionWithExpectedType(PsiExpression expr) {
PsiType type = getTypeByExpression(expr);
if (type != null) return type;
ExpectedTypeInfo[] expectedTypes = ExpectedTypesProvider.getInstance(expr.getProject()).getExpectedTypes(expr, false);
if (expectedTypes.length == 1) {
type = expectedTypes[0].getType();
if (!type.equalsToText("java.lang.Object")) return type;
}
return null;
}
public static PsiType getTypeByExpression(PsiExpression expr) {
PsiElementFactory factory = expr.getManager().getElementFactory();
return getTypeByExpression(expr, factory);
}
private static PsiType getTypeByExpression(PsiExpression expr, final PsiElementFactory factory) {
PsiType type = expr.getType();
if (type == null) {
if (expr instanceof PsiArrayInitializerExpression) {
PsiExpression[] initializers = ((PsiArrayInitializerExpression)expr).getInitializers();
if (initializers != null && initializers.length > 0) {
PsiType initType = getTypeByExpression(initializers[0]);
if (initType == null) return null;
return initType.createArrayType();
}
}
return null;
}
PsiClass refClass = PsiUtil.resolveClassInType(type);
if (refClass instanceof PsiAnonymousClass) {
type = ((PsiAnonymousClass)refClass).getBaseClassType();
}
if (type == PsiType.NULL) {
ExpectedTypeInfo[] infos = ExpectedTypesProvider.getInstance(expr.getProject()).getExpectedTypes(expr, false);
if (infos.length == 1) {
type = infos[0].getType();
}
else {
type = factory.createTypeByFQClassName("java.lang.Object", expr.getResolveScope());
}
}
return GenericsUtil.getVariableTypeByExpressionType(type);
}
public static boolean isAssignmentLHS(PsiElement element) {
PsiElement parent = element.getParent();
if (parent instanceof PsiAssignmentExpression
&& element.equals(((PsiAssignmentExpression)parent).getLExpression())) {
return true;
}
else {
return isPlusPlusOrMinusMinus(parent);
}
}
public static boolean isPlusPlusOrMinusMinus(PsiElement element) {
if (element instanceof PsiPrefixExpression) {
PsiJavaToken operandSign = ((PsiPrefixExpression)element).getOperationSign();
return operandSign.getTokenType() == JavaTokenType.PLUSPLUS
|| operandSign.getTokenType() == JavaTokenType.MINUSMINUS;
}
else if (element instanceof PsiPostfixExpression) {
PsiJavaToken operandSign = ((PsiPostfixExpression)element).getOperationSign();
return operandSign.getTokenType() == JavaTokenType.PLUSPLUS
|| operandSign.getTokenType() == JavaTokenType.MINUSMINUS;
}
else {
return false;
}
}
public static void removeFinalParameters(PsiMethod method)
throws IncorrectOperationException {
// Remove final parameters
PsiParameterList paramList = method.getParameterList();
if (paramList != null) {
PsiParameter[] params = paramList.getParameters();
for (PsiParameter param : params) {
if (param.hasModifierProperty(PsiModifier.FINAL)) {
param.getModifierList().setModifierProperty(PsiModifier.FINAL, false);
}
}
}
}
public static PsiElement getAnchorElementForMultipleExpressions(PsiExpression[] occurrences, PsiElement scope) {
PsiElement anchor = null;
for (PsiExpression occurrence : occurrences) {
if (scope != null && !PsiTreeUtil.isAncestor(scope, occurrence, false)) {
continue;
}
PsiElement anchor1 = getParentExpressionAnchorElement(occurrence);
if (anchor1 == null) {
return null;
}
if (anchor == null) {
anchor = anchor1;
}
else {
PsiElement commonParent = PsiTreeUtil.findCommonParent(anchor, anchor1);
if (commonParent == null || anchor.getTextRange() == null || anchor1.getTextRange() == null) return null;
PsiElement firstAnchor = anchor.getTextRange().getStartOffset() < anchor1.getTextRange().getStartOffset() ?
anchor : anchor1;
if (commonParent.equals(firstAnchor)) {
anchor = firstAnchor;
}
else {
PsiElement parent = firstAnchor;
while (!parent.getParent().equals(commonParent)) {
parent = parent.getParent();
}
final PsiElement newAnchor = getParentExpressionAnchorElement(parent);
if (newAnchor != null) {
anchor = newAnchor;
}
else {
anchor = parent;
}
}
}
}
if (occurrences.length > 1 && anchor.getParent().getParent() instanceof PsiSwitchStatement) {
PsiSwitchStatement switchStatement = (PsiSwitchStatement)anchor.getParent().getParent();
if (switchStatement.getBody().equals(anchor.getParent())) {
int startOffset = occurrences[0].getTextRange().getStartOffset();
int endOffset = occurrences[occurrences.length - 1].getTextRange().getEndOffset();
PsiStatement[] statements = switchStatement.getBody().getStatements();
boolean isInDifferentCases = false;
for (PsiStatement statement : statements) {
if (statement instanceof PsiSwitchLabelStatement) {
int caseOffset = statement.getTextOffset();
if (startOffset < caseOffset && caseOffset < endOffset) {
isInDifferentCases = true;
break;
}
}
}
if (isInDifferentCases) {
anchor = switchStatement;
}
}
}
return anchor;
}
public static void setVisibility(PsiModifierList modifierList, String newVisibility)
throws IncorrectOperationException {
modifierList.setModifierProperty(PsiModifier.PRIVATE, false);
modifierList.setModifierProperty(PsiModifier.PUBLIC, false);
modifierList.setModifierProperty(PsiModifier.PROTECTED, false);
modifierList.setModifierProperty(newVisibility, true);
}
public static boolean isMethodUsage(PsiElement element) {
if (!(element instanceof PsiJavaCodeReferenceElement)) return false;
PsiElement parent = element.getParent();
if (parent instanceof PsiCall) {
return true;
}
else if (parent instanceof PsiAnonymousClass) {
return element.equals(((PsiAnonymousClass)parent).getBaseClassReference());
}
return false;
}
public static PsiExpressionList getArgumentListByMethodReference(PsiElement ref) {
if (ref instanceof PsiEnumConstant) return ((PsiEnumConstant)ref).getArgumentList();
PsiElement parent = ref.getParent();
if (parent instanceof PsiCall) {
return ((PsiCall)parent).getArgumentList();
}
else if (parent instanceof PsiAnonymousClass) {
return ((PsiNewExpression)parent.getParent()).getArgumentList();
}
LOG.assertTrue(false);
return null;
}
public static PsiCallExpression getCallExpressionByMethodReference(PsiJavaCodeReferenceElement ref) {
PsiElement parent = ref.getParent();
if (parent instanceof PsiMethodCallExpression) {
return (PsiMethodCallExpression)parent;
}
else if (parent instanceof PsiNewExpression) {
return (PsiNewExpression)parent;
}
else if (parent instanceof PsiAnonymousClass) {
return (PsiNewExpression)parent.getParent();
}
else {
LOG.assertTrue(false);
return null;
}
}
/**
* @return List of highlighters
*/
public static ArrayList<RangeHighlighter> highlightAllOccurences(Project project, PsiElement[] occurences, Editor editor) {
ArrayList<RangeHighlighter> highlighters = new ArrayList<RangeHighlighter>();
HighlightManager highlightManager = HighlightManager.getInstance(project);
EditorColorsManager colorsManager = EditorColorsManager.getInstance();
TextAttributes attributes = colorsManager.getGlobalScheme().getAttributes(EditorColors.SEARCH_RESULT_ATTRIBUTES);
highlightManager.addOccurrenceHighlights(editor, occurences, attributes, true, highlighters);
return highlighters;
}
public static ArrayList<RangeHighlighter> highlightOccurences(Project project, PsiElement[] occurences, Editor editor) {
if (occurences.length > 1) {
return highlightAllOccurences(project, occurences, editor);
}
return new ArrayList<RangeHighlighter>();
}
public static String createTempVar(PsiExpression expr, PsiElement context, boolean declareFinal)
throws IncorrectOperationException {
PsiElement anchorStatement = getParentStatement(context, true);
LOG.assertTrue(anchorStatement != null && anchorStatement.getParent() != null);
Project project = expr.getProject();
String[] suggestedNames =
CodeStyleManager.getInstance(project).suggestVariableName(VariableKind.LOCAL_VARIABLE, null, expr, null).names;
final String prefix = suggestedNames[0];
final String id = CodeStyleManager.getInstance(project).suggestUniqueVariableName(prefix, context, true);
PsiElementFactory factory = expr.getManager().getElementFactory();
if (expr instanceof PsiParenthesizedExpression) {
PsiExpression expr1 = ((PsiParenthesizedExpression)expr).getExpression();
if (expr1 != null) {
expr = expr1;
}
}
PsiDeclarationStatement decl =
factory.createVariableDeclarationStatement(id, expr.getType(), expr);
if (declareFinal) {
((PsiLocalVariable)decl.getDeclaredElements()[0]).getModifierList().setModifierProperty(PsiModifier.FINAL, true);
}
anchorStatement.getParent().addBefore(decl, anchorStatement);
return id;
}
public static int verifySafeCopyExpression(PsiElement expr) {
return verifySafeCopyExpressionSubElement(expr);
}
private static int verifySafeCopyExpressionSubElement(PsiElement element) {
int result = EXPR_COPY_SAFE;
if (element == null) return result;
if (element instanceof PsiThisExpression
|| element instanceof PsiSuperExpression
|| element instanceof PsiIdentifier
) {
return EXPR_COPY_SAFE;
}
if (element instanceof PsiMethodCallExpression) {
result = EXPR_COPY_UNSAFE;
}
if (element instanceof PsiNewExpression) {
return EXPR_COPY_PROHIBITED;
}
if (element instanceof PsiAssignmentExpression) {
return EXPR_COPY_PROHIBITED;
}
if (isPlusPlusOrMinusMinus(element)) {
return EXPR_COPY_PROHIBITED;
}
PsiElement[] children = element.getChildren();
for (PsiElement child : children) {
int childResult = verifySafeCopyExpressionSubElement(child);
result = Math.max(result, childResult);
}
return result;
}
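  // For example (illustrative only): "this.total" and "data[i]" are classified EXPR_COPY_SAFE,
  // a call such as "list.size()" is EXPR_COPY_UNSAFE because evaluating it twice may be observable,
  // and "new Foo()", assignments and "i++" are EXPR_COPY_PROHIBITED since duplicating them changes behaviour.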
public static PsiExpression convertInitializerToNormalExpression(PsiExpression expression,
PsiType forcedReturnType)
throws IncorrectOperationException {
PsiExpression returnValue;
if (expression instanceof PsiArrayInitializerExpression) {
returnValue =
createNewExpressionFromArrayInitializer((PsiArrayInitializerExpression)expression,
forcedReturnType);
}
else {
returnValue = expression;
}
return returnValue;
}
public static PsiExpression createNewExpressionFromArrayInitializer(PsiArrayInitializerExpression initializer,
PsiType forcedType)
throws IncorrectOperationException {
PsiType initializerType = null;
if (initializer != null) {
      // initializerType = myExpression.getType();
if (forcedType != null) {
initializerType = forcedType;
}
else {
initializerType = getTypeByExpression(initializer);
}
}
if (initializerType == null) {
return initializer;
}
LOG.assertTrue(initializerType instanceof PsiArrayType);
PsiElementFactory factory = initializer.getManager().getElementFactory();
PsiNewExpression result =
(PsiNewExpression)factory.createExpressionFromText("new " + initializerType.getPresentableText() + "{}", null);
result = (PsiNewExpression)CodeStyleManager.getInstance(initializer.getProject()).reformat(result);
result.getArrayInitializer().replace(initializer);
return result;
}
public static void abstractizeMethod(PsiClass targetClass, PsiMethod method) throws IncorrectOperationException {
PsiCodeBlock body = method.getBody();
if (body != null) {
body.delete();
}
method.getModifierList().setModifierProperty(PsiModifier.ABSTRACT, true);
method.getModifierList().setModifierProperty(PsiModifier.FINAL, false);
method.getModifierList().setModifierProperty(PsiModifier.SYNCHRONIZED, false);
method.getModifierList().setModifierProperty(PsiModifier.NATIVE, false);
if (!targetClass.isInterface()) {
targetClass.getModifierList().setModifierProperty(PsiModifier.ABSTRACT, true);
}
removeFinalParameters(method);
}
public static boolean isInsideAnonymous(PsiElement element, PsiElement upTo) {
for (PsiElement current = element;
current != null && current != upTo;
current = current.getParent()) {
if (current instanceof PsiAnonymousClass) return true;
}
return false;
}
public static PsiExpression unparenthesizeExpression(PsiExpression expression) {
while (expression instanceof PsiParenthesizedExpression) {
final PsiExpression innerExpression = ((PsiParenthesizedExpression)expression).getExpression();
if (innerExpression == null) return expression;
expression = innerExpression;
}
return expression;
}
public static PsiExpression outermostParenthesizedExpression(PsiExpression expression) {
while (expression.getParent() instanceof PsiParenthesizedExpression) {
expression = (PsiParenthesizedExpression)expression.getParent();
}
return expression;
}
public static String getStringToSearch(PsiElement element, boolean nonJava) {
if (element instanceof PsiDirectory) { // normalize a directory to a corresponding package
final PsiPackage aPackage = ((PsiDirectory)element).getPackage();
if (aPackage != null) element = aPackage;
}
if (element instanceof PsiPackage) {
return nonJava ? ((PsiPackage)element).getQualifiedName() : ((PsiPackage)element).getName();
}
else if (element instanceof PsiClass) {
return nonJava ? ((PsiClass)element).getQualifiedName() : ((PsiClass)element).getName();
}
else if (element instanceof XmlTag) {
return ((XmlTag)element).getValue().getTrimmedText();
}
else if (element instanceof XmlAttribute) {
return ((XmlAttribute)element).getValue();
}
else if (element instanceof PsiNamedElement) {
return ((PsiNamedElement)element).getName();
}
else {
LOG.error("Unknown element type");
return null;
}
}
public static String getInnerClassNameForClassLoader(PsiClass aClass) {
final String qName = aClass.getQualifiedName();
return replaceDotsWithDollars(qName, aClass);
}
public static String replaceDotsWithDollars(final String qName, PsiClass aClass) {
StringBuffer qNameBuffer = new StringBuffer(qName);
int fromIndex = qNameBuffer.length();
PsiElement parent = aClass.getParent();
while (parent instanceof PsiClass) {
final int dotIndex = qNameBuffer.lastIndexOf(".", fromIndex);
if (dotIndex < 0) break;
qNameBuffer.replace(dotIndex, dotIndex + 1, "$");
fromIndex = dotIndex - 1;
parent = parent.getParent();
}
return qNameBuffer.toString();
}
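  // For example (illustrative only): for an inner class with qualified name "com.example.Outer.Inner",
  // one enclosing PsiClass level is walked, so only the last dot is rewritten and the result is
  // "com.example.Outer$Inner", the binary name a class loader expects; package separators stay as dots.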
public static String getNewInnerClassName(PsiClass aClass, String oldInnerClassName, String newName) {
if (!oldInnerClassName.endsWith(aClass.getName())) return newName;
StringBuffer buffer = new StringBuffer(oldInnerClassName);
buffer.replace(buffer.length() - aClass.getName().length(), buffer.length(), newName);
return buffer.toString();
}
public static boolean isSuperOrThisCall(PsiStatement statement, boolean testForSuper, boolean testForThis) {
if (!(statement instanceof PsiExpressionStatement)) return false;
PsiExpression expression = ((PsiExpressionStatement)statement).getExpression();
if (!(expression instanceof PsiMethodCallExpression)) return false;
final PsiReferenceExpression methodExpression = ((PsiMethodCallExpression)expression).getMethodExpression();
if (testForSuper) {
if ("super".equals(methodExpression.getText())) return true;
}
if (testForThis) {
if ("this".equals(methodExpression.getText())) return true;
}
return false;
}
public static void visitImplicitConstructorUsages(PsiClass aClass,
final ImplicitConstructorUsageVisitor implicitConstructorUsageVistor) {
PsiManager manager = aClass.getManager();
GlobalSearchScope projectScope = GlobalSearchScope.projectScope(manager.getProject());
final PsiClass[] inheritors = manager.getSearchHelper().findInheritors(aClass, projectScope, false);
for (PsiClass inheritor : inheritors) {
visitImplicitSuperConstructorUsages(inheritor, implicitConstructorUsageVistor, aClass);
}
}
public static void visitImplicitSuperConstructorUsages(PsiClass subClass,
final ImplicitConstructorUsageVisitor implicitConstructorUsageVistor,
PsiClass superClass) {
final PsiMethod baseDefaultConstructor = findDefaultConstructor (superClass);
final PsiMethod[] constructors = subClass.getConstructors();
if (constructors.length > 0) {
for (PsiMethod constructor : constructors) {
final PsiStatement[] statements = constructor.getBody().getStatements();
if (statements.length < 1 || !isSuperOrThisCall(statements[0], true, true)) {
implicitConstructorUsageVistor.visitConstructor(constructor, baseDefaultConstructor);
}
}
}
else {
implicitConstructorUsageVistor.visitClassWithoutConstructors(subClass);
}
}
private static PsiMethod findDefaultConstructor(final PsiClass aClass) {
final PsiMethod[] constructors = aClass.getConstructors();
for (PsiMethod constructor : constructors) {
if (constructor.getParameterList().getParameters().length == 0) return constructor;
}
return null;
}
public static interface ImplicitConstructorUsageVisitor {
void visitConstructor(PsiMethod constructor, PsiMethod baseConstructor);
void visitClassWithoutConstructors(PsiClass aClass);
}
public interface Graph<T> {
Set<T> getVertices();
Set<T> getTargets(T source);
}
  /**
   * Returns the subset of <code>graph.getVertices()</code> that is the transitive closure (by <code>graph.getTargets()</code>)
   * of the following property: <code>initialRelation.value()</code> is true for the vertex or for one of <code>graph.getTargets(vertex)</code>.
   * <p/>
   * Note that <code>graph.getTargets()</code> is not necessarily a subset of <code>graph.getVertices()</code>
   *
   * @param graph
   * @param initialRelation
   * @return subset of graph.getVertices()
   */
public static <T> Set<T> transitiveClosure(Graph<T> graph, Condition<T> initialRelation) {
Set<T> result = new HashSet<T>();
final Set<T> vertices = graph.getVertices();
boolean anyChanged;
do {
anyChanged = false;
for (T currentVertex : vertices) {
if (!result.contains(currentVertex)) {
if (!initialRelation.value(currentVertex)) {
Set<T> targets = graph.getTargets(currentVertex);
for (T currentTarget : targets) {
if (result.contains(currentTarget) || initialRelation.value(currentTarget)) {
result.add(currentVertex);
anyChanged = true;
break;
}
}
}
else {
result.add(currentVertex);
}
}
}
}
while (anyChanged);
return result;
}
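  // Illustrative sketch (hypothetical vertices, not taken from real callers): in a graph where
  // "b" -> "a" and "c" -> "b", an initial relation that accepts only "a" pulls "b" into the closure
  // (one of its targets satisfies the relation) and then "c" (one of its targets is already in the result).
  //
  //   Graph<String> graph = new Graph<String>() {
  //     public Set<String> getVertices() { return new HashSet<String>(Arrays.asList("a", "b", "c")); }
  //     public Set<String> getTargets(String source) {
  //       if ("b".equals(source)) return Collections.singleton("a");
  //       if ("c".equals(source)) return Collections.singleton("b");
  //       return Collections.<String>emptySet();
  //     }
  //   };
  //   Condition<String> isA = new Condition<String>() {
  //     public boolean value(String s) { return "a".equals(s); }
  //   };
  //   Set<String> closure = transitiveClosure(graph, isA); // contains "a", "b" and "c"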
public static boolean equivalentTypes(PsiType t1, PsiType t2, PsiManager manager) {
while (t1 instanceof PsiArrayType) {
if (!(t2 instanceof PsiArrayType)) return false;
t1 = ((PsiArrayType)t1).getComponentType();
t2 = ((PsiArrayType)t2).getComponentType();
}
if (t1 instanceof PsiPrimitiveType) {
if (t2 instanceof PsiPrimitiveType) {
return t1.equals(t2);
}
else {
return false;
}
}
return manager.areElementsEquivalent(PsiUtil.resolveClassInType(t1), PsiUtil.resolveClassInType(t2));
}
public static List<PsiVariable> collectReferencedVariables(PsiElement scope) {
final List<PsiVariable> result = new ArrayList<PsiVariable>();
scope.accept(new PsiRecursiveElementVisitor() {
public void visitReferenceExpression(PsiReferenceExpression expression) {
final PsiElement element = expression.resolve();
if (element instanceof PsiVariable) {
result.add((PsiVariable)element);
}
final PsiExpression qualifier = expression.getQualifierExpression();
if (qualifier != null) {
qualifier.accept(this);
}
}
});
return result;
}
public static boolean isModifiedInScope(PsiVariable variable, PsiElement scope) {
final PsiReference[] references = variable.getManager().getSearchHelper().findReferences(variable, new LocalSearchScope(scope), false);
for (PsiReference reference : references) {
if (isAssignmentLHS(reference.getElement())) return true;
}
return false;
}
public static String getNameOfReferencedParameter(PsiDocTag tag) {
LOG.assertTrue("param".equals(tag.getName()));
final PsiElement[] dataElements = tag.getDataElements();
if (dataElements.length < 1) return null;
return dataElements[0].getText();
}
public static void fixJavadocsForParams(PsiMethod method, Set<PsiParameter> newParameters) throws IncorrectOperationException {
final PsiDocComment docComment = method.getDocComment();
if (docComment == null) return;
final PsiParameter[] parameters = method.getParameterList().getParameters();
final PsiDocTag[] paramTags = docComment.findTagsByName("param");
if (parameters.length > 0 && newParameters.size() < parameters.length && paramTags.length == 0) return;
Map<PsiParameter, PsiDocTag> tagForParam = new HashMap<PsiParameter, PsiDocTag>();
for (PsiParameter parameter : parameters) {
boolean found = false;
for (PsiDocTag paramTag : paramTags) {
if (parameter.getName().equals(getNameOfReferencedParameter(paramTag))) {
tagForParam.put(parameter, paramTag);
found = true;
break;
}
}
if (!found && !newParameters.contains(parameter)) {
tagForParam.put(parameter, null);
}
}
List<PsiDocTag> newTags = new ArrayList<PsiDocTag>();
for (PsiParameter parameter : parameters) {
if (tagForParam.containsKey(parameter)) {
final PsiDocTag psiDocTag = tagForParam.get(parameter);
if (psiDocTag != null) {
newTags.add((PsiDocTag)psiDocTag.copy());
}
}
else {
newTags.add(method.getManager().getElementFactory().createParamTag(parameter.getName(), ""));
}
}
PsiDocTag anchor = paramTags.length > 0 ? paramTags[paramTags.length - 1] : null;
for (PsiDocTag psiDocTag : newTags) {
anchor = (PsiDocTag)docComment.addAfter(psiDocTag, anchor);
}
for (PsiDocTag paramTag : paramTags) {
paramTag.delete();
}
}
public static PsiDirectory createPackageDirectoryInSourceRoot(PackageWrapper aPackage, final VirtualFile sourceRoot)
throws IncorrectOperationException {
final PsiDirectory[] directories = aPackage.getDirectories();
for (PsiDirectory directory : directories) {
if (VfsUtil.isAncestor(sourceRoot, directory.getVirtualFile(), false)) {
return directory;
}
}
String qNameToCreate = qNameToCreateInSourceRoot(aPackage, sourceRoot);
final String[] shortNames = qNameToCreate.split("\\.");
PsiDirectory current = aPackage.getManager().findDirectory(sourceRoot);
LOG.assertTrue(current != null);
for (String shortName : shortNames) {
PsiDirectory subdirectory = current.findSubdirectory(shortName);
if (subdirectory == null) {
subdirectory = current.createSubdirectory(shortName);
}
current = subdirectory;
}
return current;
}
public static String qNameToCreateInSourceRoot(PackageWrapper aPackage, final VirtualFile sourceRoot)
throws IncorrectOperationException {
String targetQName = aPackage.getQualifiedName();
String sourceRootPackage = ProjectRootManager.getInstance(aPackage.getManager().getProject()).getFileIndex().getPackageNameByDirectory(sourceRoot);
if (sourceRootPackage == null || !targetQName.startsWith(sourceRootPackage)) {
throw new IncorrectOperationException("Cannot create package '" + targetQName + "' in source folder " + sourceRoot.getPresentableUrl());
}
String result = targetQName.substring(sourceRootPackage.length());
if (StringUtil.startsWithChar(result, '.')) result = result.substring(1); // remove initial '.'
return result;
}
public static PsiDirectory findPackageDirectoryInSourceRoot(PackageWrapper aPackage, final VirtualFile sourceRoot) {
final PsiDirectory[] directories = aPackage.getDirectories();
for (PsiDirectory directory : directories) {
if (VfsUtil.isAncestor(sourceRoot, directory.getVirtualFile(), false)) {
return directory;
}
}
String qNameToCreate;
try {
qNameToCreate = qNameToCreateInSourceRoot(aPackage, sourceRoot);
}
catch (IncorrectOperationException e) {
return null;
}
final String[] shortNames = qNameToCreate.split("\\.");
PsiDirectory current = aPackage.getManager().findDirectory(sourceRoot);
LOG.assertTrue(current != null);
for (String shortName : shortNames) {
PsiDirectory subdirectory = current.findSubdirectory(shortName);
if (subdirectory == null) {
return null;
}
current = subdirectory;
}
return current;
}
public static String calculatePsiElementDescriptionList(PsiElement[] elements, StringBuffer buffer) {
if (elements.length == 1) {
buffer.append(UsageViewUtil.getType(elements[0]));
buffer.append(' ');
buffer.append(UsageViewUtil.getDescriptiveName(elements[0]));
}
else {
Map<String, Ref<Integer>> map = new HashMap<String, Ref<Integer>>();
for (PsiElement element : elements) {
final String type = UsageViewUtil.getType(element);
Ref<Integer> ref = map.get(type);
if (ref == null) {
ref = Ref.create(new Integer(0));
map.put(type, ref);
}
ref.set(new Integer(ref.get().intValue() + 1));
}
final Set<Map.Entry<String, Ref<Integer>>> entries = map.entrySet();
      int index = 0;
      for (Map.Entry<String, Ref<Integer>> entry : entries) {
        final String type = entry.getKey();
        final int count = entry.getValue().get().intValue();
        if (index > 0 && index + 1 < entries.size()) {
          buffer.append(", ");
        }
        else if (index > 0 && index + 1 == entries.size()) {
          buffer.append(" and ");
        }
        buffer.append(count);
        buffer.append(" ");
        buffer.append(count == 1 ? type : StringUtil.pluralize(type));
        index++;
      }
}
return buffer.toString();
}
public static class ConditionCache <T> implements Condition<T> {
private final Condition<T> myCondition;
private final HashSet<T> myProcessedSet = new HashSet<T>();
private final HashSet<T> myTrueSet = new HashSet<T>();
public ConditionCache(Condition<T> condition) {
myCondition = condition;
}
public boolean value(T object) {
if (!myProcessedSet.contains(object)) {
myProcessedSet.add(object);
final boolean value = myCondition.value(object);
if (value) {
myTrueSet.add(object);
return true;
}
return false;
}
return myTrueSet.contains(object);
}
}
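  // Illustrative sketch: ConditionCache memoizes any Condition so that repeated queries about the
  // same object evaluate the wrapped condition only once (the names below are placeholders, not
  // declarations from this file).
  //
  //   Condition<PsiClass> expensive = new Condition<PsiClass>() {
  //     public boolean value(PsiClass aClass) { return aClass.isInheritor(someBaseClass, true); }
  //   };
  //   ConditionCache<PsiClass> cached = new ConditionCache<PsiClass>(expensive);
  //   cached.value(candidateClass); // first call delegates to the wrapped condition
  //   cached.value(candidateClass); // second call is answered from the cache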
public static class IsInheritorOf implements Condition<PsiClass> {
private final PsiClass myClass;
private final ConditionCache<PsiClass> myConditionCache;
public IsInheritorOf(PsiClass aClass) {
myClass = aClass;
myConditionCache = new ConditionCache<PsiClass>(new MyCondition());
}
public boolean value(PsiClass object) {
return myConditionCache.value(object);
}
private class MyCondition implements Condition<PsiClass> {
public boolean value(PsiClass aClass) {
return aClass.isInheritor(myClass, true);
}
}
}
public static class IsDescendantOf implements Condition<PsiClass> {
private final PsiClass myClass;
private final ConditionCache<PsiClass> myConditionCache;
public IsDescendantOf(PsiClass aClass) {
myClass = aClass;
myConditionCache = new ConditionCache<PsiClass>(new Condition<PsiClass>() {
public boolean value(PsiClass aClass) {
return InheritanceUtil.isInheritorOrSelf(aClass, myClass, true);
}
});
}
public boolean value(PsiClass aClass) {
return myConditionCache.value(aClass);
}
}
public static void processIncorrectOperation(final Project project, IncorrectOperationException e) {
final String message = e.getMessage();
final int index = message != null ? message.indexOf("java.io.IOException") : -1;
if (index >= 0) {
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
Messages.showMessageDialog(project, message.substring(index + "java.io.IOException".length()), "Error",
Messages.getErrorIcon());
}
});
}
else {
LOG.error(e);
}
}
public static void analyzeModuleConflicts(Project project,
Collection<? extends PsiElement> scope,
final UsageInfo[] usages,
PsiElement target,
final Collection<String> conflicts) {
if (scope == null) return;
final VirtualFile vFile;
if (!(target instanceof PsiDirectory)) {
vFile = target.getContainingFile().getVirtualFile();
}
else {
vFile = ((PsiDirectory)target).getVirtualFile();
}
if (vFile == null) return;
analyzeModuleConflicts(project, scope, usages, vFile, conflicts);
}
public static void analyzeModuleConflicts(Project project,
final Collection<? extends PsiElement> scopes,
final UsageInfo[] usages,
final VirtualFile vFile,
final Collection<String> conflicts) {
if (scopes == null) return;
for (final PsiElement scope : scopes) {
if (scope instanceof PsiPackage || scope instanceof PsiDirectory) return;
}
final Module targetModule = ModuleUtil.getModuleForFile(project, vFile);
if (targetModule == null) return;
final GlobalSearchScope resolveScope = GlobalSearchScope.moduleWithDependenciesAndLibrariesScope(targetModule);
final HashSet<PsiElement> reported = new HashSet<PsiElement>();
for (final PsiElement scope : scopes) {
scope.accept(new PsiRecursiveElementVisitor() {
public void visitReferenceElement(PsiJavaCodeReferenceElement reference) {
super.visitReferenceElement(reference);
final PsiElement resolved = reference.resolve();
if (resolved != null && !reported.contains(resolved)
&& !isAncestor(resolved, scopes)
&& !PsiSearchScopeUtil.isInScope(resolveScope, resolved)) {
final String scopeDescription = ConflictsUtil.htmlEmphasize(ConflictsUtil.getDescription(ConflictsUtil.getContainer(reference),
true));
final String message =
ConflictsUtil.capitalize(ConflictsUtil.htmlEmphasize(ConflictsUtil.getDescription(resolved, true))) +
", referenced in " + scopeDescription +
", will not be accessible in module " +
ConflictsUtil.htmlEmphasize(targetModule.getName());
conflicts.add(message);
reported.add(resolved);
}
}
});
}
NextUsage:
for (UsageInfo usage : usages) {
if (usage instanceof MoveRenameUsageInfo) {
final MoveRenameUsageInfo moveRenameUsageInfo = ((MoveRenameUsageInfo)usage);
final PsiElement element = usage.getElement();
if (element != null &&
PsiTreeUtil.getParentOfType(element, PsiImportStatement.class, false) == null) {
for (PsiElement scope : scopes) {
if (PsiTreeUtil.isAncestor(scope, element, false)) continue NextUsage;
}
final GlobalSearchScope resolveScope1 = element.getResolveScope();
if (!resolveScope1.isSearchInModuleContent(targetModule)) {
final PsiMember container = ConflictsUtil.getContainer(element);
LOG.assertTrue(container != null);
final String scopeDescription = ConflictsUtil.htmlEmphasize(ConflictsUtil.getDescription(container,
true));
Module module = ProjectRootManager.getInstance(project).getFileIndex().getModuleForFile(element.getContainingFile().getVirtualFile());
final String message =
ConflictsUtil.capitalize(ConflictsUtil.htmlEmphasize(ConflictsUtil.getDescription(moveRenameUsageInfo.referencedElement, true))) +
", referenced in " + scopeDescription +
", will not be accessible from module " +
ConflictsUtil.htmlEmphasize(module.getName());
conflicts.add(message);
}
}
}
}
}
private static boolean isAncestor(final PsiElement resolved, final Collection<? extends PsiElement> scopes) {
for (final PsiElement scope : scopes) {
if (PsiTreeUtil.isAncestor(scope, resolved, false)) return true;
}
return false;
}
}
|
source/com/intellij/refactoring/util/RefactoringUtil.java
|
package com.intellij.refactoring.util;
import com.intellij.ant.PsiAntElement;
import com.intellij.codeInsight.ChangeContextUtil;
import com.intellij.codeInsight.ExpectedTypeInfo;
import com.intellij.codeInsight.ExpectedTypesProvider;
import com.intellij.codeInsight.daemon.impl.analysis.HighlightControlFlowUtil;
import com.intellij.codeInsight.highlighting.HighlightManager;
import com.intellij.codeInspection.redundantCast.RedundantCastUtil;
import com.intellij.lang.StdLanguages;
import com.intellij.lang.properties.psi.Property;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.impl.ModuleUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.controlFlow.ControlFlowUtil;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.javadoc.PsiDocTag;
import com.intellij.psi.jsp.WebDirectoryElement;
import com.intellij.psi.search.*;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlAttributeValue;
import com.intellij.psi.xml.XmlElementDecl;
import com.intellij.psi.xml.XmlTag;
import com.intellij.refactoring.PackageWrapper;
import com.intellij.refactoring.RefactoringSettings;
import com.intellij.refactoring.ui.InfoDialog;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.Processor;
import com.intellij.util.containers.HashMap;
import gnu.trove.THashMap;
import java.io.File;
import java.util.*;
public class RefactoringUtil {
private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.util.RefactoringUtil");
public static final int EXPR_COPY_SAFE = 0;
public static final int EXPR_COPY_UNSAFE = 1;
public static final int EXPR_COPY_PROHIBITED = 2;
public static void showInfoDialog(String info, Project project) {
RefactoringSettings settings = RefactoringSettings.getInstance();
if (settings.IS_SHOW_ACTION_INFO) {
InfoDialog usagesWarning = new InfoDialog(info, project);
usagesWarning.show();
settings.IS_SHOW_ACTION_INFO = usagesWarning.isToShowInFuture();
}
}
public static boolean isSourceRoot(final PsiDirectory directory) {
if (directory.getManager() == null) return false;
final Project project = directory.getProject();
if (project == null) return false;
final VirtualFile virtualFile = directory.getVirtualFile();
final VirtualFile sourceRootForFile = ProjectRootManager.getInstance(project).getFileIndex().getSourceRootForFile(virtualFile);
return Comparing.equal(virtualFile, sourceRootForFile);
}
public static boolean isInStaticContext(PsiElement element) {
return PsiUtil.getEnclosingStaticElement(element, null) != null;
}
public static boolean isResolvableType(PsiType type) {
return type.accept(new PsiTypeVisitor<Boolean>() {
public Boolean visitPrimitiveType(PsiPrimitiveType primitiveType) {
return Boolean.TRUE;
}
public Boolean visitArrayType(PsiArrayType arrayType) {
return arrayType.getComponentType().accept(this);
}
public Boolean visitClassType(PsiClassType classType) {
if (classType.resolve() == null) return Boolean.FALSE;
PsiType[] parameters = classType.getParameters();
for (PsiType parameter : parameters) {
if (parameter != null && !parameter.accept(this).booleanValue()) return Boolean.FALSE;
}
return Boolean.TRUE;
}
public Boolean visitWildcardType(PsiWildcardType wildcardType) {
if (wildcardType.getBound() != null) return wildcardType.getBound().accept(this);
return Boolean.TRUE;
}
}).booleanValue();
}
public static PsiElement replaceOccurenceWithFieldRef(PsiExpression occurrence, PsiField newField, PsiClass destinationClass)
throws IncorrectOperationException {
final PsiManager manager = occurrence.getManager();
final String fieldName = newField.getName();
final PsiVariable psiVariable = manager.getResolveHelper().resolveReferencedVariable(fieldName, occurrence);
final PsiElementFactory factory = manager.getElementFactory();
if (psiVariable != null && psiVariable.equals(newField)) {
return occurrence.replace(factory.createExpressionFromText(fieldName, null));
}
else {
final PsiReferenceExpression ref = (PsiReferenceExpression)factory.createExpressionFromText("this." + fieldName, null);
if (newField.hasModifierProperty(PsiModifier.STATIC)) {
final PsiReferenceExpression referenceExpression =
factory.createReferenceExpression(destinationClass);
ref.getQualifierExpression().replace(referenceExpression);
}
return occurrence.replace(ref);
}
}
/**
* @see com.intellij.psi.codeStyle.CodeStyleManager#suggestUniqueVariableName(String, com.intellij.psi.PsiElement, boolean)
* Cannot use method from code style manager: a collision with fieldToReplace is not a collision
*/
public static String suggestUniqueVariableName(String baseName, PsiElement place, PsiField fieldToReplace) {
int index = 0;
while (true) {
final String name = index > 0 ? baseName + index : baseName;
index++;
final PsiManager manager = place.getManager();
PsiResolveHelper helper = manager.getResolveHelper();
PsiVariable refVar = helper.resolveReferencedVariable(name, place);
if (refVar != null && !manager.areElementsEquivalent(refVar, fieldToReplace)) continue;
class Cancel extends RuntimeException {
}
try {
place.accept(new PsiRecursiveElementVisitor() {
public void visitClass(PsiClass aClass) {
}
public void visitVariable(PsiVariable variable) {
if (name.equals(variable.getName())) {
throw new Cancel();
}
}
});
}
catch (Cancel e) {
continue;
}
return name;
}
}
public static boolean isValidName(final Project project, final PsiElement psiElement, final String newName) {
if (newName == null) {
return false;
}
if (psiElement instanceof PsiAntElement) {
return newName.trim().matches("[\\d\\w\\_\\.\\-]*");
}
if (psiElement instanceof PsiFile || psiElement instanceof PsiDirectory) {
return newName.indexOf(File.separatorChar) < 0 && newName.indexOf('/') < 0;
}
if (psiElement instanceof WebDirectoryElement) {
return newName.indexOf('/') < 0;
}
if (psiElement instanceof XmlTag ||
psiElement instanceof XmlAttribute ||
psiElement instanceof XmlElementDecl
) {
return newName.trim().matches("([\\d\\w\\_\\.\\-]+:)?[\\d\\w\\_\\.\\-]+");
}
if (psiElement instanceof XmlAttributeValue) {
return true; // ask meta data
}
if (psiElement instanceof Property) {
return true;
}
return psiElement.getLanguage().getNamesValidator().isIdentifier(newName.trim(), project);
}
  //order of usages across different files is irrelevant
public static void sortDepthFirstRightLeftOrder(final UsageInfo[] usages) {
Arrays.sort(usages, new Comparator<UsageInfo>() {
public int compare(final UsageInfo usage1, final UsageInfo usage2) {
final PsiElement element1 = usage1.getElement();
final PsiElement element2 = usage2.getElement();
LOG.assertTrue(element1 != null && element2 != null);
return element2.getTextRange().getStartOffset() - element1.getTextRange().getStartOffset();
}
});
}
public static interface UsageInfoFactory {
UsageInfo createUsageInfo(PsiElement usage, int startOffset, int endOffset);
}
public static void addUsagesInStringsAndComments(PsiElement element, String stringToSearch, List<UsageInfo> results,
UsageInfoFactory factory) {
PsiManager manager = element.getManager();
PsiSearchHelper helper = manager.getSearchHelper();
SearchScope scope = element.getUseScope();
scope = scope.intersectWith(GlobalSearchScope.projectScope(manager.getProject()));
int index = stringToSearch.lastIndexOf('.');
String identifierToSearch = index >= 0 ? stringToSearch.substring(index + 1) : stringToSearch;
PsiLiteralExpression[] literals = helper.findStringLiteralsContainingIdentifier(identifierToSearch, scope);
for (PsiLiteralExpression literal : literals) {
processStringOrComment(literal, stringToSearch, results, factory);
}
PsiElement[] comments = helper.findCommentsContainingIdentifier(identifierToSearch, scope);
for (PsiElement comment : comments) {
processStringOrComment(comment, stringToSearch, results, factory);
}
}
public static boolean isSearchTextOccurencesEnabled(PsiElement element) {
return element instanceof PsiPackage || (element instanceof PsiClass && ((PsiClass)element).getQualifiedName() != null) ||
(element instanceof PsiFile && !StdLanguages.JAVA.equals(element.getLanguage()));
}
public static PsiElement getVariableScope(PsiLocalVariable localVar) {
if (!(localVar instanceof ImplicitVariable)) {
return localVar.getParent().getParent();
}
else {
return ((ImplicitVariable)localVar).getDeclarationScope();
}
}
public static void addTextOccurences(PsiElement element, String stringToSearch, GlobalSearchScope searchScope,
final List<UsageInfo> results, final UsageInfoFactory factory) {
processTextOccurences(element, stringToSearch, searchScope, new Processor<UsageInfo>() {
public boolean process(UsageInfo t) {
results.add(t);
return true;
}
}, factory);
}
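/**
* Finds occurrences of the string in non-Java files within the given search scope and feeds each
* usage produced by the factory to the processor, stopping as soon as the processor returns false.
*/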
public static void processTextOccurences(PsiElement element, String stringToSearch, GlobalSearchScope searchScope,
final Processor<UsageInfo> processor, final UsageInfoFactory factory) {
PsiSearchHelper helper = element.getManager().getSearchHelper();
helper.processUsagesInNonJavaFiles(element, stringToSearch,
new PsiNonJavaFileReferenceProcessor() {
public boolean process(PsiFile psiFile, int startOffset, int endOffset) {
UsageInfo usageInfo = factory.createUsageInfo(psiFile, startOffset, endOffset);
if (usageInfo != null) {
if (!processor.process(usageInfo)) return false;
}
return true;
}
},
searchScope);
}
private static void processStringOrComment(PsiElement element, String stringToSearch, List<UsageInfo> results,
UsageInfoFactory factory) {
String elementText = element.getText();
for (int index = 0; index < elementText.length(); index++) {
index = elementText.indexOf(stringToSearch, index);
if (index < 0) break;
final PsiReference referenceAt = element.findReferenceAt(index);
if (referenceAt != null && referenceAt.resolve() != null) continue;
if (index > 0) {
char c = elementText.charAt(index - 1);
if (Character.isJavaIdentifierPart(c) && c != '$') {
continue;
}
}
if (index + stringToSearch.length() < elementText.length()) {
char c = elementText.charAt(index + stringToSearch.length());
if (Character.isJavaIdentifierPart(c) && c != '$') {
continue;
}
}
UsageInfo usageInfo = factory.createUsageInfo(element, index, index + stringToSearch.length());
if (usageInfo != null) {
results.add(usageInfo);
}
index += stringToSearch.length();
}
}
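/**
* Applies the collected NonCodeUsageInfo usages: for every affected file the usage ranges are replaced
* with the new text in the underlying document, processing the offsets from the end of the file backwards
* so that earlier replacements do not shift the remaining offsets.
*/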
public static void renameNonCodeUsages(final Project project, final UsageInfo[] usages) {
PsiDocumentManager.getInstance(project).commitAllDocuments();
HashMap<PsiFile,ArrayList<UsageOffset>> filesToOffsetsMap = new HashMap<PsiFile, ArrayList<UsageOffset>>();
for (UsageInfo usage : usages) {
final PsiElement element = usage.getElement();
if (element == null || !element.isValid()) continue;
if (usage instanceof NonCodeUsageInfo) {
final PsiFile containingFile = element.getContainingFile();
int fileOffset = element.getTextRange().getStartOffset() + usage.startOffset;
ArrayList<UsageOffset> list = filesToOffsetsMap.get(containingFile);
if (list == null) {
list = new ArrayList<UsageOffset>();
filesToOffsetsMap.put(containingFile, list);
}
list.add(new UsageOffset(fileOffset, fileOffset + usage.endOffset - usage.startOffset,
((NonCodeUsageInfo)usage).newText));
}
}
for (PsiFile file : filesToOffsetsMap.keySet()) {
final Document editorDocument = PsiDocumentManager.getInstance(project).getDocument(file);
ArrayList<UsageOffset> list = filesToOffsetsMap.get(file);
UsageOffset[] offsets = list.toArray(new UsageOffset[list.size()]);
Arrays.sort(offsets);
for (int i = offsets.length - 1; i >= 0; i--) {
UsageOffset usageOffset = offsets[i];
editorDocument.replaceString(usageOffset.startOffset, usageOffset.endOffset, usageOffset.newText);
}
PsiDocumentManager.getInstance(project).commitDocument(editorDocument);
}
PsiDocumentManager.getInstance(project).commitAllDocuments();
}
private static class UsageOffset implements Comparable {
final int startOffset;
final int endOffset;
final String newText;
public UsageOffset(int startOffset, int endOffset, String newText) {
this.startOffset = startOffset;
this.endOffset = endOffset;
this.newText = newText;
}
public int compareTo(Object o) {
return startOffset - ((UsageOffset)o).startOffset;
}
}
public static PsiReturnStatement[] findReturnStatements(PsiMethod method) {
ArrayList<PsiElement> vector = new ArrayList<PsiElement>();
PsiCodeBlock body = method.getBody();
if (body != null) {
addReturnStatements(vector, body);
}
return vector.toArray(new PsiReturnStatement[vector.size()]);
}
private static void addReturnStatements(ArrayList<PsiElement> vector, PsiElement element) {
if (element instanceof PsiReturnStatement) {
vector.add(element);
}
else if (element instanceof PsiClass) {
return;
}
else {
PsiElement[] children = element.getChildren();
for (PsiElement child : children) {
addReturnStatements(vector, child);
}
}
}
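/**
* Returns the topmost enclosing statement of the given element; unless skipScopingStatements is true,
* the walk stops as soon as the current statement is the body of a for/foreach/while loop or a branch
* of an if statement.
*/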
public static PsiElement getParentStatement(PsiElement place, boolean skipScopingStatements) {
PsiElement parent = place;
while (true) {
if (parent instanceof PsiStatement) break;
parent = parent.getParent();
if (parent == null) return null;
}
PsiElement parentStatement = parent;
parent = parentStatement instanceof PsiStatement ? parentStatement : parentStatement.getParent();
while (parent instanceof PsiStatement) {
if (!skipScopingStatements &&
((parent instanceof PsiForStatement && parentStatement == ((PsiForStatement)parent).getBody())
|| (parent instanceof PsiForeachStatement && parentStatement == ((PsiForeachStatement)parent).getBody())
|| (parent instanceof PsiWhileStatement && parentStatement == ((PsiWhileStatement)parent).getBody())
|| (parent instanceof PsiIfStatement &&
(parentStatement == ((PsiIfStatement)parent).getThenBranch() || parentStatement == ((PsiIfStatement)parent).getElseBranch())))
) {
return parentStatement;
}
parentStatement = parent;
parent = parent.getParent();
}
return parentStatement;
}
public static PsiElement getParentExpressionAnchorElement(PsiElement place) {
PsiElement parent = place;
while (true) {
if (isExpressionAnchorElement(parent)) break;
parent = parent.getParent();
if (parent == null) return null;
}
PsiElement parentStatement = parent;
parent = parentStatement.getParent();
while (parent instanceof PsiStatement) {
parentStatement = parent;
parent = parent.getParent();
}
return parentStatement;
}
public static boolean isExpressionAnchorElement(PsiElement element) {
return element instanceof PsiStatement || element instanceof PsiClassInitializer
|| element instanceof PsiField || element instanceof PsiMethod;
}
/**
* @param expression the expression to check
* @return the enclosing loop statement if the expression is part of a loop's condition or of a for loop's update section,
* null otherwise
*/
public static PsiElement getLoopForLoopCondition(PsiExpression expression) {
PsiExpression outermost = expression;
while (outermost.getParent() instanceof PsiExpression) {
outermost = (PsiExpression)outermost.getParent();
}
if (outermost.getParent() instanceof PsiForStatement) {
final PsiForStatement forStatement = (PsiForStatement)outermost.getParent();
if (forStatement.getCondition() == outermost) {
return forStatement;
}
else {
return null;
}
}
if (outermost.getParent() instanceof PsiExpressionStatement && outermost.getParent().getParent() instanceof PsiForStatement) {
final PsiForStatement forStatement = (PsiForStatement)outermost.getParent().getParent();
if (forStatement.getUpdate() == outermost.getParent()) {
return forStatement;
}
else {
return null;
}
}
if (outermost.getParent() instanceof PsiWhileStatement) {
return outermost.getParent();
}
if (outermost.getParent() instanceof PsiDoWhileStatement) {
return outermost.getParent();
}
return null;
}
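/**
* Returns the innermost class containing the given element, treating the argument list of an anonymous
* class instantiation as belonging to the enclosing class rather than to the anonymous class itself.
*/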
public static PsiClass getThisClass(PsiElement place) {
PsiElement parent = place.getContext();
if (parent == null) return null;
PsiElement prev = null;
while (true) {
if (parent instanceof PsiClass) {
if (!(parent instanceof PsiAnonymousClass && ((PsiAnonymousClass)parent).getArgumentList() == prev)) {
return (PsiClass)parent;
}
}
prev = parent;
parent = parent.getContext();
if (parent == null) return null;
}
}
public static PsiClass getThisResolveClass(final PsiReferenceExpression place) {
final JavaResolveResult resolveResult = place.advancedResolve(false);
final PsiElement scope = resolveResult.getCurrentFileResolveScope();
if (scope instanceof PsiClass) {
return (PsiClass)scope;
}
return null;
/*
PsiElement parent = place.getContext();
PsiElement prev = null;
while (true) {
if (parent instanceof PsiClass) {
if (!(parent instanceof PsiAnonymousClass && ((PsiAnonymousClass)parent).getArgumentList() == prev))
return (PsiClass)parent;
}
prev = parent;
parent = parent.getContext();
if (parent == null) return null;
}
*/
}
public static PsiCall getEnclosingConstructorCall (PsiJavaCodeReferenceElement ref) {
PsiElement parent = ref.getParent();
if (ref instanceof PsiReferenceExpression && parent instanceof PsiMethodCallExpression) return (PsiCall)parent;
if (parent instanceof PsiAnonymousClass) {
parent = parent.getParent();
}
return parent instanceof PsiNewExpression ? (PsiNewExpression)parent : null;
}
public static final PsiMethod getEnclosingMethod (PsiElement element) {
final PsiElement container = PsiTreeUtil.getParentOfType(element, PsiMethod.class, PsiClass.class);
return container instanceof PsiMethod ? ((PsiMethod)container) : null;
}
public static void renameVariableReferences(PsiVariable variable, String newName, SearchScope scope)
throws IncorrectOperationException {
PsiManager manager = variable.getManager();
PsiSearchHelper helper = manager.getSearchHelper();
PsiReference[] refs = helper.findReferences(variable, scope, false);
for (PsiReference reference : refs) {
if (reference != null) {
reference.handleElementRename(newName);
}
}
}
public static boolean canBeDeclaredFinal(PsiVariable variable) {
LOG.assertTrue(variable instanceof PsiLocalVariable || variable instanceof PsiParameter);
final boolean isReassigned = HighlightControlFlowUtil.isReassigned(variable, new THashMap<PsiElement, Collection<ControlFlowUtil.VariableInfo>>(), new THashMap<PsiParameter, Boolean>());
return !isReassigned;
}
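/**
* Replaces a single reference to a local variable with its initializer, inserting an explicit cast when
* the resulting expression type differs from the declared type (unless the cast would be redundant) and
* fixing up 'this' qualifiers via ChangeContextUtil.
*/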
public static PsiExpression inlineVariable(PsiLocalVariable variable, PsiExpression initializer,
PsiJavaCodeReferenceElement ref) throws IncorrectOperationException {
PsiManager manager = initializer.getManager();
PsiClass variableParent = RefactoringUtil.getThisClass(initializer);
PsiClass refParent = RefactoringUtil.getThisClass(ref);
initializer = convertInitializerToNormalExpression(initializer, variable.getType());
ChangeContextUtil.encodeContextInfo(initializer, false);
PsiExpression expr = (PsiExpression)ref.replace(initializer);
PsiType exprType = expr.getType();
if (exprType != null && !variable.getType().equals(exprType)) {
PsiTypeCastExpression cast = (PsiTypeCastExpression)manager.getElementFactory().createExpressionFromText("(t)a", null);
cast.getCastType().replace(variable.getTypeElement());
cast.getOperand().replace(expr);
PsiExpression exprCopy = (PsiExpression)expr.copy();
cast = (PsiTypeCastExpression)expr.replace(cast);
if (!RedundantCastUtil.isCastRedundant(cast)) {
expr = cast;
} else {
PsiElement toReplace = cast;
while (toReplace.getParent() instanceof PsiParenthesizedExpression) {
toReplace = toReplace.getParent();
}
expr = (PsiExpression)toReplace.replace(exprCopy);
}
}
ChangeContextUtil.clearContextInfo(initializer);
PsiClass thisClass = variableParent;
PsiThisExpression thisAccessExpr = null;
if (Comparing.equal(variableParent, refParent)) {
thisAccessExpr = createThisExpression(manager, null);
}
else {
if (!(thisClass instanceof PsiAnonymousClass)) {
thisAccessExpr = createThisExpression(manager, thisClass);
}
}
return (PsiExpression)ChangeContextUtil.decodeContextInfo(expr, thisClass, thisAccessExpr);
}
public static PsiThisExpression createThisExpression(PsiManager manager, PsiClass qualifierClass)
throws IncorrectOperationException {
PsiElementFactory factory = manager.getElementFactory();
if (qualifierClass != null) {
PsiThisExpression qualifiedThis = (PsiThisExpression)factory.createExpressionFromText("q.this", null);
qualifiedThis = (PsiThisExpression)CodeStyleManager.getInstance(manager.getProject()).reformat(qualifiedThis);
qualifiedThis.getQualifier().bindToElement(qualifierClass);
return qualifiedThis;
}
else {
return (PsiThisExpression)factory.createExpressionFromText("this", null);
}
}
/**
* Removes the reference to the specified class from the given reference list.
*
* @return a copy of the removed reference, or null if the list contained no reference to the class
*/
public static PsiJavaCodeReferenceElement removeFromReferenceList(PsiReferenceList refList, PsiClass aClass)
throws IncorrectOperationException {
PsiJavaCodeReferenceElement[] refs = refList.getReferenceElements();
for (PsiJavaCodeReferenceElement ref : refs) {
if (ref.isReferenceTo(aClass)) {
PsiJavaCodeReferenceElement refCopy = (PsiJavaCodeReferenceElement)ref.copy();
ref.delete();
return refCopy;
}
}
return null;
}
public static PsiJavaCodeReferenceElement findReferenceToClass(PsiReferenceList refList, PsiClass aClass) {
PsiJavaCodeReferenceElement[] refs = refList.getReferenceElements();
for (PsiJavaCodeReferenceElement ref : refs) {
if (ref.isReferenceTo(aClass)) {
return ref;
}
}
return null;
}
public static PsiType getTypeByExpressionWithExpectedType(PsiExpression expr) {
PsiType type = getTypeByExpression(expr);
if (type != null) return type;
ExpectedTypeInfo[] expectedTypes = ExpectedTypesProvider.getInstance(expr.getProject()).getExpectedTypes(expr, false);
if (expectedTypes.length == 1) {
type = expectedTypes[0].getType();
if (!type.equalsToText("java.lang.Object")) return type;
}
return null;
}
public static PsiType getTypeByExpression(PsiExpression expr) {
PsiElementFactory factory = expr.getManager().getElementFactory();
return getTypeByExpression(expr, factory);
}
private static PsiType getTypeByExpression(PsiExpression expr, final PsiElementFactory factory) {
PsiType type = expr.getType();
if (type == null) {
if (expr instanceof PsiArrayInitializerExpression) {
PsiExpression[] initializers = ((PsiArrayInitializerExpression)expr).getInitializers();
if (initializers != null && initializers.length > 0) {
PsiType initType = getTypeByExpression(initializers[0]);
if (initType == null) return null;
return initType.createArrayType();
}
}
return null;
}
PsiClass refClass = PsiUtil.resolveClassInType(type);
if (refClass instanceof PsiAnonymousClass) {
type = ((PsiAnonymousClass)refClass).getBaseClassType();
}
if (type == PsiType.NULL) {
ExpectedTypeInfo[] infos = ExpectedTypesProvider.getInstance(expr.getProject()).getExpectedTypes(expr, false);
if (infos.length == 1) {
type = infos[0].getType();
}
else {
type = factory.createTypeByFQClassName("java.lang.Object", expr.getResolveScope());
}
}
return GenericsUtil.getVariableTypeByExpressionType(type);
}
public static boolean isAssignmentLHS(PsiElement element) {
PsiElement parent = element.getParent();
if (parent instanceof PsiAssignmentExpression
&& element.equals(((PsiAssignmentExpression)parent).getLExpression())) {
return true;
}
else {
return isPlusPlusOrMinusMinus(parent);
}
}
public static boolean isPlusPlusOrMinusMinus(PsiElement element) {
if (element instanceof PsiPrefixExpression) {
PsiJavaToken operandSign = ((PsiPrefixExpression)element).getOperationSign();
return operandSign.getTokenType() == JavaTokenType.PLUSPLUS
|| operandSign.getTokenType() == JavaTokenType.MINUSMINUS;
}
else if (element instanceof PsiPostfixExpression) {
PsiJavaToken operandSign = ((PsiPostfixExpression)element).getOperationSign();
return operandSign.getTokenType() == JavaTokenType.PLUSPLUS
|| operandSign.getTokenType() == JavaTokenType.MINUSMINUS;
}
else {
return false;
}
}
public static void removeFinalParameters(PsiMethod method)
throws IncorrectOperationException {
// Remove final parameters
PsiParameterList paramList = method.getParameterList();
if (paramList != null) {
PsiParameter[] params = paramList.getParameters();
for (PsiParameter param : params) {
if (param.hasModifierProperty(PsiModifier.FINAL)) {
param.getModifierList().setModifierProperty(PsiModifier.FINAL, false);
}
}
}
}
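/**
* Finds a single anchor element before which a declaration can be placed so that it precedes all of the
* given occurrences within the scope; returns null if no common anchor can be found. If the occurrences
* span several case branches of a switch statement, the switch statement itself becomes the anchor.
*/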
public static PsiElement getAnchorElementForMultipleExpressions(PsiExpression[] occurrences, PsiElement scope) {
PsiElement anchor = null;
for (PsiExpression occurrence : occurrences) {
if (scope != null && !PsiTreeUtil.isAncestor(scope, occurrence, false)) {
continue;
}
PsiElement anchor1 = getParentExpressionAnchorElement(occurrence);
if (anchor1 == null) {
return null;
}
if (anchor == null) {
anchor = anchor1;
}
else {
PsiElement commonParent = PsiTreeUtil.findCommonParent(anchor, anchor1);
if (commonParent == null || anchor.getTextRange() == null || anchor1.getTextRange() == null) return null;
PsiElement firstAnchor = anchor.getTextRange().getStartOffset() < anchor1.getTextRange().getStartOffset() ?
anchor : anchor1;
if (commonParent.equals(firstAnchor)) {
anchor = firstAnchor;
}
else {
PsiElement parent = firstAnchor;
while (!parent.getParent().equals(commonParent)) {
parent = parent.getParent();
}
final PsiElement newAnchor = getParentExpressionAnchorElement(parent);
if (newAnchor != null) {
anchor = newAnchor;
}
else {
anchor = parent;
}
}
}
}
if (occurrences.length > 1 && anchor.getParent().getParent() instanceof PsiSwitchStatement) {
PsiSwitchStatement switchStatement = (PsiSwitchStatement)anchor.getParent().getParent();
if (switchStatement.getBody().equals(anchor.getParent())) {
int startOffset = occurrences[0].getTextRange().getStartOffset();
int endOffset = occurrences[occurrences.length - 1].getTextRange().getEndOffset();
PsiStatement[] statements = switchStatement.getBody().getStatements();
boolean isInDifferentCases = false;
for (PsiStatement statement : statements) {
if (statement instanceof PsiSwitchLabelStatement) {
int caseOffset = statement.getTextOffset();
if (startOffset < caseOffset && caseOffset < endOffset) {
isInDifferentCases = true;
break;
}
}
}
if (isInDifferentCases) {
anchor = switchStatement;
}
}
}
return anchor;
}
public static void setVisibility(PsiModifierList modifierList, String newVisibility)
throws IncorrectOperationException {
modifierList.setModifierProperty(PsiModifier.PRIVATE, false);
modifierList.setModifierProperty(PsiModifier.PUBLIC, false);
modifierList.setModifierProperty(PsiModifier.PROTECTED, false);
modifierList.setModifierProperty(newVisibility, true);
}
public static boolean isMethodUsage(PsiElement element) {
if (!(element instanceof PsiJavaCodeReferenceElement)) return false;
PsiElement parent = element.getParent();
if (parent instanceof PsiCall) {
return true;
}
else if (parent instanceof PsiAnonymousClass) {
return element.equals(((PsiAnonymousClass)parent).getBaseClassReference());
}
return false;
}
public static PsiExpressionList getArgumentListByMethodReference(PsiElement ref) {
if (ref instanceof PsiEnumConstant) return ((PsiEnumConstant)ref).getArgumentList();
PsiElement parent = ref.getParent();
if (parent instanceof PsiCall) {
return ((PsiCall)parent).getArgumentList();
}
else if (parent instanceof PsiAnonymousClass) {
return ((PsiNewExpression)parent.getParent()).getArgumentList();
}
LOG.assertTrue(false);
return null;
}
public static PsiCallExpression getCallExpressionByMethodReference(PsiJavaCodeReferenceElement ref) {
PsiElement parent = ref.getParent();
if (parent instanceof PsiMethodCallExpression) {
return (PsiMethodCallExpression)parent;
}
else if (parent instanceof PsiNewExpression) {
return (PsiNewExpression)parent;
}
else if (parent instanceof PsiAnonymousClass) {
return (PsiNewExpression)parent.getParent();
}
else {
LOG.assertTrue(false);
return null;
}
}
/**
* @return List of highlighters
*/
public static ArrayList<RangeHighlighter> highlightAllOccurences(Project project, PsiElement[] occurences, Editor editor) {
ArrayList<RangeHighlighter> highlighters = new ArrayList<RangeHighlighter>();
HighlightManager highlightManager = HighlightManager.getInstance(project);
EditorColorsManager colorsManager = EditorColorsManager.getInstance();
TextAttributes attributes = colorsManager.getGlobalScheme().getAttributes(EditorColors.SEARCH_RESULT_ATTRIBUTES);
highlightManager.addOccurrenceHighlights(editor, occurences, attributes, true, highlighters);
return highlighters;
}
public static ArrayList<RangeHighlighter> highlightOccurences(Project project, PsiElement[] occurences, Editor editor) {
if (occurences.length > 1) {
return highlightAllOccurences(project, occurences, editor);
}
return new ArrayList<RangeHighlighter>();
}
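/**
* Introduces a temporary local variable initialized with the given expression immediately before the
* statement that encloses the context element and returns the generated variable name.
*/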
public static String createTempVar(PsiExpression expr, PsiElement context, boolean declareFinal)
throws IncorrectOperationException {
PsiElement anchorStatement = getParentStatement(context, true);
LOG.assertTrue(anchorStatement != null && anchorStatement.getParent() != null);
Project project = expr.getProject();
String[] suggestedNames =
CodeStyleManager.getInstance(project).suggestVariableName(VariableKind.LOCAL_VARIABLE, null, expr, null).names;
final String prefix = suggestedNames[0];
final String id = CodeStyleManager.getInstance(project).suggestUniqueVariableName(prefix, context, true);
PsiElementFactory factory = expr.getManager().getElementFactory();
if (expr instanceof PsiParenthesizedExpression) {
PsiExpression expr1 = ((PsiParenthesizedExpression)expr).getExpression();
if (expr1 != null) {
expr = expr1;
}
}
PsiDeclarationStatement decl =
factory.createVariableDeclarationStatement(id, expr.getType(), expr);
if (declareFinal) {
((PsiLocalVariable)decl.getDeclaredElements()[0]).getModifierList().setModifierProperty(PsiModifier.FINAL, true);
}
anchorStatement.getParent().addBefore(decl, anchorStatement);
return id;
}
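/**
* Estimates whether an expression may be duplicated without changing program semantics:
* EXPR_COPY_SAFE for side-effect free expressions, EXPR_COPY_UNSAFE if it contains a method call,
* EXPR_COPY_PROHIBITED if it contains a new expression, an assignment or an increment/decrement.
*/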
public static int verifySafeCopyExpression(PsiElement expr) {
return verifySafeCopyExpressionSubElement(expr);
}
private static int verifySafeCopyExpressionSubElement(PsiElement element) {
int result = EXPR_COPY_SAFE;
if (element == null) return result;
if (element instanceof PsiThisExpression
|| element instanceof PsiSuperExpression
|| element instanceof PsiIdentifier
) {
return EXPR_COPY_SAFE;
}
if (element instanceof PsiMethodCallExpression) {
result = EXPR_COPY_UNSAFE;
}
if (element instanceof PsiNewExpression) {
return EXPR_COPY_PROHIBITED;
}
if (element instanceof PsiAssignmentExpression) {
return EXPR_COPY_PROHIBITED;
}
if (isPlusPlusOrMinusMinus(element)) {
return EXPR_COPY_PROHIBITED;
}
PsiElement[] children = element.getChildren();
for (PsiElement child : children) {
int childResult = verifySafeCopyExpressionSubElement(child);
result = Math.max(result, childResult);
}
return result;
}
public static PsiExpression convertInitializerToNormalExpression(PsiExpression expression,
PsiType forcedReturnType)
throws IncorrectOperationException {
PsiExpression returnValue;
if (expression instanceof PsiArrayInitializerExpression) {
returnValue =
createNewExpressionFromArrayInitializer((PsiArrayInitializerExpression)expression,
forcedReturnType);
}
else {
returnValue = expression;
}
return returnValue;
}
public static PsiExpression createNewExpressionFromArrayInitializer(PsiArrayInitializerExpression initializer,
PsiType forcedType)
throws IncorrectOperationException {
PsiType initializerType = null;
if (initializer != null) {
// initializerType = myExpresssion.getType();
if (forcedType != null) {
initializerType = forcedType;
}
else {
initializerType = getTypeByExpression(initializer);
}
}
if (initializerType == null) {
return initializer;
}
LOG.assertTrue(initializerType instanceof PsiArrayType);
PsiElementFactory factory = initializer.getManager().getElementFactory();
PsiNewExpression result =
(PsiNewExpression)factory.createExpressionFromText("new " + initializerType.getPresentableText() + "{}", null);
result = (PsiNewExpression)CodeStyleManager.getInstance(initializer.getProject()).reformat(result);
result.getArrayInitializer().replace(initializer);
return result;
}
public static void abstractizeMethod(PsiClass targetClass, PsiMethod method) throws IncorrectOperationException {
PsiCodeBlock body = method.getBody();
if (body != null) {
body.delete();
}
method.getModifierList().setModifierProperty(PsiModifier.ABSTRACT, true);
method.getModifierList().setModifierProperty(PsiModifier.FINAL, false);
method.getModifierList().setModifierProperty(PsiModifier.SYNCHRONIZED, false);
method.getModifierList().setModifierProperty(PsiModifier.NATIVE, false);
if (!targetClass.isInterface()) {
targetClass.getModifierList().setModifierProperty(PsiModifier.ABSTRACT, true);
}
removeFinalParameters(method);
}
public static boolean isInsideAnonymous(PsiElement element, PsiElement upTo) {
for (PsiElement current = element;
current != null && current != upTo;
current = current.getParent()) {
if (current instanceof PsiAnonymousClass) return true;
}
return false;
}
public static PsiExpression unparenthesizeExpression(PsiExpression expression) {
while (expression instanceof PsiParenthesizedExpression) {
final PsiExpression innerExpression = ((PsiParenthesizedExpression)expression).getExpression();
if (innerExpression == null) return expression;
expression = innerExpression;
}
return expression;
}
public static PsiExpression outermostParenthesizedExpression(PsiExpression expression) {
while (expression.getParent() instanceof PsiParenthesizedExpression) {
expression = (PsiParenthesizedExpression)expression.getParent();
}
return expression;
}
public static String getStringToSearch(PsiElement element, boolean nonJava) {
if (element instanceof PsiDirectory) { // normalize a directory to a corresponding package
final PsiPackage aPackage = ((PsiDirectory)element).getPackage();
if (aPackage != null) element = aPackage;
}
if (element instanceof PsiPackage) {
return nonJava ? ((PsiPackage)element).getQualifiedName() : ((PsiPackage)element).getName();
}
else if (element instanceof PsiClass) {
return nonJava ? ((PsiClass)element).getQualifiedName() : ((PsiClass)element).getName();
}
else if (element instanceof XmlTag) {
return ((XmlTag)element).getValue().getTrimmedText();
}
else if (element instanceof XmlAttribute) {
return ((XmlAttribute)element).getValue();
}
else if (element instanceof PsiNamedElement) {
return ((PsiNamedElement)element).getName();
}
else {
LOG.error("Unknown element type");
return null;
}
}
public static String getInnerClassNameForClassLoader(PsiClass aClass) {
final String qName = aClass.getQualifiedName();
return replaceDotsWithDollars(qName, aClass);
}
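/**
* Converts the qualified name of a (possibly nested) class into the binary form used by class loaders,
* replacing the dots that separate enclosing class names with '$'.
*/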
public static String replaceDotsWithDollars(final String qName, PsiClass aClass) {
StringBuffer qNameBuffer = new StringBuffer(qName);
int fromIndex = qNameBuffer.length();
PsiElement parent = aClass.getParent();
while (parent instanceof PsiClass) {
final int dotIndex = qNameBuffer.lastIndexOf(".", fromIndex);
if (dotIndex < 0) break;
qNameBuffer.replace(dotIndex, dotIndex + 1, "$");
fromIndex = dotIndex - 1;
parent = parent.getParent();
}
return qNameBuffer.toString();
}
public static String getNewInnerClassName(PsiClass aClass, String oldInnerClassName, String newName) {
if (!oldInnerClassName.endsWith(aClass.getName())) return newName;
StringBuffer buffer = new StringBuffer(oldInnerClassName);
buffer.replace(buffer.length() - aClass.getName().length(), buffer.length(), newName);
return buffer.toString();
}
public static boolean isSuperOrThisCall(PsiStatement statement, boolean testForSuper, boolean testForThis) {
if (!(statement instanceof PsiExpressionStatement)) return false;
PsiExpression expression = ((PsiExpressionStatement)statement).getExpression();
if (!(expression instanceof PsiMethodCallExpression)) return false;
final PsiReferenceExpression methodExpression = ((PsiMethodCallExpression)expression).getMethodExpression();
if (testForSuper) {
if ("super".equals(methodExpression.getText())) return true;
}
if (testForThis) {
if ("this".equals(methodExpression.getText())) return true;
}
return false;
}
public static void visitImplicitConstructorUsages(PsiClass aClass,
final ImplicitConstructorUsageVisitor implicitConstructorUsageVistor) {
PsiManager manager = aClass.getManager();
GlobalSearchScope projectScope = GlobalSearchScope.projectScope(manager.getProject());
final PsiClass[] inheritors = manager.getSearchHelper().findInheritors(aClass, projectScope, false);
for (PsiClass inheritor : inheritors) {
visitImplicitSuperConstructorUsages(inheritor, implicitConstructorUsageVistor, aClass);
}
}
public static void visitImplicitSuperConstructorUsages(PsiClass subClass,
final ImplicitConstructorUsageVisitor implicitConstructorUsageVistor,
PsiClass superClass) {
final PsiMethod baseDefaultConstructor = findDefaultConstructor (superClass);
final PsiMethod[] constructors = subClass.getConstructors();
if (constructors.length > 0) {
for (PsiMethod constructor : constructors) {
final PsiStatement[] statements = constructor.getBody().getStatements();
if (statements.length < 1 || !isSuperOrThisCall(statements[0], true, true)) {
implicitConstructorUsageVistor.visitConstructor(constructor, baseDefaultConstructor);
}
}
}
else {
implicitConstructorUsageVistor.visitClassWithoutConstructors(subClass);
}
}
private static PsiMethod findDefaultConstructor(final PsiClass aClass) {
final PsiMethod[] constructors = aClass.getConstructors();
for (PsiMethod constructor : constructors) {
if (constructor.getParameterList().getParameters().length == 0) return constructor;
}
return null;
}
public static interface ImplicitConstructorUsageVisitor {
void visitConstructor(PsiMethod constructor, PsiMethod baseConstructor);
void visitClassWithoutConstructors(PsiClass aClass);
}
public interface Graph<T> {
Set<T> getVertices();
Set<T> getTargets(T source);
}
/**
* Returns the subset of <code>graph.getVertices()</code> that is the transitive closure (by
* <code>graph.getTargets()</code>) of the following property: <code>initialRelation.value()</code> is true
* for the vertex itself or for one of <code>graph.getTargets(vertex)</code>.
* <p/>
* Note that <code>graph.getTargets()</code> is not necessarily a subset of <code>graph.getVertices()</code>.
*
* @param graph the graph to traverse
* @param initialRelation the seed condition
* @return subset of graph.getVertices()
*/
public static <T> Set<T> transitiveClosure(Graph<T> graph, Condition<T> initialRelation) {
Set<T> result = new HashSet<T>();
final Set<T> vertices = graph.getVertices();
boolean anyChanged;
do {
anyChanged = false;
for (T currentVertex : vertices) {
if (!result.contains(currentVertex)) {
if (!initialRelation.value(currentVertex)) {
Set<T> targets = graph.getTargets(currentVertex);
for (T currentTarget : targets) {
if (result.contains(currentTarget) || initialRelation.value(currentTarget)) {
result.add(currentVertex);
anyChanged = true;
break;
}
}
}
else {
result.add(currentVertex);
}
}
}
}
while (anyChanged);
return result;
}
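/**
* Compares two types for equivalence ignoring generics: array types are unwrapped, primitive types are
* compared directly, and class types are compared by their resolved classes.
*/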
public static boolean equivalentTypes(PsiType t1, PsiType t2, PsiManager manager) {
while (t1 instanceof PsiArrayType) {
if (!(t2 instanceof PsiArrayType)) return false;
t1 = ((PsiArrayType)t1).getComponentType();
t2 = ((PsiArrayType)t2).getComponentType();
}
if (t1 instanceof PsiPrimitiveType) {
if (t2 instanceof PsiPrimitiveType) {
return t1.equals(t2);
}
else {
return false;
}
}
return manager.areElementsEquivalent(PsiUtil.resolveClassInType(t1), PsiUtil.resolveClassInType(t2));
}
public static List<PsiVariable> collectReferencedVariables(PsiElement scope) {
final List<PsiVariable> result = new ArrayList<PsiVariable>();
scope.accept(new PsiRecursiveElementVisitor() {
public void visitReferenceExpression(PsiReferenceExpression expression) {
final PsiElement element = expression.resolve();
if (element instanceof PsiVariable) {
result.add((PsiVariable)element);
}
final PsiExpression qualifier = expression.getQualifierExpression();
if (qualifier != null) {
qualifier.accept(this);
}
}
});
return result;
}
public static boolean isModifiedInScope(PsiVariable variable, PsiElement scope) {
final PsiReference[] references = variable.getManager().getSearchHelper().findReferences(variable, new LocalSearchScope(scope), false);
for (PsiReference reference : references) {
if (isAssignmentLHS(reference.getElement())) return true;
}
return false;
}
public static String getNameOfReferencedParameter(PsiDocTag tag) {
LOG.assertTrue("param".equals(tag.getName()));
final PsiElement[] dataElements = tag.getDataElements();
if (dataElements.length < 1) return null;
return dataElements[0].getText();
}
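/**
* Regenerates the @param tags of the method's javadoc to match the current parameter order: tags that
* still match a parameter by name are reused, empty tags are created for the newly added parameters,
* and stale tags are removed. If the comment has no @param tags for the pre-existing parameters,
* the javadoc is left untouched.
*/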
public static void fixJavadocsForParams(PsiMethod method, Set<PsiParameter> newParameters) throws IncorrectOperationException {
final PsiDocComment docComment = method.getDocComment();
if (docComment == null) return;
final PsiParameter[] parameters = method.getParameterList().getParameters();
final PsiDocTag[] paramTags = docComment.findTagsByName("param");
if (parameters.length > 0 && newParameters.size() < parameters.length && paramTags.length == 0) return;
Map<PsiParameter, PsiDocTag> tagForParam = new HashMap<PsiParameter, PsiDocTag>();
for (PsiParameter parameter : parameters) {
boolean found = false;
for (PsiDocTag paramTag : paramTags) {
if (parameter.getName().equals(getNameOfReferencedParameter(paramTag))) {
tagForParam.put(parameter, paramTag);
found = true;
break;
}
}
if (!found && !newParameters.contains(parameter)) {
tagForParam.put(parameter, null);
}
}
List<PsiDocTag> newTags = new ArrayList<PsiDocTag>();
for (PsiParameter parameter : parameters) {
if (tagForParam.containsKey(parameter)) {
final PsiDocTag psiDocTag = tagForParam.get(parameter);
if (psiDocTag != null) {
newTags.add((PsiDocTag)psiDocTag.copy());
}
}
else {
newTags.add(method.getManager().getElementFactory().createParamTag(parameter.getName(), ""));
}
}
PsiDocTag anchor = paramTags.length > 0 ? paramTags[paramTags.length - 1] : null;
for (PsiDocTag psiDocTag : newTags) {
anchor = (PsiDocTag)docComment.addAfter(psiDocTag, anchor);
}
for (PsiDocTag paramTag : paramTags) {
paramTag.delete();
}
}
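/**
* Returns the directory of the package under the given source root, creating any missing subdirectories.
*/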
public static PsiDirectory createPackageDirectoryInSourceRoot(PackageWrapper aPackage, final VirtualFile sourceRoot)
throws IncorrectOperationException {
final PsiDirectory[] directories = aPackage.getDirectories();
for (PsiDirectory directory : directories) {
if (VfsUtil.isAncestor(sourceRoot, directory.getVirtualFile(), false)) {
return directory;
}
}
String qNameToCreate = qNameToCreateInSourceRoot(aPackage, sourceRoot);
final String[] shortNames = qNameToCreate.split("\\.");
PsiDirectory current = aPackage.getManager().findDirectory(sourceRoot);
LOG.assertTrue(current != null);
for (String shortName : shortNames) {
PsiDirectory subdirectory = current.findSubdirectory(shortName);
if (subdirectory == null) {
subdirectory = current.createSubdirectory(shortName);
}
current = subdirectory;
}
return current;
}
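/**
* Computes the package-relative qualified name that still needs to be created under the source root;
* throws IncorrectOperationException if the target package cannot be placed under that root.
*/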
public static String qNameToCreateInSourceRoot(PackageWrapper aPackage, final VirtualFile sourceRoot)
throws IncorrectOperationException {
String targetQName = aPackage.getQualifiedName();
String sourceRootPackage = ProjectRootManager.getInstance(aPackage.getManager().getProject()).getFileIndex().getPackageNameByDirectory(sourceRoot);
if (sourceRootPackage == null || !targetQName.startsWith(sourceRootPackage)) {
throw new IncorrectOperationException("Cannot create package '" + targetQName + "' in source folder " + sourceRoot.getPresentableUrl());
}
String result = targetQName.substring(sourceRootPackage.length());
if (StringUtil.startsWithChar(result, '.')) result = result.substring(1); // remove initial '.'
return result;
}
public static PsiDirectory findPackageDirectoryInSourceRoot(PackageWrapper aPackage, final VirtualFile sourceRoot) {
final PsiDirectory[] directories = aPackage.getDirectories();
for (PsiDirectory directory : directories) {
if (VfsUtil.isAncestor(sourceRoot, directory.getVirtualFile(), false)) {
return directory;
}
}
String qNameToCreate;
try {
qNameToCreate = qNameToCreateInSourceRoot(aPackage, sourceRoot);
}
catch (IncorrectOperationException e) {
return null;
}
final String[] shortNames = qNameToCreate.split("\\.");
PsiDirectory current = aPackage.getManager().findDirectory(sourceRoot);
LOG.assertTrue(current != null);
for (String shortName : shortNames) {
PsiDirectory subdirectory = current.findSubdirectory(shortName);
if (subdirectory == null) {
return null;
}
current = subdirectory;
}
return current;
}
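/**
* Appends a description of the given elements to the buffer and returns it: type and descriptive name
* for a single element, otherwise counts grouped by element type.
*/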
public static String calculatePsiElementDescriptionList(PsiElement[] elements, StringBuffer buffer) {
if (elements.length == 1) {
buffer.append(UsageViewUtil.getType(elements[0]));
buffer.append(' ');
buffer.append(UsageViewUtil.getDescriptiveName(elements[0]));
}
else {
Map<String, Ref<Integer>> map = new HashMap<String, Ref<Integer>>();
for (PsiElement element : elements) {
final String type = UsageViewUtil.getType(element);
Ref<Integer> ref = map.get(type);
if (ref == null) {
ref = Ref.create(new Integer(0));
map.put(type, ref);
}
ref.set(new Integer(ref.get().intValue() + 1));
}
final Set<Map.Entry<String, Ref<Integer>>> entries = map.entrySet();
int index = 0;
for (Map.Entry<String, Ref<Integer>> entry : entries) {
final String type = entry.getKey();
final int count = entry.getValue().get().intValue();
if (index > 0 && index + 1 < entries.size()) {
buffer.append(", ");
}
else if (index > 0 && index + 1 == entries.size()) {
buffer.append(" and ");
}
buffer.append(count);
buffer.append(" ");
buffer.append(count == 1 ? type : StringUtil.pluralize(type));
index++;
}
}
return buffer.toString();
}
public static class ConditionCache <T> implements Condition<T> {
private final Condition<T> myCondition;
private final HashSet<T> myProcessedSet = new HashSet<T>();
private final HashSet<T> myTrueSet = new HashSet<T>();
public ConditionCache(Condition<T> condition) {
myCondition = condition;
}
public boolean value(T object) {
if (!myProcessedSet.contains(object)) {
myProcessedSet.add(object);
final boolean value = myCondition.value(object);
if (value) {
myTrueSet.add(object);
return true;
}
return false;
}
return myTrueSet.contains(object);
}
}
public static class IsInheritorOf implements Condition<PsiClass> {
private final PsiClass myClass;
private final ConditionCache<PsiClass> myConditionCache;
public IsInheritorOf(PsiClass aClass) {
myClass = aClass;
myConditionCache = new ConditionCache<PsiClass>(new MyCondition());
}
public boolean value(PsiClass object) {
return myConditionCache.value(object);
}
private class MyCondition implements Condition<PsiClass> {
public boolean value(PsiClass aClass) {
return aClass.isInheritor(myClass, true);
}
}
}
public static class IsDescendantOf implements Condition<PsiClass> {
private final PsiClass myClass;
private final ConditionCache<PsiClass> myConditionCache;
public IsDescendantOf(PsiClass aClass) {
myClass = aClass;
myConditionCache = new ConditionCache<PsiClass>(new Condition<PsiClass>() {
public boolean value(PsiClass aClass) {
return InheritanceUtil.isInheritorOrSelf(aClass, myClass, true);
}
});
}
public boolean value(PsiClass aClass) {
return myConditionCache.value(aClass);
}
}
public static void processIncorrectOperation(final Project project, IncorrectOperationException e) {
final String message = e.getMessage();
final int index = message != null ? message.indexOf("java.io.IOException") : -1;
if (index >= 0) {
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
Messages.showMessageDialog(project, message.substring(index + "java.io.IOException".length()), "Error",
Messages.getErrorIcon());
}
});
}
else {
LOG.error(e);
}
}
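/**
* Collects conflict messages for references that would become unresolvable after moving the given
* elements into the module that contains the target: references from the moved elements to code outside
* the target module's dependencies, and references to the moved elements from modules that will no longer
* be able to see them.
*/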
public static void analyzeModuleConflicts(Project project,
Collection<? extends PsiElement> scope,
final UsageInfo[] usages,
PsiElement target,
final Collection<String> conflicts) {
if (scope == null) return;
final VirtualFile vFile;
if (!(target instanceof PsiDirectory)) {
vFile = target.getContainingFile().getVirtualFile();
}
else {
vFile = ((PsiDirectory)target).getVirtualFile();
}
if (vFile == null) return;
analyzeModuleConflicts(project, scope, usages, vFile, conflicts);
}
public static void analyzeModuleConflicts(Project project,
final Collection<? extends PsiElement> scopes,
final UsageInfo[] usages,
final VirtualFile vFile,
final Collection<String> conflicts) {
if (scopes == null) return;
for (final PsiElement scope : scopes) {
if (scope instanceof PsiPackage || scope instanceof PsiDirectory) return;
}
final Module targetModule = ModuleUtil.getModuleForFile(project, vFile);
if (targetModule == null) return;
final GlobalSearchScope resolveScope = GlobalSearchScope.moduleWithDependenciesAndLibrariesScope(targetModule);
final HashSet<PsiElement> reported = new HashSet<PsiElement>();
for (final PsiElement scope : scopes) {
scope.accept(new PsiRecursiveElementVisitor() {
public void visitReferenceElement(PsiJavaCodeReferenceElement reference) {
super.visitReferenceElement(reference);
final PsiElement resolved = reference.resolve();
if (resolved != null && !reported.contains(resolved)
&& !isAncestor(resolved, scopes)
&& !PsiSearchScopeUtil.isInScope(resolveScope, resolved)) {
final String scopeDescription = ConflictsUtil.htmlEmphasize(ConflictsUtil.getDescription(ConflictsUtil.getContainer(reference),
true));
final String message =
ConflictsUtil.capitalize(ConflictsUtil.htmlEmphasize(ConflictsUtil.getDescription(resolved, true))) +
", referenced in " + scopeDescription +
", will not be accessible in module " +
ConflictsUtil.htmlEmphasize(targetModule.getName());
conflicts.add(message);
reported.add(resolved);
}
}
});
}
NextUsage:
for (UsageInfo usage : usages) {
if (usage instanceof MoveRenameUsageInfo) {
final MoveRenameUsageInfo moveRenameUsageInfo = ((MoveRenameUsageInfo)usage);
final PsiElement element = usage.getElement();
if (element != null &&
PsiTreeUtil.getParentOfType(element, PsiImportStatement.class, false) == null) {
for (PsiElement scope : scopes) {
if (PsiTreeUtil.isAncestor(scope, element, false)) continue NextUsage;
}
final GlobalSearchScope resolveScope1 = element.getResolveScope();
if (!resolveScope1.isSearchInModuleContent(targetModule)) {
final PsiMember container = ConflictsUtil.getContainer(element);
LOG.assertTrue(container != null);
final String scopeDescription = ConflictsUtil.htmlEmphasize(ConflictsUtil.getDescription(container,
true));
Module module = ProjectRootManager.getInstance(project).getFileIndex().getModuleForFile(element.getContainingFile().getVirtualFile());
final String message =
ConflictsUtil.capitalize(ConflictsUtil.htmlEmphasize(ConflictsUtil.getDescription(moveRenameUsageInfo.referencedElement, true))) +
", referenced in " + scopeDescription +
", will not be accessible from module " +
ConflictsUtil.htmlEmphasize(module.getName());
conflicts.add(message);
}
}
}
}
}
private static boolean isAncestor(final PsiElement resolved, final Collection<? extends PsiElement> scopes) {
for (final PsiElement scope : scopes) {
if (PsiTreeUtil.isAncestor(scope, resolved, false)) return true;
}
return false;
}
}
|
wrong assert
|
source/com/intellij/refactoring/util/RefactoringUtil.java
|
wrong assert
|