answer
stringlengths 17
10.2M
|
|---|
package com.philliphsu.bottomsheetpickers.view.numberpad;
import java.text.DateFormatSymbols;
import static com.philliphsu.bottomsheetpickers.view.numberpad.AmPmStates.AM;
import static com.philliphsu.bottomsheetpickers.view.numberpad.AmPmStates.HRS_24;
import static com.philliphsu.bottomsheetpickers.view.numberpad.AmPmStates.PM;
import static com.philliphsu.bottomsheetpickers.view.numberpad.AmPmStates.UNSPECIFIED;
import static com.philliphsu.bottomsheetpickers.view.numberpad.DigitwiseTimeModel.MAX_DIGITS;
/**
 * Presenter for the number pad time picker. Mediates between the digit store
 * ({@link DigitwiseTimeModel}) and the {@link INumberPadTimePicker.View}:
 * it forwards key clicks to the model, formats the entered digits for display,
 * and enables/disables the number keys, alt (AM/PM or :00/:30) keys, backspace,
 * and OK button according to what input would still yield a valid time.
 */
final class NumberPadTimePickerPresenter implements
        INumberPadTimePicker.Presenter,
        DigitwiseTimeModel.OnInputChangeListener {
    // TODO: Delete this if we're not setting a capacity.
    // Formatted time string has a maximum of 8 characters
    // in the 12-hour clock, e.g 12:59 AM. Although the 24-hour
    // clock should be capped at 5 characters, the difference
    // is not significant enough to deal with the separate cases.
    private static final int MAX_CHARS = 8;
    // Constant for converting text digits to numeric digits in base-10.
    private static final int BASE_10 = 10;

    // Stores the raw digits entered so far and calls back into this presenter
    // (onDigitStored/onDigitRemoved/onDigitsCleared) on every change.
    private final DigitwiseTimeModel timeModel = new DigitwiseTimeModel(this);
    // Interprets the stored digits as an hour/minute pair and validates them.
    private final DigitwiseTimeParser timeParser = new DigitwiseTimeParser(timeModel);
    // TODO: Delete setting of capacity.
    // Human-readable form of the current input, e.g. "12:59 AM". Kept in sync
    // with the model; also drives the backspace logic for AM/PM removal.
    private final StringBuilder mFormattedInput = new StringBuilder(MAX_CHARS);
    private final INumberPadTimePicker.View view;
    // Whether AM, PM, or 24-hour mode has been committed for the current input.
    private @AmPmStates.AmPmState int mAmPmState = UNSPECIFIED;
    private final boolean mIs24HourMode;

    @Deprecated // TODO: Delete this! THis should not make it into release.
    NumberPadTimePickerPresenter(INumberPadTimePicker.View view) {
        this(view, false);
    }

    NumberPadTimePickerPresenter(INumberPadTimePicker.View view, boolean is24HourMode) {
        this.view = view;
        mIs24HourMode = is24HourMode;
    }

    // Called when a digit key (0-9) is pressed; the key's text is the digit.
    @Override
    public void onNumberKeyClick(CharSequence numberKeyText) {
        timeModel.storeDigit(Integer.parseInt(numberKeyText.toString()));
    }

    // Called when an alt key is pressed. In 12-hour mode the alt keys carry
    // "AM"/"PM" (locale-dependent); in 24-hour mode they carry ":00"/":30".
    @Override
    public void onAltKeyClick(CharSequence altKeyText) {
        // Manually insert special characters for 12-hour clock
        if (!is24HourFormat()) {
            if (count() <= 2) {
                // The colon is inserted for you
                insertDigits(0, 0);
            }
            // text is AM or PM, so include space before
            String ampm = altKeyText.toString();
            // TODO: When we're finalizing the code, we probably don't need to
            // format this in anymore; just tell the view to update its am/pm
            // display directly.
            // However, we currently need to leave this in for the backspace
            // logic to work correctly.
            mFormattedInput.append(' ').append(ampm);
            String am = new DateFormatSymbols().getAmPmStrings()[0];
            mAmPmState = ampm.equalsIgnoreCase(am) ? AM : PM;
            // Digits will be shown for you on insert, but not AM/PM
            view.updateAmPmDisplay(ampm);
        } else {
            // Assuming the text is one of ":00" or ":30", this
            // evaluates to 2.
            final int numDigits = altKeyText.length() - 1;
            int[] digits = new int[numDigits];
            // charAt(0) is the colon, so skip i = 0.
            // We are only interested in storing the digits.
            for (int i = 1; i < altKeyText.length(); i++) {
                // The array and the text do not have the same lengths,
                // so the iterator value does not correspond to the
                // array index directly
                digits[i - 1] = Character.digit(altKeyText.charAt(i), BASE_10);
            }
            // Colon is added for you
            insertDigits(digits);
            mAmPmState = HRS_24;
        }
        updateNumpadStates();
    }

    // Deletes the AM/PM suffix first (12-hour mode), otherwise the last digit.
    @Override
    public void onBackspaceClick() {
        final int len = mFormattedInput.length();
        if (!mIs24HourMode && mAmPmState != UNSPECIFIED) {
            mAmPmState = UNSPECIFIED;
            // Strip " AM"/" PM" (everything from the space onward).
            mFormattedInput.delete(mFormattedInput.indexOf(" "), len);
            view.updateAmPmDisplay(null);
            /* No digit was actually deleted, so there is no need to
             * update the time display. */
            updateNumpadStates();
        } else {
            timeModel.removeDigit();
        }
    }

    // Long-press backspace clears all digits; returns whether anything was cleared.
    @Override
    public boolean onBackspaceLongClick() {
        return timeModel.clearDigits();
    }

    // Resets the view to its empty state when the picker is (re)shown.
    @Override
    public void onShowTimePicker(/*TODO: Require is24HourMode param*/) {
        view.updateTimeDisplay(null);
        view.updateAmPmDisplay(null);
        updateNumpadStates();
        // TODO: Update number key states.
        // TODO: Set the alt button texts according to is24HourMode.
    }

    // Model callback: a digit was appended to the store.
    @Override
    public void onDigitStored(int digit) {
        // Append the new digit(s) to the formatter
        updateFormattedInputOnDigitInserted(digit);
        view.updateTimeDisplay(mFormattedInput.toString());
        updateNumpadStates();
    }

    // Model callback: the most recent digit was removed from the store.
    @Override
    public void onDigitRemoved(int digit) {
        updateFormattedInputOnDigitDeleted();
        view.updateTimeDisplay(mFormattedInput.toString());
        updateNumpadStates();
    }

    // Model callback: all digits were cleared at once.
    @Override
    public void onDigitsCleared() {
        mFormattedInput.delete(0, mFormattedInput.length());
        mAmPmState = UNSPECIFIED;
        updateNumpadStates(); // TOneverDO: before resetting mAmPmState to UNSPECIFIED
        view.updateTimeDisplay(null);
        if (!mIs24HourMode) {
            view.updateAmPmDisplay(null);
        }
    }

    // Number of digits entered so far (0..MAX_DIGITS).
    private int count() {
        return timeModel.count();
    }

    private boolean is24HourFormat() {
        return mIs24HourMode;
    }

    // The entered digits interpreted as a single base-10 integer, e.g. 1, 2, 5 -> 125.
    private int getInput() {
        return timeModel.getInput();
    }

    // Enables number keys in [start, end); enable(0, 0) disables them all.
    private void enable(int start, int end) {
        view.setNumberKeysEnabled(start, end);
    }

    private void insertDigits(int... digits) {
        timeModel.storeDigits(digits);
    }

    private void updateNumpadStates() {
        // TOneverDO: after updateNumberKeysStates(), esp. if clock is 12-hour,
        // because it calls enable(0, 0), which checks if the alt buttons have been
        // disabled as well before firing the onInputDisabled().
        updateAltButtonStates();
        updateBackspaceState();
        updateNumberKeysStates();
        updateFabState();
    }

    // OK button is enabled only when the current input parses to a valid time.
    private void updateFabState() {
        view.setOkButtonEnabled(timeParser.checkTimeValid(mAmPmState));
    }

    private void updateBackspaceState() {
        view.setBackspaceEnabled(count() > 0);
    }

    // Enables/disables the two alt keys based on how many digits are entered
    // and whether pressing them could still complete a valid time.
    private void updateAltButtonStates() {
        if (count() == 0) {
            // No input, no access!
            view.setLeftAltKeyEnabled(false);
            view.setRightAltKeyEnabled(false);
        } else if (count() == 1) {
            // Any of 0-9 inputted, always have access in either clock.
            view.setLeftAltKeyEnabled(true);
            view.setRightAltKeyEnabled(true);
        } else if (count() == 2) {
            // Any 2 digits that make a valid hour for either clock are eligible for access
            int time = getInput();
            boolean validTwoDigitHour = is24HourFormat() ? time <= 23 : time >= 10 && time <= 12;
            view.setLeftAltKeyEnabled(validTwoDigitHour);
            view.setRightAltKeyEnabled(validTwoDigitHour);
        } else if (count() == 3) {
            if (is24HourFormat()) {
                // For the 24-hour clock, no access at all because
                // two more digits (00 or 30) cannot be added to 3 digits.
                view.setLeftAltKeyEnabled(false);
                view.setRightAltKeyEnabled(false);
            } else {
                // True for any 3 digits, if AM/PM not already entered
                boolean enabled = mAmPmState == UNSPECIFIED;
                view.setLeftAltKeyEnabled(enabled);
                view.setRightAltKeyEnabled(enabled);
            }
        } else if (count() == MAX_DIGITS) {
            // If all 4 digits are filled in, the 24-hour clock has absolutely
            // no need for the alt buttons. However, The 12-hour clock has
            // complete need of them, if not already used.
            boolean enabled = !is24HourFormat() && mAmPmState == UNSPECIFIED;
            view.setLeftAltKeyEnabled(enabled);
            view.setRightAltKeyEnabled(enabled);
        }
    }

    // Enables the subset of digit keys that could still extend the current
    // input into a valid time; the ranges are half-open [start, end).
    private void updateNumberKeysStates() {
        int cap = 10; // number of buttons
        boolean is24hours = is24HourFormat();
        if (count() == 0) {
            // 12-hour times cannot start with 0, so the 0 key starts disabled.
            enable(is24hours ? 0 : 1, cap);
            return;
        } else if (count() == MAX_DIGITS) {
            enable(0, 0);
            return;
        }
        int time = getInput();
        if (is24hours) {
            if (count() == 1) {
                // First digit 0 or 1 allows any second digit; 2 caps hours at 23.
                enable(0, time < 2 ? cap : 6);
            } else if (count() == 2) {
                // Third digit is the minutes' tens place, so it must be 0-5
                // unless the first two digits are reparsed as H:MM.
                enable(0, time % 10 >= 0 && time % 10 <= 5 ? cap : 6);
            } else if (count() == 3) {
                if (time >= 236) {
                    enable(0, 0);
                } else {
                    enable(0, time % 10 >= 0 && time % 10 <= 5 ? cap : 0);
                }
            }
        } else {
            if (count() == 1) {
                if (time == 0) {
                    // The 0 key is disabled while empty in 12-hour mode,
                    // so a leading zero should be impossible.
                    throw new IllegalStateException("12-hr format, zeroth digit = 0?");
                } else {
                    enable(0, 6);
                }
            } else if (count() == 2 || count() == 3) {
                if (time >= 126) {
                    enable(0, 0);
                } else {
                    if (time >= 100 && time <= 125 && mAmPmState != UNSPECIFIED) {
                        enable(0, 0);
                    } else {
                        enable(0, time % 10 >= 0 && time % 10 <= 5 ? cap : 0);
                    }
                }
            }
        }
    }

    // Keeps mFormattedInput in sync after a digit insert, repositioning the
    // colon as the interpretation of the digits changes (H:MM vs HH:MM).
    private void updateFormattedInputOnDigitInserted(int newDigit) {
        mFormattedInput.append(newDigit);
        // Add colon if necessary, depending on how many digits entered so far
        if (count() == 3) {
            // Insert a colon
            int digits = getInput();
            if (digits >= 60 && digits < 100 || digits >= 160 && digits < 200) {
                // From 060-099 (really only to 095, but might as well go up to 100)
                // From 160-199 (really only to 195, but might as well go up to 200),
                // time does not exist if colon goes at pos. 1
                mFormattedInput.insert(2, ':');
                // These times only apply to the 24-hour clock, and if we're here,
                // either clock.
                // The 12-hour clock can only have mAmPmState set when AM/PM are clicked.
            } else {
                // A valid time exists if colon is at pos. 1
                mFormattedInput.insert(1, ':');
                // We can set mAmPmState here (and not in the above case) because
                if (is24HourFormat()) {
                    mAmPmState = HRS_24;
                }
            }
        } else if (count() == MAX_DIGITS) {
            int colonAt = mFormattedInput.indexOf(":");
            // Since we now batch update the formatted input whenever
            // digits are inserted, the colon may legitimately not be
            // present in the formatted input when this is initialized.
            if (colonAt != -1) {
                // Colon needs to move, so remove the colon previously added
                mFormattedInput.deleteCharAt(colonAt);
            }
            mFormattedInput.insert(2, ':');
            if (is24HourFormat()) {
                mAmPmState = HRS_24;
            }
        }
    }

    // Keeps mFormattedInput in sync after a digit delete (inverse of the above).
    private void updateFormattedInputOnDigitDeleted() {
        int len = mFormattedInput.length();
        mFormattedInput.delete(len - 1, len);
        if (count() == 3) {
            int value = getInput();
            // Move the colon from its 4-digit position to its 3-digit position,
            // unless doing so gives an invalid time.
            // e.g. 17:55 becomes 1:75, which is invalid.
            // All 3-digit times in the 12-hour clock at this point should be
            // valid. The limits <=155 and (>=200 && <=235) are really only
            // imposed on the 24-hour clock, and were chosen because 4-digit times
            // in the 24-hour clock can only go up to 15:5[0-9] or be within the range
            // [20:00, 23:59] if they are to remain valid when they become three digits.
            // The is24HourFormat() check is therefore unnecessary.
            if (value <= 155 || value >= 200 && value <= 235) {
                mFormattedInput.deleteCharAt(mFormattedInput.indexOf(":"));
                mFormattedInput.insert(1, ":");
            } else {
                // previously [16:00, 19:59]
                mAmPmState = UNSPECIFIED;
            }
        } else if (count() == 2) {
            // Remove the colon
            mFormattedInput.deleteCharAt(mFormattedInput.indexOf(":"));
            // No time can be valid with only 2 digits in either system.
            // I don't think we actually need this, but it can't hurt?
            mAmPmState = UNSPECIFIED;
        }
    }
}
|
package Alg.Kernelization;
import org.jgrapht.Graphs;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.graph.Multigraph;
import org.jgrapht.alg.util.UnionFind;
import sun.awt.image.ImageWatched;
import java.util.*;
/**
 * 2-approximation algorithm for (weighted) Feedback Vertex Set, following the
 * FEEDBACK scheme of Bafna/Berman/Fujito: repeatedly lower vertex weights by a
 * gamma value (semidisjoint-cycle case or general case), push zero-weight
 * vertices onto a stack, then pop the stack and discard redundant vertices
 * using union-find cycle checks.
 *
 * NOTE(review): WeightedVertex comparisons previously used reference equality
 * ({@code ==}) on freshly constructed objects, which is always false; these
 * have been replaced with id-based equality via {@link Objects#equals}.
 */
public class Approximation {
    /**
     * Helper function to produce clean graphs (with degree >= 2)
     *
     * @param solution The solution from the current iteration (G-F)
     * @param vertex Vertex from the current iteration
     * @param degree Degree of the vertex in the current iteration
     * @return the same solution object, with the vertex removed if degree <= 1
     */
    public static ReductionSolution cleanUp(ReductionSolution solution, Integer vertex, int degree)
    {
        // Rule 0 & Rule 1: a vertex of degree <= 1 can never lie on a cycle,
        // so it is removed from the graph without joining the FVS.
        if (degree <= 1) {
            Kernelization.removeVertex(solution, vertex, false);
        }
        return solution;
    }

    /**
     * Helper function to find gamma value for the semidisjoint cycle case:
     * the minimum weight over all vertices of the cycle.
     *
     * @param graph The graph G-F from the current iteration (unused, kept for API stability)
     * @param semiDisjointCycle The vertices from the semidisjoint cycle C of the current iteration
     * @return the minimum vertex weight in the cycle
     */
    public static float gammaCase1(Multigraph<Integer, DefaultEdge> graph, List<WeightedVertex> semiDisjointCycle)
    {
        float gamma = semiDisjointCycle.get(0).weight;
        for (WeightedVertex c : semiDisjointCycle) {
            if (c.weight < gamma) {
                gamma = c.weight;
            }
        }
        return gamma;
    }

    /**
     * Helper function to find gamma value for the case that no semidisjoint cycle was found:
     * the minimum of weight(v) / (degree(v) - 1) over all vertices still in the graph.
     *
     * @param graph The graph G-F from the current iteration
     * @param vertices candidate vertex ids (vertices absent from the graph are skipped)
     * @return the minimum weight/(degree-1) ratio
     */
    public static float gammaCase2(Multigraph<Integer, DefaultEdge> graph, Integer[] vertices)
    {
        // Initialize with the first vertex that is still present in the graph.
        int initializeDegree = -1;
        WeightedVertex initializeVertex = new WeightedVertex(-1);
        for (int i = 0; i < graph.vertexSet().size(); i++) {
            if (graph.containsVertex(vertices[i])) {
                initializeDegree = graph.degreeOf(vertices[i]);
                initializeVertex = new WeightedVertex(vertices[i]);
                break;
            }
        }
        // NOTE(review): if degree == 1 this divides by zero; callers are expected
        // to have cleaned up degree-<=1 vertices beforehand — TODO confirm.
        float gamma = initializeVertex.weight / (initializeDegree - 1); // initialize gamma value to compare with
        for (Integer v : vertices) {
            if (!graph.containsVertex(v)) {
                continue;
            }
            int degree = graph.degreeOf(v);
            WeightedVertex wv = new WeightedVertex(v);
            if (gamma > wv.weight / (degree - 1)) { // set new min gamma value
                gamma = (wv.weight / (degree - 1));
            }
        }
        return gamma;
    }

    /**
     * Convenience overload: snapshots the vertex set and starts with an empty stack.
     *
     * @param ingraph the input graph G
     * @param cloneGraph whether to work on a clone of G (true) or mutate G in place (false)
     * @param weightedVertices vertices carrying weight {@code weight} instead of 1
     * @param weight the weight assigned to {@code weightedVertices}
     */
    public static ReductionSolution determineFVS(Multigraph<Integer, DefaultEdge> ingraph, boolean cloneGraph, Integer[] weightedVertices, int weight) // changed from boolean to int
    {
        Multigraph<Integer, DefaultEdge> graph = cloneGraph ? (Multigraph<Integer, DefaultEdge>) ingraph.clone() : ingraph;
        Deque<Integer> STACK = new ArrayDeque<>();
        Integer[] vertices = (graph.vertexSet()).toArray(new Integer[graph.vertexSet().size()]);
        return Approximation.determineFVS(ingraph, graph, vertices, STACK, weightedVertices, weight);
    }

    /**
     * Determine the FVS (superset) of G (see FEEDBACK pseudo-code from paper)
     *
     * @param ingraph the original graph G (used for degree lookups during the cycle walk)
     * @param graph the working graph G-F, mutated during the run
     * @param vertices snapshot of the vertex ids to iterate over
     * @param STACK collects zero-weight vertices for the redundancy-elimination pass
     * @param weightedVertices vertices carrying weight {@code weight} instead of 1
     * @param weight the weight assigned to {@code weightedVertices}
     * @return the reduction solution holding F and its total weight
     */
    public static ReductionSolution determineFVS(Multigraph<Integer, DefaultEdge> ingraph, Multigraph<Integer, DefaultEdge> graph, Integer[] vertices, Deque<Integer> STACK, Integer[] weightedVertices, int weight) {
        float gammaCase1, gammaCase2;
        ReductionSolution solution = new ReductionSolution();
        solution.reducedGraph = graph;
        /**
         * Iterative reduction of G to G-F by checking for semidisjoint cycles.
         *
         * We fill the STACK with all vertices with weight reduced to 0. After that, we remove the vertices from
         * this STACK that turn out to be redundant and add the rest to our solution F.
         */
        for (Integer v : vertices) {
            if (!solution.reducedGraph.containsVertex(v)) {
                continue;
            }
            WeightedVertex u = new WeightedVertex(v);
            int degree = solution.reducedGraph.degreeOf(u.id);
            if (degree <= 1) { // safety check; however, this should never occur
                continue;
            }
            // we now check if G contains semidisjoint cycles [SDC] (plural)
            if (degree == 2) {
                List<WeightedVertex> semiDisjointCycle = new ArrayList<>();
                List<Integer> leftNeighbors;
                List<Integer> rightNeighbors;
                List<Integer> neighbors = Graphs.neighborListOf(solution.reducedGraph, v);
                WeightedVertex leftNeighbor = new WeightedVertex(neighbors.get(0));
                WeightedVertex rightNeighbor = new WeightedVertex(neighbors.get(1));
                // Create new vertex placeholders that will be overwritten in the loops
                Integer predecessor = u.id;
                Integer vertexPlaceholder = -1;
                // prematurely add vertices to our potential semidisjointCycle container
                semiDisjointCycle.add(u);
                semiDisjointCycle.add(leftNeighbor);
                semiDisjointCycle.add(rightNeighbor);
                // BUGFIX: compare vertex ids, not WeightedVertex references — the two
                // wrappers are freshly constructed, so '==' was always false.
                if (Objects.equals(leftNeighbor.id, rightNeighbor.id)) { // both edges go to the same vertex -> multi-edge loop, remove it
                    Kernelization.removeVertex(solution, u.id, false);
                    Kernelization.removeVertex(solution, leftNeighbor.id, true);
                } else { // check if degrees of both neighbors uphold the properties of an SDC
                    int degreeLeftNeighbor = solution.reducedGraph.degreeOf(leftNeighbor.id);
                    WeightedVertex l1; // placeholder for one of the neighbors of leftNeighbor
                    WeightedVertex l2; // placeholder for one of the neighbors of leftNeighbor
                    WeightedVertex leftException; // placeholder for leftNeighbor.neighbor that violates SDC rules
                    int degreeRightNeighbor = solution.reducedGraph.degreeOf(rightNeighbor.id);
                    WeightedVertex r1; // placeholder for one of the neighbors of rightNeighbor
                    WeightedVertex r2; // placeholder for one of the neighbors of rightNeighbor
                    WeightedVertex rightException; // placeholder for rightNeighbor.neighbor that violates SDC rules
                    // Walk away from u along the cycle through leftNeighbor while
                    // degrees stay 2 (still consistent with a semidisjoint cycle).
                    while (degreeLeftNeighbor == 2) { // still potential vertex contained SDC?
                        leftNeighbors = Graphs.neighborListOf(solution.reducedGraph, leftNeighbor.id);
                        vertexPlaceholder = leftNeighbor.id;
                        l1 = new WeightedVertex(leftNeighbors.get(0));
                        l2 = new WeightedVertex(leftNeighbors.get(1));
                        if (!Objects.equals(l1.id, predecessor)) { // make sure the neighbor we process wasn't already looked at before
                            degreeLeftNeighbor = ingraph.degreeOf(l1.id); // get degree of v in original graph G
                            semiDisjointCycle.add(l1);
                            leftNeighbor = l1; // set leftNeighbor of next loop (this is why we needed vertexPlaceholder)
                        } else {
                            degreeLeftNeighbor = ingraph.degreeOf(l2.id);
                            semiDisjointCycle.add(l2);
                            leftNeighbor = l2; // set leftNeighbor of next loop (this is why we needed vertexPlaceholder)
                        }
                        predecessor = vertexPlaceholder; // remember vertex used in previous iteration to avoid reviewing it again
                    }
                    leftException = leftNeighbor; // semidisjoint cycle exception found
                    predecessor = u.id; // reset value for rightNeighbor-loop
                    while (degreeRightNeighbor == 2) { // still potential vertex contained SDC?
                        rightNeighbors = Graphs.neighborListOf(solution.reducedGraph, rightNeighbor.id);
                        vertexPlaceholder = rightNeighbor.id;
                        r1 = new WeightedVertex(rightNeighbors.get(0));
                        r2 = new WeightedVertex(rightNeighbors.get(1));
                        if (!Objects.equals(r1.id, predecessor)) { // make sure the neighbor we process wasn't already looked at before
                            degreeRightNeighbor = ingraph.degreeOf(r1.id); // get degree of v in original graph G
                            semiDisjointCycle.add(r1);
                            rightNeighbor = r1; // set rightNeighbor of next loop (this is why we needed vertexPlaceholder)
                        } else {
                            degreeRightNeighbor = ingraph.degreeOf(r2.id);
                            semiDisjointCycle.add(r2);
                            rightNeighbor = r2; // set rightNeighbor of next loop (this is why we needed vertexPlaceholder)
                        }
                        predecessor = vertexPlaceholder; // remember vertex used in previous iteration to avoid reviewing it again
                    }
                    rightException = rightNeighbor; // semidisjoint cycle exception found
                    // An SDC may contain at most 1 exception, so we must have that (leftException equals rightException)
                    // BUGFIX: id-based equality instead of reference equality.
                    if (Objects.equals(leftException.id, rightException.id)) { // Case 1: SDC found in current graph
                        gammaCase1 = gammaCase1(solution.reducedGraph, semiDisjointCycle);
                        for (WeightedVertex c : semiDisjointCycle) { // for all members of the cycle
                            for (Integer w : vertices) {
                                if (!solution.reducedGraph.containsVertex(w)) {
                                    continue;
                                }
                                // BUGFIX: Integer/id comparison via equals (reference '=='
                                // fails outside the Integer cache range).
                                if (Objects.equals(w, c.id)) {
                                    c.weight = c.weight - gammaCase1;
                                    if (c.weight <= 0) {
                                        STACK.push(c.id); // add vertex to STACK
                                        solution.reducedGraph.removeVertex(c.id); // update G-F
                                    }
                                }
                            }
                        }
                    } else { // Case 2: no SDC found in current graph
                        gammaCase2 = gammaCase2(solution.reducedGraph, vertices);
                        u.weight = u.weight - gammaCase2 * (degree - 1); // only for the observed vertex
                        if (u.weight <= 0) {
                            if (solution.reducedGraph.containsVertex(u.id)) {
                                STACK.push(u.id);
                                solution.reducedGraph.removeVertex(u.id); // update G-F
                            }
                        }
                    }
                    semiDisjointCycle.clear(); // clear collection for next iteration
                } // endif (left != right)
            } else { // endif (degree == 2)
                // in case we know for certain that the vertex does not belong to an SDC, immediately do:
                gammaCase2 = gammaCase2(solution.reducedGraph, vertices);
                u.weight = u.weight - gammaCase2 * (degree - 1); // only for the observed vertex
                if (u.weight <= 0) {
                    if (solution.reducedGraph.containsVertex(u.id)) {
                        STACK.push(u.id);
                        solution.reducedGraph.removeVertex(u.id); // update G-F
                    }
                }
            }
            // cleanup G (again) until no more vertices with degrees <= 1 exist
            cleanUp(solution, v, degree);
        } // endfor (v:vertices)

        // At this point, G-F contains no (more) SDC and STACK contains all potential vertices from the solution.
        // A vertex is essential iff putting it back into G-F would create a cycle. We check this using UnionFind.
        UnionFind<Integer> union = new UnionFind<>(solution.reducedGraph.vertexSet());
        while (!STACK.isEmpty()) {
            Integer currentVertex = STACK.peek(); // view top item from stack
            // get all edges from current vertex in the original graph G
            LinkedList<DefaultEdge> edges = new LinkedList<>(ingraph.edgesOf(currentVertex));
            // get all corresponding neighbors n and, if n in G-F, store them in collection: neighbors
            List<Integer> neighbors = new ArrayList<>();
            for (DefaultEdge e : edges) {
                neighbors.add(Graphs.getOppositeVertex(ingraph, e, currentVertex));
            }
            // check if v is connected to the same component more than once using a treeset (duplicates)
            TreeSet<Integer> neighborComponents = new TreeSet<>();
            boolean hasDuplicates = false;
            // check for multiple neighbors of currentVertex that are members of the same component
            for (Integer n : neighbors) {
                if (solution.reducedGraph.containsVertex(n)) hasDuplicates |= !neighborComponents.add(union.find(n));
                if (hasDuplicates) break; // we found a loop
            }
            // in case we didn't find a loop, currentVertex is redundant
            if (!hasDuplicates) {
                union.addElement(currentVertex); // add currentVertex back into G-F
                for (Integer n : neighbors) {
                    if (solution.reducedGraph.containsVertex(n)) {
                        union.union(currentVertex, n); // connect the vertex to its neighbors in G-F (UnionFind components)
                    }
                }
                solution.reducedGraph.addVertex(currentVertex); // add vertex back to G-F
            } else { // if we found a loop, currentVertex is essential
                solution.verticesToRemoved.add(currentVertex); // add currentVertex to solution F
            }
            STACK.pop();
        }

        // Next we update any vertex whose weight was artificially increased for certain reduction rules:
        // each weighted vertex in F contributes (weight - 1) extra on top of its unit weight.
        int c = 0;
        for (int v : solution.verticesToRemoved) {
            for (int w : weightedVertices) {
                if (v == w) c++;
            }
        }
        solution.totalFVSweight = solution.verticesToRemoved.size() + c * (weight - 1);
        return solution;
    }

    /**
     * Determine a weighted 2-approximation FVS of a graph G, where vertices have weight 1, or weight w if specified
     *
     * NOTE(review): this method appears unfinished — the main FEEDBACK loop body
     * is empty, so {@code verticesToRemoved} is never populated here. Behavior
     * preserved as-is; confirm intent before use.
     *
     * @param ingraph The graph G
     * @param weightedVertices Vertices with weight w
     * @param weight Weight w
     * @return solution whose FVS is empty pending completion of the algorithm
     */
    public static ReductionSolution determineFVS2(Multigraph<Integer, DefaultEdge> ingraph, Integer[] weightedVertices, int weight) {
        ReductionSolution solution = new ReductionSolution();
        // VerticesToRemoved acts as solution set F
        solution.reducedGraph = (Multigraph<Integer, DefaultEdge>) ingraph.clone();
        // Skip initial filling of F, by definition, no weights are 0
        Stack<Integer> stack = new Stack<>(); // The STACK (TODO: currently unused — algorithm incomplete)
        cleanUp2(solution.reducedGraph, new LinkedList<>(solution.reducedGraph.vertexSet()));
        for (Integer v : ingraph.vertexSet()) {
            // TODO: weight-reduction loop not yet implemented
        }
        // Determine solution weight
        solution.totalFVSweight = 0;
        for (Integer v : solution.verticesToRemoved) {
            solution.totalFVSweight++;
            for (Integer u : weightedVertices) {
                if (v.equals(u)) {
                    solution.totalFVSweight += weight - 1;
                }
            }
        }
        return solution;
    }

    /**
     * Repeatedly removes vertices of degree <= 1, re-queueing their neighbors,
     * until no such vertex remains among the relevant vertices.
     *
     * @param graph the graph to clean, mutated in place
     * @param relevantVertices work list of vertices to inspect (consumed)
     */
    public static void cleanUp2(Multigraph<Integer, DefaultEdge> graph, LinkedList<Integer> relevantVertices) {
        while (!relevantVertices.isEmpty()) {
            Integer current = relevantVertices.pop();
            if (graph.containsVertex(current)) {
                if (graph.degreeOf(current) <= 1) {
                    // Removing this vertex may drop its neighbors to degree <= 1,
                    // so they must be revisited.
                    relevantVertices.addAll(Graphs.neighborListOf(graph, current));
                    graph.removeVertex(current);
                }
            }
        }
    }
}
|
package com.ge.research.sadl.jena.reasoner;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.activation.DataSource;
import org.apache.jena.atlas.web.HttpException;
//import org.apache.jena.larq.IndexBuilderString;
//import org.apache.jena.larq.IndexLARQ;
//import org.apache.jena.larq.LARQ;
//import org.apache.lucene.index.IndexReader;
//import org.apache.lucene.index.IndexReader.FieldOption;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ge.research.sadl.jena.reasoner.builtin.CancellableBuiltin;
import com.ge.research.sadl.jena.translator.JenaTranslatorPlugin;
import com.ge.research.sadl.jena.translator.JenaTranslatorPlugin.TranslationTarget;
import com.ge.research.sadl.model.Explanation;
import com.ge.research.sadl.model.ImportMapping;
import com.ge.research.sadl.model.gp.BuiltinElement;
import com.ge.research.sadl.model.gp.GraphPatternElement;
import com.ge.research.sadl.model.gp.Junction;
import com.ge.research.sadl.model.gp.NamedNode;
import com.ge.research.sadl.model.gp.RDFTypeNode;
import com.ge.research.sadl.model.gp.NamedNode.NodeType;
import com.ge.research.sadl.model.gp.Node;
import com.ge.research.sadl.model.gp.TripleElement;
import com.ge.research.sadl.model.gp.VariableNode;
import com.ge.research.sadl.reasoner.BuiltinInfo;
import com.ge.research.sadl.reasoner.ConfigurationException;
import com.ge.research.sadl.reasoner.ConfigurationItem;
import com.ge.research.sadl.reasoner.ConfigurationManagerFactory;
import com.ge.research.sadl.reasoner.ConfigurationOption;
import com.ge.research.sadl.reasoner.IConfigurationManager;
import com.ge.research.sadl.reasoner.ITranslator;
import com.ge.research.sadl.reasoner.InvalidDerivationException;
import com.ge.research.sadl.reasoner.InvalidNameException;
import com.ge.research.sadl.reasoner.ModelError;
import com.ge.research.sadl.reasoner.ModelError.ErrorType;
import com.ge.research.sadl.reasoner.QueryCancelledException;
import com.ge.research.sadl.reasoner.InferenceCanceledException;
import com.ge.research.sadl.reasoner.QueryParseException;
import com.ge.research.sadl.reasoner.Reasoner;
import com.ge.research.sadl.reasoner.ReasonerNotFoundException;
import com.ge.research.sadl.reasoner.ReasonerTiming;
import com.ge.research.sadl.reasoner.ResultSet;
import com.ge.research.sadl.reasoner.RuleNotFoundException;
import com.ge.research.sadl.reasoner.SadlJenaModelGetter;
import com.ge.research.sadl.reasoner.TripleNotFoundException;
import com.ge.research.sadl.reasoner.ConfigurationItem.NameValuePair;
import com.ge.research.sadl.utils.SadlUtils;
import com.ge.research.sadl.utils.StringDataSource;
import com.ge.research.sadl.utils.UtilsForJena;
import com.hp.hpl.jena.datatypes.DatatypeFormatException;
import com.hp.hpl.jena.datatypes.xsd.XSDDateTime;
import com.hp.hpl.jena.datatypes.xsd.XSDDuration;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.Node_Literal;
import com.hp.hpl.jena.graph.Node_URI;
import com.hp.hpl.jena.graph.Node_Variable;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.ontology.OntClass;
import com.hp.hpl.jena.ontology.OntDocumentManager;
import com.hp.hpl.jena.ontology.OntDocumentManager.ReadFailureHandler;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.ontology.OntProperty;
import com.hp.hpl.jena.ontology.Ontology;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.InfModel;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.ModelGetter;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.RDFReader;
import com.hp.hpl.jena.rdf.model.RDFWriter;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.reasoner.Derivation;
import com.hp.hpl.jena.reasoner.InfGraph;
import com.hp.hpl.jena.reasoner.TriplePattern;
import com.hp.hpl.jena.reasoner.ValidityReport;
import com.hp.hpl.jena.reasoner.ValidityReport.Report;
import com.hp.hpl.jena.reasoner.rulesys.Builtin;
import com.hp.hpl.jena.reasoner.rulesys.BuiltinRegistry;
import com.hp.hpl.jena.reasoner.rulesys.ClauseEntry;
import com.hp.hpl.jena.reasoner.rulesys.Functor;
import com.hp.hpl.jena.reasoner.rulesys.GenericRuleReasoner;
import com.hp.hpl.jena.reasoner.rulesys.Node_RuleVariable;
import com.hp.hpl.jena.reasoner.rulesys.Rule;
import com.hp.hpl.jena.reasoner.rulesys.Rule.ParserException;
import com.hp.hpl.jena.reasoner.rulesys.RuleDerivation;
import com.hp.hpl.jena.reasoner.rulesys.builtins.Product;
import com.hp.hpl.jena.shared.RulesetNotFoundException;
import com.hp.hpl.jena.util.FileManager;
import com.hp.hpl.jena.util.PrintUtil;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import com.hp.hpl.jena.vocabulary.OWL;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;
public class JenaReasonerPlugin extends Reasoner{
    protected static final Logger logger = LoggerFactory.getLogger(JenaReasonerPlugin.class);
    public static String ReasonerFamily = "Jena-Based";
    public static final String version = "$Revision: 1.18 $";
    private static String ReasonerCategory = "Jena";
    // Configuration option keys exposed by this reasoner plugin.
    public static final String pModelSpec = "pModelSpec";
    public static final String pTimeOut = "pTimeOut";
    public static final String pRuleMode = "pRuleMode";
    public static final String pOWLTranslation = "pOWLTranslation";
    public static final String pTransitiveClosureCaching = "pTransitiveClosureCaching";
    public static final String pTrace = "pTrace";
    public static final String pUseLuceneIndexer = "pUseLuceneIndexer";
    public static final String pLuceneIndexerClass = "pLuceneIndexerClass";
    public static final String pDerivationLogging = "pDerivationLogging";
    // Names of the supported Jena OntModelSpec variants (values for pModelSpec).
    protected static final String OWL_MEM_MICRO_RULE = "OWL_MEM_MICRO_RULE";
    protected static final String OWL_MEM_MINI_RULE = "OWL_MEM_MINI_RULE";
    protected static final String OWL_MEM = "OWL_MEM";
    protected static final String OWL_MEM_RDFS = "OWL_MEM_RDFS";
    protected static final String OWL_MEM_RULE = "OWL_MEM_RULE";
    protected static final String OWL_MEM_TRANS = "OWL_MEM_TRANS";
    protected static final String OWL_LITE_MEM_TRANS = "OWL_LITE_MEM_TRANS";
    protected static final String OWL_LITE_MEM = "OWL_LITE_MEM";
    // NOTE(review): constant name says RULE but value is "OWL_LITE_MEM_RULES" — confirm intended.
    protected static final String OWL_LITE_MEM_RULE = "OWL_LITE_MEM_RULES";
    protected static final String OWL_DL_MEM_RDFS = "OWL_DL_MEM_RDFS";
    protected static final String OWL_DL_MEM_RULE = "OWL_DL_MEM_RULE";
    protected static final String OWL_LITE_MEM_RDFS = "OWL_LITE_MEM_RDFS";
    protected static final String OWL_DL_MEM_TRANS = "OWL_DL_MEM_TRANS";
    protected static final String OWL_DL_MEM = "OWL_DL_MEM";
    protected static final String RDFS_MEM = "RDFS_MEM";
    protected static final String RDFS_MEM_TRANS = "RDFS_MEM_TRANS";
    protected static final String RDFS_MEM_RDFS = "RDFS_MEM_RDFS";
    // Derivation-logging levels (values for pDerivationLogging).
    protected static final String DERIVATION_NONE = "None";
    protected static final String DERIVATION_SHALLOW = "Shallow";
    protected static final String DERIVATION_DEEP = "Deep";
    // Keys under which timing measurements are recorded when collectTimingInfo is set.
    public static final String TIMING_LOAD_MODEL = "LoadModelTime";
    public static final String TIMING_LOAD_RULES = "LoadRulesTime";
    public static final String TIMING_PREPARE_INFMODEL = "PrepareInfModelTime";
    public static final String TIMING_PREPARE_QUERY = "PrepareQuery";
    public static final String TIMING_EXECUTE_QUERY = "ExecuteQueryTime";
    protected Boolean[] booleanOptions = {true, false};
    protected boolean collectTimingInfo = false;
    protected List<ReasonerTiming> timingInfo = null;
    protected final String folderNameSeparator = "/"; // the path separator for paths of model resources
    protected IConfigurationManager configurationMgr;
    protected List<ImportMapping> imports = null;
    protected List<String> ruleFilesLoaded;
    // Rules currently loaded into the GenericRuleReasoner (see getLoadedRules()).
    protected List<Rule> ruleList;
    protected OntModelSpec modelSpec;
    protected String tbox;
    // Schema (TBox) model; see getSchemaModel().
    protected OntModel schemaModel;
    protected boolean schemaModelIsCachedInferredModel = false;
    protected String aboxActualUrl = null; // if instance data has been loaded from a single URL source, this remembers it
    protected int dataModelSourceCount = 0; // number of sources of data
    protected OntModel dataModel;
    protected GenericRuleReasoner reasoner = null;
    protected GenericRuleReasoner preBoundReasoner = null;
    protected Model infModel;
    protected Dataset infDataset = null;
    protected boolean newInputFlag = false;
    protected boolean initialized = false;
    @SuppressWarnings("unused")
    private boolean explanationsEnabled = false;
    @SuppressWarnings("unused")
    private String luceneIndexerClass = null;
    // private FileAppender traceAppender = null;
    private String outputFormat = "N-TRIPLE";
    private String modelName;
    protected String instDataNS;
    private long tboxLoadTime = 0L;
    private boolean derivationLogging = false;
    private long queryTimeout = -1L; // Query timeout, -1 means no timeout
    // // repo stuff
    private String repoType = null;
    protected List<ConfigurationItem> preferences = null;
    private OntModel tboxModelWithSpec;
    private List<ModelError> newErrors = null;
    ; // stray empty declaration — harmless; candidate for removal
public JenaReasonerPlugin() {
// these will have been loaded by the translator and added to the configuration if they are needed
// String pkg = "com.ge.research.sadl.jena.reasoner.builtin.";
// addBuiltin("abs", pkg + "Abs");
// addBuiltin("average", pkg + "Average");
// addBuiltin("ceiling", pkg + "Ceiling");
// addBuiltin("floor", pkg + "Floor");
// addBuiltin("max", pkg + "Max");
// addBuiltin("min", pkg + "Min");
// addBuiltin("noSubjectsOtherThan", pkg + "NoSubjectsOtherThan");
// addBuiltin("notOnlyValue", pkg + "NotOnlyValue");
// addBuiltin("noUnknownValues", pkg + "NoUnknownValues");
// addBuiltin("noValuesOtherThan", pkg + "NoValuesOtherThan");
// addBuiltin("pow", pkg + "Pow");
// addBuiltin("print", pkg + "Print");
// addBuiltin("product", pkg + "Product");
// addBuiltin("sqrt", pkg + "Sqrt");
// addBuiltin("subtractDates", pkg + "SubtractDates");
// addBuiltin("sum", pkg + "Sum");
}
/**
* Method used by translators that need the OntModel with import closure for translation
*
* @return
*/
public OntModel getSchemaModel() {
return schemaModel;
}
/**
* Method to return a list of all rules loaded by the reasoner
*
* @return
*/
public List<Rule> getLoadedRules() {
return ruleList;
}
/**
* Method to set the ConfigurationManager. If not set, a new one will be created.
*
* @param configMgr
* @throws ConfigurationException
*/
public void setConfigurationManager(IConfigurationManager configMgr) throws ConfigurationException {
// if ((configMgr instanceof IConfigurationManagerForEditing)) {
// ((IConfigurationManagerForEditing) configMgr).setReasonerClassName(this.getClass().getCanonicalName());
configurationMgr = configMgr;
}
public int initializeReasoner(String folderName, String modelName, String _repoType) throws ReasonerNotFoundException, ConfigurationException {
return initializeReasoner(folderName, modelName, null, _repoType);
}
@SuppressWarnings("unused")
public GenericRuleReasoner getReasonerOnlyWhenNeeded() throws ConfigurationException {
if (reasoner != null) {
return reasoner;
}
this.ruleFilesLoaded = new ArrayList<String>();
this.ruleList = new ArrayList<Rule>();
try {
if (!configurationMgr.getModelGetter().modelExists(getModelName(), tbox)) {
if (tbox.equals(getModelName())) {
throw new ConfigurationException("The model '" + getModelName() + "' does not have a mapping and was not found.");
}
else {
throw new ConfigurationException("The model with actual URL '" + tbox + "' and name '" + getModelName() + "' does not appear to exist.");
}
}
} catch (MalformedURLException e) {
throw new ConfigurationException("The actual file URL '" + tbox + "' for model '" + getModelName() + "' is not well-formed.");
}
String derval = getStringConfigurationValue(preferences , pDerivationLogging, DERIVATION_NONE);
derivationLogging = (derval != null && !derval.equals(DERIVATION_NONE));
modelSpec = getModelSpec(preferences); // get this for later use when creating InfModel
logger.debug("JenaReasonerPlugin.initializeReasoner, tbox = "+tbox);
try {
if (!tbox.startsWith("file:") && !tbox.startsWith("http:")) {
//assume local file
SadlUtils su = new SadlUtils();
tbox = su.fileNameToFileUrl(tbox);
logger.debug("JenaReasonerPlugin.initializeReasoner, modified tbox = "+tbox);
}
String format = repoType;
if (!validateFormat(format)) {
throw new ConfigurationException("Format '" + format + "' is not supported by reasoner '" + getConfigurationCategory() + "'.");
}
if (format.equals(IConfigurationManager.JENA_TDB)) {
schemaModel = configurationMgr.getModelGetter().getOntModel(getModelName(), tbox, format);
schemaModel.getDocumentManager().setProcessImports(true);
// schemaModel.loadImports();
}
else {
if (tbox.endsWith(".TDB/")) {
// this is a cached inferred TDB model
schemaModel = configurationMgr.getModelGetter().getOntModel(getModelName(), tbox, format);
schemaModelIsCachedInferredModel = true;
return null;
}
else {
schemaModel = ModelFactory.createOntologyModel(configurationMgr.getOntModelSpec(null));
ReadFailureHandler rfHandler = new SadlReadFailureHandler(logger);
schemaModel.getDocumentManager().setProcessImports(true);
schemaModel.getDocumentManager().setReadFailureHandler(rfHandler );
schemaModel.getSpecification().setImportModelGetter((ModelGetter) configurationMgr.getModelGetter());
schemaModel.read(tbox, format);
}
}
} catch (Exception e1) {
e1.printStackTrace();
}
if (logger.isDebugEnabled()) {
logger.debug("schemaModel '" + getModelName() + "' with tbox '" + tbox + "' loaded");
dumpModelToLogger(schemaModel);
}
loadImports();
logger.debug("JenaReasonerPlugin.initializeReasoner, imports size = " + (imports == null ? 0 : imports.size()));
long t2 = System.currentTimeMillis();
loadRules(schemaModel, getModelName());
logger.debug("JenaReasonerPluging.initialize, size of ruleList = "+ruleList.size());
reasoner = new GenericRuleReasoner(ruleList);
reasoner.setDerivationLogging(derivationLogging);
logger.debug("JenaReasonerPluging.initialize, size of ruleList from reasoner = "+reasoner.getRules().size());
reasoner.setMode(getRuleMode(preferences));
long t3 = System.currentTimeMillis();
if (collectTimingInfo) {
timingInfo.add(new ReasonerTiming(TIMING_LOAD_MODEL, "load ontology model", t2 - tboxLoadTime));
int numRules = ruleList.size();
timingInfo.add(new ReasonerTiming(TIMING_LOAD_RULES, "load model " + numRules + " rules", t3 - t2));
}
long t4;
if (collectTimingInfo) {
t4 = System.currentTimeMillis();
timingInfo.add(new ReasonerTiming(TIMING_LOAD_RULES, "bind schema to reasoner", t4 - t3));
}
boolean transitiveClosure = getBooleanConfigurationValue(preferences, pTransitiveClosureCaching, false);
reasoner.setTransitiveClosureCaching(transitiveClosure);
reasoner.setOWLTranslation(getBooleanConfigurationValue(preferences, pOWLTranslation, false));
boolean bTrace = getBooleanConfigurationValue(preferences, pTrace, false);
reasoner.setTraceOn(bTrace);
if (bTrace) {
// traceAppender = new FileAppender();
// configure the appender here, with file location, etc
File tboxfile = null;
try {
SadlUtils su = new SadlUtils();
tboxfile = new File(su.fileUrlToFileName(tbox));
} catch (MalformedURLException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
// if (tboxfile != null && tboxfile.exists()) {
// File modelfolder = tboxfile.getParentFile();
// try {
// traceAppender.setFile(modelfolder.getCanonicalPath() + File.separator + "Temp" + File.separator + "trace.log");
// traceAppender.setImmediateFlush(false);
// traceAppender.activateOptions();
// } catch (IOException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// traceAppender = null;
// else {
// traceAppender = null;
}
else {
// traceAppender = null;
}
if (getBooleanConfigurationValue(preferences, pUseLuceneIndexer, false)) {
luceneIndexerClass = getStringConfigurationValue(preferences, pLuceneIndexerClass, "com.ge.research.sadl.jena.reasoner.LuceneModelIndexerImpl");
}
String strTimeOut = getStringConfigurationValue(preferences, pTimeOut, "-1");
try {
queryTimeout = Long.parseLong(strTimeOut.trim());
}
catch (NumberFormatException e) {
String msg = "Invalid timeout value '" + strTimeOut + "'";
logger.error(msg); addError(new ModelError(msg, ErrorType.ERROR));
}
return reasoner;
}
private boolean validateFormat(String format) {
if (format == null ||
(!format.equals(IConfigurationManager.JENA_TDB) &&
!format.equals(IConfigurationManager.N3_FORMAT) &&
!format.equals(IConfigurationManager.N_TRIPLE_FORMAT) &&
!format.equals(IConfigurationManager.RDF_XML_ABBREV_FORMAT) &&
!format.equals(IConfigurationManager.RDF_XML_FORMAT))) {
return false;
}
return true;
}
public int initializeReasoner(String folderName, String _modelName,
List<ConfigurationItem> _preferences, String _repoType) throws ReasonerNotFoundException, ConfigurationException {
preferences = _preferences;
repoType = _repoType;
setModelName(_modelName);
if (timingInfo == null) {
timingInfo = new ArrayList<ReasonerTiming>();
}
else {
timingInfo.clear();
}
tboxLoadTime = System.currentTimeMillis();
if (configurationMgr == null) {
// Get the correct Mappings from the policy file
OntDocumentManager mgr = OntDocumentManager.getInstance();
mgr.reset();
// mgr.setProcessImports(true);
configurationMgr = ConfigurationManagerFactory.getConfigurationManager(folderName, repoType);
}
//Get the real tbox and rule file path
tbox = configurationMgr.getAltUrlFromPublicUri(getModelName());
if (tbox == null) {
throw new ConfigurationException("No mapping to an actual URL found for model '" + getModelName() + "'.");
}
String format = repoType;
try {
String tdbFolder = configurationMgr.getTdbFolder();
if (configurationMgr.getModelGetter() == null) {
configurationMgr.setModelGetter(new SadlJenaModelGetter(configurationMgr, tdbFolder));
}
format = configurationMgr.getModelGetter().getFormat();
if (!format.equals(IConfigurationManager.JENA_TDB)) {
String ext = tbox.substring(tbox.lastIndexOf('.'));
format = "RDF/XML-ABBREV"; // this will create a reader that will handle either RDF/XML or RDF/XML-ABBREV
if (ext.equalsIgnoreCase(".n3")) {
format = "N3";
}
else if (ext.equalsIgnoreCase(".ntriple") || ext.equalsIgnoreCase(".nt")) {
format = "N-TRIPLE";
}
configurationMgr.getModelGetter().setFormat(format);
}
}
catch (IOException e) {
e.printStackTrace();
}
initialized = true;
return 1;
}
private void dumpModelToLogger(OntModel model) {
ByteArrayOutputStream os = new ByteArrayOutputStream();
model.write(os);
try {
String aString = new String(os.toByteArray(),"UTF-8");
logger.debug(aString);
} catch (UnsupportedEncodingException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
private void loadImports() {
if (configurationMgr != null) {
try {
imports = configurationMgr.loadImportedModel(schemaModel.getOntology(modelName),
schemaModel, modelName, null);
} catch (Throwable t) {
// TODO Auto-generated catch block
t.printStackTrace();
}
}
}
public int initializeReasoner(URI modelFile, String modelName,
List<ConfigurationItem> preferences, String _repoType) throws ReasonerNotFoundException {
ruleList = null;
try {
initializeReasoner(modelFile.toString(), modelName, preferences, _repoType);
} catch (ConfigurationException e) {
e.printStackTrace();
}
return 0;
}
public int initializeReasoner(URI modelFile, String modelName, String _repoType)
throws ReasonerNotFoundException {
try {
initializeReasoner(modelFile.toString(), modelName, _repoType);
} catch (ConfigurationException e) {
e.printStackTrace();
}
return 0;
}
public boolean loadRules(String ruleFileName) throws IOException {
if (ruleFileName != null) {
try {
InputStream in = configurationMgr.getJenaDocumentMgr().getFileManager().open(ruleFileName);
if (in != null) {
try {
InputStreamReader isr = new InputStreamReader(in);
BufferedReader br = new BufferedReader(isr);
List<Rule> rules = Rule.parseRules(Rule.rulesParserFromReader(br));
if (rules != null) {
ruleList.addAll(rules);
newInputFlag = true;
return true;
}
} catch (ParserException e) {
String msg = "Error reading rule file '" + ruleFileName + "': " + e.getMessage();
logger.error(msg);
addError(new ModelError(msg, ErrorType.ERROR));
}
finally {
in.close();
}
}
}
catch (RulesetNotFoundException e) {
// ok if not found
return false;
}
catch (HttpException e) {
// ok if not found
return false;
}
}
// dataModelSourceCount++;
return false;
}
public boolean loadRules(URI ruleFileName) throws IOException {
if (ruleFileName != null) {
ruleList.addAll(Rule.rulesFromURL(ruleFileName.toString()));
newInputFlag = true;
dataModelSourceCount++;
return true;
}
//TODO this needs to handle the case where there are no rules
else
return false;
}
public boolean addRules(List<String> rules) {
for(String f:rules)
ruleList.add(Rule.parseRule(f));
if (preBoundReasoner != null) {
reasoner = preBoundReasoner;
preBoundReasoner = null;
}
try {
if (getReasonerOnlyWhenNeeded() != null) {
getReasonerOnlyWhenNeeded().setRules(ruleList);
}
} catch (ConfigurationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
newInputFlag = true;
dataModelSourceCount++;
return true;
}
public boolean addRule(String rule) {
Rule newRule = Rule.parseRule(rule);
try {
deleteRule(newRule.getName());
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
ruleList.add(newRule);
if (preBoundReasoner != null) {
reasoner = preBoundReasoner;
preBoundReasoner = null;
}
try {
if (getReasonerOnlyWhenNeeded() != null) {
getReasonerOnlyWhenNeeded().setRules(ruleList);
}
} catch (ConfigurationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
newInputFlag = true;
dataModelSourceCount++;
return true;
}
public boolean deleteRule(String ruleName) throws RuleNotFoundException {
try {
getReasonerOnlyWhenNeeded();
} catch (ConfigurationException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
for (int i=0; i<ruleList.size();i++){
Rule r = ruleList.get(i);
String rName = new String(r.getName());
if(rName.equals(ruleName)){
ruleList.remove(r);
if (preBoundReasoner != null) {
reasoner = preBoundReasoner;
preBoundReasoner = null;
}
try {
if (getReasonerOnlyWhenNeeded() != null) {
getReasonerOnlyWhenNeeded().setRules(ruleList);
}
} catch (ConfigurationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
newInputFlag = true;
return true;
}
}
dataModelSourceCount++;
return false;
}
public boolean loadInstanceData(String instanceDatafile) throws IOException, ConfigurationException {
if (!instanceDatafile.startsWith("file:") && !instanceDatafile.startsWith("http:")) {
try {
SadlUtils su = new SadlUtils();
instanceDatafile = su.fileNameToFileUrl(instanceDatafile);
} catch (URISyntaxException e) {
throw new IOException(e);
}
}
getReasonerOnlyWhenNeeded();
initializeDataModel();
dataModel.add(dataModel.getDocumentManager().getFileManager().loadModel(instanceDatafile));
addModelNamespaceToJenaMapAsEmptyPrefix(dataModel);
newInputFlag = true;
dataModelSourceCount++;
if (dataModelSourceCount == 1) {
aboxActualUrl = instanceDatafile;
}
return true;
}
public boolean loadInstanceData(URI instanceDatafile) throws IOException, ConfigurationException {
initializeDataModel();
dataModel.add(FileManager.get().loadModel(instanceDatafile.toString()));
addModelNamespaceToJenaMapAsEmptyPrefix(dataModel);
newInputFlag = true;
dataModelSourceCount++;
if (dataModelSourceCount == 1) {
aboxActualUrl = instanceDatafile.toString();
}
return true;
}
public boolean loadInstanceData(OntModel model) throws ConfigurationException {
getReasonerOnlyWhenNeeded();
if (dataModel == null) {
initializeDataModel();
}
dataModel.add(model);
dataModelSourceCount++;
newInputFlag = true;
return true;
}
public boolean loadInstanceData(InputStream is, String format) throws IOException, ConfigurationException {
try {
BufferedReader in = new BufferedReader(new InputStreamReader(is));
String base = null;
initializeDataModel();
OntModel newModel = ModelFactory.createOntologyModel(configurationMgr.getOntModelSpec(null));
if (format != null) {
RDFReader reader = newModel.getReader(format);
reader.read(newModel, is, base);
}
else {
RDFReader reader = newModel.getReader();
reader.read(newModel, is, base);
}
dataModel.add(newModel);
in.close();
addModelNamespaceToJenaMapAsEmptyPrefix(dataModel);
newInputFlag = true;
dataModelSourceCount++;
return true;
} catch (IOException e) {
e.printStackTrace();
}
return false;
}
private boolean addModelNamespaceToJenaMapAsEmptyPrefix(OntModel abmodel) {
String instDataBaseUri = getBaseUriOfModel(abmodel);
if (instDataBaseUri != null) {
setInstanceDataNS(instDataBaseUri + "
abmodel.setNsPrefix("", getInstanceDataNS());
return true;
}
return false;
}
private String getBaseUriOfModel(OntModel model) {
String modelBaseUri = null;
Set<String> importuris = model.listImportedOntologyURIs(true);
ExtendedIterator<Ontology> ontItr = model.listOntologies();
if (ontItr.hasNext()) {
while (ontItr.hasNext()) {
Ontology ont = ontItr.next();
if (modelBaseUri == null) {
modelBaseUri = ont.getURI(); // first is default incase imports are circular
}
if (!importuris.contains(ont.getURI())) {
modelBaseUri = ont.getURI();
break;
}
}
}
return modelBaseUri;
}
public boolean addTriple(String sub, String pred, String obj)
throws TripleNotFoundException, ConfigurationException {
getReasonerOnlyWhenNeeded();
initializeDataModel();
Statement s = null;
try {
s = prepareStatement(sub, pred, obj);
dataModel.add(s);
} catch (InvalidNameException e) {
throw new TripleNotFoundException("Unable to prepare triple (" + sub + ", " + pred + ", " + obj + "): " + e.getMessage());
} catch (MalformedURLException e) {
throw new TripleNotFoundException("Unable to prepare triple (" + sub + ", " + pred + ", " + obj + "): " + e.getMessage());
}
newInputFlag = true;
dataModelSourceCount++;
return true;
}
protected void initializeDataModel() throws ConfigurationException {
if (dataModel == null) {
if (schemaModel == null) {
getReasonerOnlyWhenNeeded();
}
dataModel = ModelFactory.createOntologyModel(configurationMgr.getOntModelSpec(null));
String instNS = getInstanceDataNS();
if (instNS != null) {
Resource importOnt = dataModel.getResource(getModelName());
dataModel.createOntology(getInstanceModelName()).addImport(importOnt);
dataModel.getDocumentManager().setProcessImports(true);
dataModel.loadImports();
dataModel.getDocumentManager().setProcessImports(false);
}
else if (schemaModel != null) {
dataModel.add(schemaModel);
}
infModel = null;
}
}
public boolean deleteTriple(String sub, String pred, String obj)
throws TripleNotFoundException, ConfigurationException {
try {
getReasonerOnlyWhenNeeded();
if (dataModel != null) {
RDFNode[] spo = prepareSubjectPredicateObject(sub, pred, obj);
if (spo != null) {
StmtIterator stI = dataModel.listStatements((Resource)spo[0], (Property)spo[1], spo[2]);
if (stI.hasNext()) {
List<Statement> stmtsToRemove = new ArrayList<Statement>();
while (stI.hasNext()) {
Statement stmt = stI.nextStatement();
stmtsToRemove.add(stmt);
}
stI.close();
for (int i = 0; i < stmtsToRemove.size(); i++) {
dataModel.remove(stmtsToRemove.get(i));
}
newInputFlag = true;
dataModelSourceCount++;
return true;
}
}
else {
return false;
}
}
} catch (InvalidNameException e) {
throw new TripleNotFoundException("Unable to prepare triple (" + sub + ", " + pred + ", " + obj + "): " + e.getMessage());
} catch (MalformedURLException e) {
throw new TripleNotFoundException("Unable to prepare triple (" + sub + ", " + pred + ", " + obj + "): " + e.getMessage());
}
return false;
}
public void updateTriple(String oldSub, String oldPred, String oldObj,
String newSub, String newPred, String newObj)
throws TripleNotFoundException, ConfigurationException {
this.deleteTriple(oldSub, oldPred, oldObj);
this.addTriple(newSub, newPred, newObj);
dataModelSourceCount++;
newInputFlag = true;
}
public List<ModelError> checkModelValidity() {
List<ModelError> results = null;
try {
getReasonerOnlyWhenNeeded();
generateTboxModelWithSpec();
if (tboxModelWithSpec != null) {
try {
ValidityReport report = tboxModelWithSpec.validate();
if (report == null) {
try {
prepareInfModel();
} catch (ConfigurationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if (infModel != null) {
if (infModel instanceof InfModel) {
report = ((InfModel) infModel).validate();
}
else {
results = new ArrayList<ModelError>();
results.add(new ModelError("Unable to validate; model is not an InfModel", ErrorType.WARNING));
}
}
}
if (report != null) {
if (report.isClean()) {
return null;
}
Iterator<Report> rptitr = report.getReports();
results = new ArrayList<ModelError>();
while (rptitr.hasNext()) {
Report rpt = rptitr.next();
if (rpt.isError()) {
results.add(new ModelError(rpt.toString(), ErrorType.ERROR));
}
else {
results.add(new ModelError(rpt.toString(), ErrorType.WARNING));
}
}
}
else {
results = new ArrayList<ModelError>(1);
results.add(new ModelError("Failed to complete validity check.", ErrorType.ERROR));
}
}
catch (DatatypeFormatException e) {
if (results == null) {
results = new ArrayList<ModelError>();
}
results.add(new ModelError("Exception while validating model: " + e.getLocalizedMessage(), ErrorType.ERROR));
}
catch (Throwable t) {
t.printStackTrace();
if (results == null) {
results = new ArrayList<ModelError>();
}
results.add(new ModelError("Exception while validating model: " + t.getLocalizedMessage(), ErrorType.ERROR));
}
}
else {
results = new ArrayList<ModelError>(1);
results.add(new ModelError("Failed to obtain an inferred model on which to do a validity check.", ErrorType.ERROR));
}
} catch (ConfigurationException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
return results;
}
public DataSource construct(String constructQuery)
throws QueryParseException, QueryCancelledException {
startTrace();
QueryExecution qexec = null;
try {
prepareInfModel();
} catch (ConfigurationException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
Model results = null;
try {
long t1 = System.currentTimeMillis();
qexec = QueryExecutionFactory.create(QueryFactory.create(constructQuery, Syntax.syntaxARQ), this.infModel);
qexec.setTimeout(queryTimeout);
results = qexec.execConstruct();
StmtIterator sitr = results.listStatements(null, RDF.type, OWL.Ontology);
while (sitr.hasNext()) {
Statement s = sitr.nextStatement();
Resource r = s.getSubject();
r.addProperty(RDFS.comment, results.createTypedLiteral("This model is output of " + this.getClass().getName()));
}
ByteArrayOutputStream out = new ByteArrayOutputStream();
results.write(out, getOutputFormat());
String data = out.toString();
StringDataSource ds = new StringDataSource(data, "text/plain");
if (getOutputFormat().equals("N-TRIPLE") || getOutputFormat().equals("N3")) {
ds.setName(getOutputFormat());
}
else {
ds.setName("OWL");
}
if (collectTimingInfo) {
long t2 = System.currentTimeMillis();
timingInfo.add(new ReasonerTiming(TIMING_EXECUTE_QUERY, constructQuery, t2 - t1));
}
return ds;
}
catch (com.hp.hpl.jena.query.QueryCancelledException e) {
e.printStackTrace();
logger.error("query timed out with Exception: " + e.getMessage());
throw new QueryCancelledException("Construct Query '" + constructQuery + "' timed out: " + e.getLocalizedMessage());
}
catch (Exception e) {
e.printStackTrace();
logger.error("query failed with Exception: " + e.getMessage());
throw new QueryParseException("Construct Query '" + constructQuery + "' failed: " + e.getLocalizedMessage());
}
finally {
if (qexec != null) qexec.close();
endTrace();
}
}
public ResultSet ask(String askQuery) throws QueryParseException, QueryCancelledException {
boolean cancelled = false;
ResultSet rs = null;
// synchronized(ReasonerFamily) {
try {
startTrace();
QueryExecution qexec = null;
com.hp.hpl.jena.query.ResultSet results = null;
long t1 = System.currentTimeMillis();
prepareInfModel();
try {
// IndexLARQ index = null;
// if (askQuery.contains("http://jena.hpl.hp.com/ARQ/property#textMatch")) {
// // this query uses Lucene
// if (luceneIndexerClass != null) {
// ILuceneModelIndexer indexer = (ILuceneModelIndexer) Class.forName(luceneIndexerClass).newInstance();
//// OntModel om = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, infModel);
// indexer.setModel(dataModel != null ? dataModel : schemaModel); //indexer.setModel(om);
// IndexBuilderString larqBuilder = indexer.buildModelIndex();
// index = larqBuilder.getIndex();
// else {
// FieldOption fo = IndexReader.FieldOption.ALL;
// IndexBuilderString larqBuilder = new IndexBuilderString();
// larqBuilder.indexStatements(this.infModel.listStatements());
// // larqBuilder.indexStatement(s);
// larqBuilder.closeWriter();
// index = larqBuilder.getIndex();
if (infDataset != null) {
qexec = QueryExecutionFactory.create(QueryFactory.create(askQuery, Syntax.syntaxARQ),infDataset);
}
else {
qexec = QueryExecutionFactory.create(QueryFactory.create(askQuery, Syntax.syntaxARQ), this.infModel);
}
// if (index != null) {
// LARQ.setDefaultIndex(qexec.getContext(), index);
qexec.setTimeout(queryTimeout);
if (askQuery.trim().substring(0, 3).equals("ask")) {
boolean askResult = qexec.execAsk();
String[] columnName = new String[1];
columnName[0] = "ask";
Object array[][] = new Object[1][1];
array[0][0] = askResult;
rs = new ResultSet(columnName, array);
}
else if (askQuery.trim().substring(0, 9).equals("construct")) {
Model constructModel = qexec.execConstruct();
if (constructModel != null) {
StmtIterator sitr = constructModel.listStatements();
if (sitr.hasNext()) {
String[] columnName = new String[3];
columnName[0] = qexec.getQuery().getProjectVars().get(0).getVarName();
columnName[1] = qexec.getQuery().getProjectVars().get(1).getVarName();
columnName[2] = qexec.getQuery().getProjectVars().get(2).getVarName();
List<Object[]> dataList = new ArrayList<Object[]>();
while (sitr.hasNext()) {
Statement stmt = sitr.nextStatement();
Object[] row = new Object[3];
row[0] = stmt.getSubject().toString();
row[1] = stmt.getPredicate().toString();
RDFNode val = stmt.getObject();
if (val instanceof Resource) {
row[2] = ((Resource)val).toString();
}
else if (val instanceof Literal) {
row[2] = ((Literal)val).getValue();
}
else {
row[2] = val.toString();
}
dataList.add(row);
}
Object[][] data = new Object[dataList.size()][3];
for (int r = 0; r < dataList.size(); r++) {
for (int c = 0; c < 3; c++) {
data[r][c] = ((Object[]) dataList.get(r))[c];
}
}
rs = new ResultSet(columnName, data);
}
}
}
else {
results = qexec.execSelect();
rs = convertFromJenaResultSetToReasonerResultSet(results);
}
if (collectTimingInfo) {
long t2 = System.currentTimeMillis();
timingInfo.add(new ReasonerTiming(TIMING_EXECUTE_QUERY, "execute query (" + askQuery + ")", t2 - t1));
}
}
catch (com.hp.hpl.jena.query.QueryCancelledException e) {
rs = null;
cancelled = true;
throw new QueryCancelledException("Query timed out (" + queryTimeout + " seconds): '" + askQuery + "'\n");
}
catch (InferenceCanceledException e) {
rs = null;
throw e;
}
catch (Exception e) {
rs = null;
e.printStackTrace();
logger.error("query failed with Exception: " + e.getMessage());
throw new QueryParseException("Query '" + askQuery + "' failed: " + e.getLocalizedMessage(), e);
}
finally { if (!cancelled && qexec != null) qexec.close(); }
endTrace();
} catch (ConfigurationException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
finally {
}
return rs;
}
protected ResultSet convertFromJenaResultSetToReasonerResultSet(com.hp.hpl.jena.query.ResultSet results) {
if (!results.hasNext()) {
return null;
}
ArrayList<ArrayList<Object>> o = new ArrayList<ArrayList<Object>>();
List<String> queryVars = results.getResultVars();
String[] columnName = new String[queryVars.size()];
columnName = queryVars.toArray(columnName);
while (results.hasNext()) {
QuerySolution soln = results.next();
ArrayList<Object> temp = new ArrayList<Object>();
for (int j = 0; j < columnName.length; j++) {
RDFNode n = soln.get(columnName[j]);
if (n != null && n.isLiteral()) {
Object val = ((Literal)n).getValue();
if (val instanceof XSDDateTime) {
temp.add(((XSDDateTime)val).asCalendar().getTime());
}
else if (val instanceof XSDDuration) {
temp.add(((XSDDuration)val).toString());
}
else {
temp.add(val);
}
}
else if (n != null && n.isResource()) {
if (!((Resource)n).isAnon()){
temp.add(((Resource)n).getURI());
}
else {
temp.add(n.toString() + "(blank node)");
}
}
else {
temp.add(n == null? n : n.toString()); // for queries with OPTIONAL n can be null
}
}
o.add(temp);
}
Object array[][] = new Object[o.size()][columnName.length];
for(int i=0; i<o.size(); i++)
array[i] = (o.get(i)).toArray(new Object[columnName.length]);
ResultSet rs = new ResultSet(columnName, array);
return rs;
}
public ResultSet ask(String sub, String pred, String obj)
throws TripleNotFoundException {
startTrace();
try {
prepareInfModel();
} catch (ConfigurationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
long t1 = 0L;
ReasonerTiming rt = null;
if (collectTimingInfo) {
rt = new ReasonerTiming(TIMING_EXECUTE_QUERY, "ask(" + sub + "," + pred + "," + obj + ")", 0);
t1 = System.currentTimeMillis();
}
int i = 0;
boolean bIncludeSubject = false;
boolean bIncludePredicate = false;
boolean bIncludeObject = false;
int colcount = 0;
ArrayList<ArrayList<Object>> result = new ArrayList<ArrayList<Object>>();
Resource r = null;
if (sub != null) {
r = ResourceFactory.createResource(sub);
}
else {
bIncludeSubject = true;
colcount++;
}
Property p = null;
if (pred != null) {
p = ResourceFactory.createProperty(pred);
}
else {
bIncludePredicate = true;
colcount++;
}
RDFNode n = null;
if (obj != null) {
Object objVal = null;
if (!obj.startsWith("http:
objVal = xsdStringToObject(obj);
if (objVal != null) {
if (dataModel != null) {
n = dataModel.createTypedLiteral(objVal);
}
else {
n = infModel.createTypedLiteral(objVal);
}
}
}
if (n == null) {
n = ResourceFactory.createResource(obj);
}
}
else {
bIncludeObject = true;
colcount++;
}
StmtIterator stI = this.infModel.listStatements(r, p, n);
if (!stI.hasNext()) {
return null;
}
while(stI.hasNext()){
Statement s = stI.next();
result.add(new ArrayList<Object>());
if (sub == null) {
result.get(i).add(s.getSubject().getURI());
}
if (pred == null) {
result.get(i).add(s.getPredicate().getURI());
}
if (obj == null) {
RDFNode objval = s.getObject();
if (objval != null && objval.isLiteral()) {
result.get(i).add(((Literal)objval).getValue());
}
else if (objval != null && objval.isResource() && !((Resource)objval).isAnon()){
result.get(i).add(((Resource)objval).getURI());
}
else {
result.get(i).add(objval); // for queries with OPTIONAL n can be null
}
}
i++;
}
Object[][] array = new Object[result.size()][result.get(0).size()];
for(int j=0; j< result.size(); j++)
array[j] = result.get(j).toArray();
final String[] columnName = new String[colcount]; // {"Subject", "Predicate", "Object"};
int colnameindex = 0;
if (bIncludeSubject) {
columnName[colnameindex++] = "Subject";
}
if (bIncludePredicate) {
columnName[colnameindex++] = "Predicate";
}
if (bIncludeObject) {
columnName[colnameindex++] = "Object";
}
ResultSet rs = new ResultSet(columnName, array);
endTrace();
if (collectTimingInfo && rt != null) {
long t2 = System.currentTimeMillis();
rt.setMilliseconds(t2 - t1);
timingInfo.add(rt);
}
return rs;
}
public List<Explanation> explain(String rulename) {
startTrace();
try {
if (getReasonerOnlyWhenNeeded() != null) {
getReasonerOnlyWhenNeeded().setDerivationLogging(true);
List<Rule> rules = getReasonerOnlyWhenNeeded().getRules();
for (int i = 0; i < rules.size(); i++) {
String ruleName = rules.get(i).getName();
if (ruleName != null && ruleName.equals(rulename)) {
return explainRule(rules.get(i), null);
}
}
List<Explanation> explanations = new ArrayList<Explanation>();
Explanation expl = new Explanation(null, "Failed to get explanation for rule '" + rulename + "'. Rule not in loaded rule set.");
explanations.add(expl);
endTrace();
return explanations;
}
} catch (ConfigurationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
/**
 * Explain a rule by testing cumulative prefixes of its premises against the
 * inference model: first premise 1 alone, then premises 1..2, and so on.
 * Each premise receives an Explanation entry whose text reports how many
 * matches the premise prefix ending at it produced. Iteration stops after
 * the first prefix with zero matches or when all premises have been tested.
 *
 * @param rule the Jena rule whose premises are to be tested
 * @param explains an existing explanation list to append to, or null to start a new one
 * @return the (possibly newly created) explanation list
 */
protected List<Explanation> explainRule(Rule rule, List<Explanation> explains) {
    if (explains == null) {
        explains = new ArrayList<Explanation>();
    }
    ClauseEntry[] premises = rule.getBody();
    // index of the first Explanation this call adds (the list may already contain entries)
    int startingIndex = (explains.size() > 0) ? explains.size() : 0;
    int testingThruPremise = 0; // start with first premise only
    int resultsFound = 0;
    List<String> premisesAsStrings = new ArrayList<String>();
    do {
        // Build a SPARQL query covering premises 0..testingThruPremise.
        String sparqlSelect = "select ";
        String filterString = null;
        List<String> selectVars = new ArrayList<String>();
        String sparqlWhere = " where {";
        int whereCnt = 0;
        for (int pi = 0; premises != null && pi <= testingThruPremise && pi < premises.length; pi++) {
            if (premises[pi] instanceof TriplePattern) {
                if (whereCnt++ > 0) {
                    sparqlWhere += " . ";
                }
                String tripleStr = generateTripleStringWithVars(selectVars, (TriplePattern)premises[pi]);
                sparqlWhere += tripleStr;
                if (pi == testingThruPremise) {
                    // newly reached premise: create its Explanation entry
                    GraphPatternElement gpe = tripleElementFromTriplePattern((TriplePattern)premises[pi]);
                    Explanation exp = new Explanation(gpe);
                    exp.setPatternPrefix("Rule " + rule.getName() + ": Premise " + (pi + 1) + " of " + premises.length + ": ");
                    explains.add(exp);
                }
            }
            else if (premises[pi] instanceof Functor) {
                // builtin invocation: translate into a FILTER clause where possible
                String functorName = ((Functor)premises[pi]).getName();
                com.hp.hpl.jena.graph.Node[] args = ((Functor)premises[pi]).getArgs();
                @SuppressWarnings("unused")
                String functorMsg = functorName + "(";
                for (int ai = 0; args != null && ai < args.length; ai++) {
                    functorMsg += nodeShortString(args[ai]) + (ai < (args.length - 1) ? ", " : "");
                }
                functorMsg += ")";
                // length comparison below detects whether this functor actually contributed to the filter
                int existingFilterStringLength = (filterString != null) ? filterString.length() : 0;
                filterString = functorToFilter((Functor)premises[pi], filterString);
                if (pi == testingThruPremise) {
                    BuiltinElement be = new BuiltinElement();
                    be.setFuncName(functorName);
                    for (int ai = 0; ai < args.length; ai++) {
                        be.addArgument(graphNodeToSadlNode(args[ai]));
                    }
                    Explanation exp = new Explanation(be);
                    if (filterString != null && filterString.length() > existingFilterStringLength) {
                        exp.setPatternPrefix("Rule " + rule.getName() + ": Premise " + (pi + 1) + " of " + premises.length + ": ");
                    }
                    else {
                        // functor had no SPARQL FILTER equivalent and is not part of the query
                        exp.setPatternPrefix("Rule " + rule.getName() + ": Premise " + (pi + 1) + " of " + premises.length + " ignored:");
                    }
                    explains.add(exp);
                }
            }
            else {
                // neither a triple pattern nor a builtin; report it once
                if (pi == testingThruPremise) {
                    Explanation exp = new Explanation(null, "Unhandled premise type: " + premises[pi].getClass().getCanonicalName());
                    exp.setPatternPrefix("Rule " + rule.getName() + ": Premise " + (pi + 1) + " of " + premises.length + ": ");
                    explains.add(exp);
                }
            }
        }
        if (selectVars != null && selectVars.size() > 0) {
            // At least one variable to select: run the SPARQL query and report match counts.
            for (int vi = 0; vi < selectVars.size(); vi++) {
                sparqlSelect += selectVars.get(vi) + " ";
            }
            String q = sparqlSelect + sparqlWhere + (filterString != null ? (" . FILTER(" + filterString + ")") : "") + "}";
            try {
                ResultSet rs = processRuleQuery(rule, premisesAsStrings, q);
                if (rs == null || rs.getRowCount() == 0) {
                    resultsFound = 0;
                    List<String> explanations = new ArrayList<String>();
                    explanations.add("Premises through " + (testingThruPremise + 1) + " had no matches.");
                    explanations.add("(SPARQL Query equivalent: " + q + ")");
                    explains.get(startingIndex + testingThruPremise).setExplanations(explanations);
                }
                else {
                    resultsFound = rs.getRowCount();
                    List<String> explanations = new ArrayList<String>();
                    explanations.add("Premises through " + (testingThruPremise + 1) + " had " + resultsFound + " matches.");
                    explanations.add("(SPARQL Query: " + q + ")");
                    // add a header row of column names followed by one line per result row
                    String varNames = "";
                    String[] header = rs.getColumnNames();
                    for (int i = 0; i < header.length; i++) {
                        if (i > 0) varNames += ", ";
                        varNames += header[i];
                    }
                    explanations.add(varNames);
                    for (int i = 0; i < rs.getRowCount(); i++) {
                        String rowStr = "";
                        for (int j = 0; j < rs.getColumnCount(); j++) {
                            if (j > 0) rowStr += ", ";
                            rowStr += rs.getResultAt(i, j).toString();
                        }
                        explanations.add(rowStr);
                    }
                    explains.get(startingIndex + testingThruPremise).setExplanations(explanations);
                }
            } catch (QueryCancelledException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            } catch (QueryParseException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
        else {
            // No variables: probe the inference graph directly for the current premise.
            ClauseEntry clause = premises[testingThruPremise];
            if (clause instanceof TriplePattern) {
                ExtendedIterator<Triple> eitr = infModel.getGraph().find(((TriplePattern)clause).getSubject(),
                        ((TriplePattern)clause).getPredicate(),
                        ((TriplePattern)clause).getObject());
                if (eitr.hasNext()) {
                    List<String> explanations = new ArrayList<String>();
                    explanations.add("Premise " + (testingThruPremise + 1) + " has matches.");
                    while (eitr.hasNext()) {
                        explanations.add(eitr.next().toString());
                        resultsFound++;
                    }
                    explains.get(startingIndex + testingThruPremise).setExplanations(explanations);
                }
                else {
                    List<String> explanations = new ArrayList<String>();
                    explanations.add("Premises through " + (testingThruPremise + 1) + " had no matches.");
                    explains.get(startingIndex + testingThruPremise).setExplanations(explanations);
                }
            }
        }
        testingThruPremise++;
    } while (resultsFound > 0 && testingThruPremise < premises.length);
    return explains;
}
/**
 * Translate a rule builtin (Functor) into a SPARQL FILTER expression fragment
 * and AND it onto any previously accumulated filter.
 * <p>
 * Comparison builtins map to their infix SPARQL operators; {@code noValue}
 * maps to {@code NOT EXISTS}. Unrecognized builtins contribute nothing and
 * the old filter string is returned unchanged.
 *
 * @param functor the builtin invocation from the rule body
 * @param oldFilterString previously accumulated filter, or null
 * @return the combined filter string, or {@code oldFilterString} (possibly
 *         null) if this functor has no SPARQL equivalent
 */
protected String functorToFilter(Functor functor, String oldFilterString) {
    String filterString = null;
    String functorName = functor.getName();
    com.hp.hpl.jena.graph.Node[] args = functor.getArgs();
    // guard against a null/short argument array (the original would NPE on args.length)
    if (args != null && args.length > 1) {
        // binary comparison builtins map directly to an infix operator;
        // le/lessThan added for symmetry with ge/greaterThan
        String op = null;
        if (functorName.equals("equal")) {
            op = " = ";
        }
        else if (functorName.equals("notEqual")) {
            op = " != ";
        }
        else if (functorName.equals("ge")) {
            op = " >= ";
        }
        else if (functorName.equals("greaterThan")) {
            op = " > ";
        }
        else if (functorName.equals("le")) {
            op = " <= ";
        }
        else if (functorName.equals("lessThan")) {
            op = " < ";
        }
        if (op != null) {
            filterString = nodeToFilterString(args[0]) + op + nodeToFilterString(args[1]);
        }
        else if (functorName.equals("noValue")) {
            // noValue(s, p [, o]) -> NOT EXISTS { s p o }
            filterString = "NOT EXISTS { ";
            filterString += nodeToFilterString(args[0]);
            filterString += " ";
            filterString += nodeToFilterString(args[1]);
            filterString += " ";
            if (args.length == 3) {
                filterString += nodeToFilterString(args[2]) + " }";
            }
            else {
                // make up a new variable
                filterString += "?unspecified_value }";
            }
        }
    }
    if (filterString == null) {
        return oldFilterString;
    }
    else if (oldFilterString == null) {
        return filterString;
    }
    else {
        return oldFilterString + " && " + filterString;
    }
}
/**
 * Render a Jena graph node as a term usable inside a SPARQL FILTER:
 * literals as their bare lexical form, URIs angle-bracketed, named nodes
 * by name, anything else via toString().
 */
protected String nodeToFilterString(com.hp.hpl.jena.graph.Node node) {
    if (node instanceof Node_Literal) {
        return ((Node_Literal) node).getLiteralLexicalForm();
    }
    if (node instanceof Node_URI) {
        return "<" + ((Node_URI) node).getURI() + ">";
    }
    String name = node.getName();
    return (name != null) ? name : node.toString();
}
/**
 * Convert a Jena graph TriplePattern to a SADL model TripleElement,
 * converting each of subject, predicate, and object via
 * {@link #graphNodeToSadlNode}.
 *
 * @param tp the Jena triple pattern to convert
 * @return the equivalent SADL TripleElement
 */
protected TripleElement tripleElementFromTriplePattern(TriplePattern tp) {
    Triple triple = tp.asTriple();
    TripleElement element = new TripleElement();
    element.setSubject(graphNodeToSadlNode(triple.getSubject()));
    element.setPredicate(graphNodeToSadlNode(triple.getPredicate()));
    element.setObject(graphNodeToSadlNode(triple.getObject()));
    return element;
}
/**
 * Convert a Jena graph Node to a SADL model Node: variables become
 * VariableNodes (leading '?' stripped), URIs become NamedNodes, literals
 * become SADL Literals, and anything else is wrapped in a NamedNode using
 * its string rendering.
 *
 * @param node the Jena graph node to convert
 * @return the equivalent SADL node
 */
protected Node graphNodeToSadlNode(com.hp.hpl.jena.graph.Node node) {
    if (node instanceof Node_Variable) {
        // drop the leading '?' from the Jena variable name
        String varName = ((Node_Variable) node).getName().substring(1);
        return new VariableNode(varName);
    }
    if (node instanceof Node_URI) {
        return new NamedNode(((Node_URI) node).getURI());
    }
    if (node instanceof Node_Literal) {
        com.ge.research.sadl.model.gp.Literal literal = new com.ge.research.sadl.model.gp.Literal();
        literal.setValue(((Node_Literal) node).getLiteral().getValue());
        return literal;
    }
    // fallback: use the node's string form as a name
    return new NamedNode(node.toString());
}
/** Render a triple pattern as "subject predicate object " using short node forms. */
protected String tripleShortString(TriplePattern pattern) {
    StringBuilder sb = new StringBuilder();
    sb.append(nodeShortString(pattern.getSubject())).append(" ");
    sb.append(nodeShortString(pattern.getPredicate())).append(" ");
    sb.append(nodeShortString(pattern.getObject())).append(" ");
    return sb.toString();
}
/**
 * Short, human-readable rendering of a graph node: rule variables print
 * via toString(), URIs as their local name, and anything else
 * angle-bracketed (with a trailing space, matching historical output).
 */
protected String nodeShortString(com.hp.hpl.jena.graph.Node n) {
    if (n instanceof Node_RuleVariable) {
        return n.toString();
    }
    if (n instanceof Node_URI) {
        return n.getLocalName();
    }
    return "<" + n.toString() + "> ";
}
/**
 * Build the triple portion of a SPARQL WHERE clause from a rule
 * TriplePattern, registering any rule variables encountered in
 * {@code selectVars} (each variable is added once).
 * <p>
 * Subjects and predicates that are not variables are angle-bracketed;
 * objects may additionally be literals, rendered via the translator.
 * An object that is none of variable/URI/literal (e.g. a blank node)
 * contributes nothing, preserving the original behavior.
 *
 * @param selectVars accumulator of variable names ("?x") seen so far; mutated
 * @param pattern the rule triple pattern
 * @return the triple as a SPARQL fragment, with trailing space
 */
protected String generateTripleStringWithVars(List<String> selectVars, TriplePattern pattern) {
    String tripleStr = "";
    com.hp.hpl.jena.graph.Node s = pattern.getSubject();
    if (s instanceof Node_RuleVariable) {
        tripleStr += registerRuleVariable(selectVars, (Node_RuleVariable) s) + " ";
    }
    else {
        tripleStr += "<" + s.toString() + "> ";
    }
    com.hp.hpl.jena.graph.Node p = pattern.getPredicate();
    if (p instanceof Node_RuleVariable) {
        tripleStr += registerRuleVariable(selectVars, (Node_RuleVariable) p) + " ";
    }
    else {
        tripleStr += "<" + p.toString() + "> ";
    }
    com.hp.hpl.jena.graph.Node o = pattern.getObject();
    if (o instanceof Node_RuleVariable) {
        tripleStr += registerRuleVariable(selectVars, (Node_RuleVariable) o) + " ";
    }
    else if (o instanceof Node_URI) {
        tripleStr += "<" + o.toString() + "> ";
    }
    else if (o instanceof Node_Literal) {
        Object objVal = ((Node_Literal) o).getLiteralValue();
        tripleStr += JenaTranslatorPlugin.literalValueToString(objVal, TranslationTarget.QUERY_TRIPLE);
    }
    return tripleStr;
}

/**
 * Record a rule variable in {@code selectVars} (once) and return its
 * "?name" rendering. Extracted from three identical inline copies.
 */
private String registerRuleVariable(List<String> selectVars, Node_RuleVariable var) {
    String vn = var.toString();
    if (!selectVars.contains(vn)) {
        selectVars.add(vn);
    }
    return vn;
}
@SuppressWarnings("unused")
protected ResultSet processRuleQuery(com.hp.hpl.jena.reasoner.rulesys.Rule rule, List<String> premisesAsStrings, String q) throws QueryParseException, QueryCancelledException {
logger.debug("Explanation executing query: " + q);
ResultSet rs = ask(q);
if (rs != null) {
int numResults = rs.getRowCount();
String[] headers = rs.getColumnNames();
String headerStr = " ";
for (int hi = 0; hi < headers.length; hi++) {
if (hi > 0) headerStr += ", ";
headerStr += "?" + headers[hi].toString();
}
for (int row = 0; row < numResults; row++) {
String rowStr = " ";
for (int col = 0; col < rs.getColumnCount(); col++) {
if (col > 0) rowStr += ", ";
Object o = rs.getResultAt(row, col);
if (o instanceof com.hp.hpl.jena.graph.Node) {
rowStr += nodeShortString((com.hp.hpl.jena.graph.Node)o);
}
else if (o instanceof String && ((String)o).indexOf('
rowStr += ((String)o).substring(((String)o).indexOf('
}
else {
rowStr += o.toString();
}
}
}
}
return rs;
}
/**
 * Explain a list of graph patterns against the (freshly prepared) inference
 * model, accumulating explanations across all patterns.
 *
 * @param patterns graph patterns to explain; may be null
 * @return the accumulated explanations, or null on configuration failure
 */
public List<Explanation> explain(List<GraphPatternElement> patterns) {
    startTrace();
    try {
        if (getReasonerOnlyWhenNeeded() != null) {
            getReasonerOnlyWhenNeeded().setDerivationLogging(true);
            prepareInfModel();
            List<Explanation> results = new ArrayList<Explanation>();
            if (patterns != null) {
                for (GraphPatternElement element : patterns) {
                    results = explainGraphPatternElement(results, element);
                }
            }
            endTrace();
            return results;
        }
    } catch (ConfigurationException e) {
        e.printStackTrace();
    }
    return null;
}
/**
 * Explain a single graph pattern element. Junctions recurse into both sides;
 * triple elements are resolved to Jena subject/property/object (nulls acting
 * as wildcards), matched against the inference model, and each matching
 * statement's derivations (if any) are rendered. If nothing matches, rules
 * whose heads could infer the statement are located and themselves explained.
 *
 * @param explanations accumulator to append to; also returned
 * @param gpe the pattern element to explain
 * @return the accumulated explanation list
 */
protected List<Explanation> explainGraphPatternElement(List<Explanation> explanations, GraphPatternElement gpe) {
    if (gpe instanceof Junction) {
        // explain both branches of the junction
        explanations = explainGraphPatternElement(explanations, (GraphPatternElement) ((Junction)gpe).getLhs());
        explanations = explainGraphPatternElement(explanations, (GraphPatternElement) ((Junction)gpe).getRhs());
    }
    else if (gpe instanceof TripleElement) {
        // Resolve subject/predicate/object; any left null acts as a wildcard in listStatements.
        Resource subj = null;
        Property prop = null;
        RDFNode obj = null;
        Node sn = ((TripleElement)gpe).getSubject();
        Node pn = ((TripleElement)gpe).getPredicate();
        Node on = ((TripleElement)gpe).getObject();
        if (sn instanceof NamedNode) {
            if(((NamedNode)sn).getNodeType() != null && ((NamedNode)sn).getNodeType().equals(NodeType.InstanceNode)) {
                subj = infModel.getResource(((NamedNode)sn).toFullyQualifiedString());
            }
        }
        if (pn instanceof RDFTypeNode) {
            prop = RDF.type;
        }
        else if (pn instanceof NamedNode) {
            if(((NamedNode)pn).getNodeType() != null &&
                    (((NamedNode)pn).getNodeType().equals(NodeType.PropertyNode) ||
                    ((NamedNode)pn).getNodeType().equals(NodeType.ObjectProperty) ||
                    ((NamedNode)pn).getNodeType().equals(NodeType.DataTypeProperty))) {
                // prefer the schema model's typed property; fall back to the inference model
                prop = schemaModel.getOntProperty(((NamedNode)pn).toFullyQualifiedString());
                if (prop == null) {
                    prop = infModel.getProperty(((NamedNode)pn).toFullyQualifiedString());
                }
            }
        }
        if (on instanceof NamedNode) {
            if(((NamedNode)on).getNodeType() != null && !((NamedNode)on).getNodeType().equals(NodeType.VariableNode)) { // any type except variable
                obj = infModel.getResource(((NamedNode)on).toFullyQualifiedString());
            }
        }
        else if (on instanceof com.ge.research.sadl.model.gp.Literal){
            // NOTE(review): prop may still be null here (e.g. predicate was a
            // variable), which would NPE on canAs -- confirm callers always
            // supply a resolvable predicate with a literal object.
            if (prop.canAs(OntProperty.class)) {
                try {
                    // type the literal against the property's declared range when possible
                    obj = UtilsForJena.getLiteralMatchingDataPropertyRange(schemaModel, prop.as(OntProperty.class), ((com.ge.research.sadl.model.gp.Literal)on).getValue());
                } catch (Exception e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
            }
            if (obj == null) {
                obj = schemaModel.createTypedLiteral(((com.ge.research.sadl.model.gp.Literal)on).getValue());
            }
        }
        else {
            // ok to not have a value
        }
        StmtIterator stI = this.infModel.listStatements(subj, prop, obj);
        if (stI.hasNext()) {
            while (stI.hasNext()) {
                Statement stmt = stI.nextStatement();
                Iterator<Derivation> itr = getDerivation(stmt.asTriple());
                TripleElement tpl = null;
                if (subj == null || prop == null || obj == null) {
                    // the query had wildcards: rebuild a concrete triple from the matched statement
                    tpl = new TripleElement();
                    NamedNode snn = new NamedNode(stmt.getSubject().toString());
                    snn.setNamespace(stmt.getSubject().getNameSpace());
                    tpl.setSubject(snn);
                    NamedNode pnn = new NamedNode(stmt.getPredicate().getLocalName());
                    pnn.setNamespace(stmt.getPredicate().getNameSpace());
                    tpl.setPredicate(pnn);
                    RDFNode onode = stmt.getObject();
                    if (onode instanceof Resource) {
                        NamedNode onn = new NamedNode(((Resource)onode).toString());
                        onn.setNamespace(((Resource)onode).getNameSpace());
                        tpl.setObject(onn);
                    }
                    else if (onode instanceof Literal){
                        com.ge.research.sadl.model.gp.Literal ol = new com.ge.research.sadl.model.gp.Literal();
                        ol.setValue(((Literal)onode).getValue());
                        tpl.setObject(ol);
                    }
                }
                else {
                    tpl = (TripleElement) gpe;
                }
                if (itr != null && itr.hasNext()) {
                    // de-duplicate derivations by their string rendering before tracing each
                    HashMap<String, Derivation> dvs = new HashMap<String, Derivation>();
                    while (itr.hasNext()) {
                        Derivation dv = itr.next();
                        dvs.put(dv.toString(), dv);
                    }
                    Iterator<Derivation> ditr = dvs.values().iterator();
                    while (ditr.hasNext()) {
                        Derivation dv = ditr.next();
                        Explanation expl = new Explanation(tpl, deepDerivationTrace(dv, true, 4, 0, true));
                        expl.setPatternPrefix("Derivation of ");
                        explanations.add(expl);
                    }
                }
                else {
                    Explanation expl = new Explanation(tpl, "Statement is true in model but no derivation available.");
                    expl.setPatternPrefix("Derivation of ");
                    explanations.add(expl);
                }
            }
        }
        else {
            // no matches: look for rules that might infer the desired statement
            List<Rule> matchingRules = findRulesInferringStatement(sn, pn,on);
            for (int j = 0; matchingRules != null && j < matchingRules.size(); j++) {
                Explanation expl = new Explanation(gpe, "Statement not found but might be inferred by rule '" + matchingRules.get(j).getName() + "'.\n");
                expl.setPatternPrefix("Possible inference of ");
                explanations.add(expl);
                explanations = explainRule(matchingRules.get(j), explanations);
            }
        }
    }
    return explanations;
}
/**
 * Find loaded rules whose head contains a triple pattern that could infer a
 * statement matching (sn, pn, on). A null URI (variable position, or a node
 * without a type) acts as a wildcard for that position.
 * <p>
 * Fixes from review: the object-match condition previously tested
 * {@code pnUri != null} (a copy-paste slip) instead of {@code onUri != null},
 * so object matching wrongly depended on the predicate; and the
 * {@code getNodeType()} calls are now null-guarded, consistent with the
 * checks elsewhere in this class.
 *
 * @param sn subject node (SADL model)
 * @param pn predicate node (SADL model)
 * @param on object node (SADL model)
 * @return matching rules, or null if none (or on configuration failure)
 */
protected List<Rule> findRulesInferringStatement(Node sn, Node pn, Node on) {
    String snUri = null;
    // if the subject is a variable (or untyped) leave snUri null
    if (sn instanceof NamedNode && ((NamedNode)sn).getNodeType() != null
            && !((NamedNode)sn).getNodeType().equals(NodeType.VariableNode)) {
        snUri = ((NamedNode)sn).toFullyQualifiedString();
    }
    String pnUri = null;
    // if the predicate is a variable (or untyped) leave pnUri null
    if (pn instanceof NamedNode && ((NamedNode)pn).getNodeType() != null
            && !((NamedNode)pn).getNodeType().equals(NodeType.VariableNode)) {
        pnUri = ((NamedNode)pn).toFullyQualifiedString();
    }
    String onUri = null;
    // if the object is a variable (or untyped) leave onUri null
    if (on instanceof NamedNode && ((NamedNode)on).getNodeType() != null
            && !((NamedNode)on).getNodeType().equals(NodeType.VariableNode)) {
        onUri = ((NamedNode)on).toFullyQualifiedString();
    }
    List<Rule> allRules;
    try {
        if (getReasonerOnlyWhenNeeded() != null) {
            allRules = getReasonerOnlyWhenNeeded().getRules();
            List<Rule> matchingRules = null;
            for (int i = 0; allRules != null && i < allRules.size(); i++) {
                Rule rl = allRules.get(i);
                ClauseEntry[] hdelmts = rl.getHead();
                for (int j = 0; hdelmts != null && j < hdelmts.length; j++) {
                    ClauseEntry cls = hdelmts[j];
                    if (cls instanceof TriplePattern) {
                        // at this time we only consider triple patterns
                        com.hp.hpl.jena.graph.Node sjn = ((TriplePattern)cls).getSubject();
                        com.hp.hpl.jena.graph.Node pjn = ((TriplePattern)cls).getPredicate();
                        com.hp.hpl.jena.graph.Node ojn = ((TriplePattern)cls).getObject();
                        if ((snUri != null && sjn instanceof Node_URI && ((Node_URI)sjn).getURI().equals(snUri)) ||
                                sjn instanceof Node_Variable || snUri == null) {
                            // subject: exact match, rule subject is a variable, or wildcard
                            if ((pnUri != null && pjn instanceof Node_URI && ((Node_URI)pjn).getURI().equals(pnUri)) ||
                                    (pnUri == null && pjn instanceof Node_Variable)) {
                                // predicate: exact match, or both pattern and rule are variables
                                if ((onUri == null) ||
                                        (onUri != null && ojn instanceof Node_URI && ((Node_URI)ojn).getURI().equals(onUri))) {
                                    // object: wildcard or exact match
                                    if (matchingRules == null) {
                                        matchingRules = new ArrayList<Rule>();
                                    }
                                    matchingRules.add(rl);
                                    break;
                                }
                            }
                        }
                    }
                }
            }
            return ((matchingRules != null && matchingRules.size() > 0) ? matchingRules : null);
        }
    } catch (ConfigurationException e) {
        e.printStackTrace();
    }
    return null;
}
/**
 * Look up the derivations recorded in the inference graph for the given
 * triple (populated only when derivation logging was enabled on the reasoner).
 *
 * @param match the triple whose derivations are requested
 * @return iterator over recorded derivations (may be empty)
 */
protected Iterator<Derivation> getDerivation(Triple match) {
    return ((InfGraph) infModel.getGraph()).getDerivation(match);
}
/** Append {@code level} copies of the indent unit to sb and return it for chaining. */
protected StringBuilder indent(StringBuilder sb, int level) {
    int remaining = level;
    while (remaining-- > 0) {
        sb.append(" ");
    }
    return sb;
}
/**
 * Recursively render a derivation trace: the concluding rule application,
 * then each matched condition, then (indented further) the derivations of
 * those conditions. De-duplicates derivations at each level by their string
 * rendering. Non-rule derivations fall back to toString().
 *
 * @param d the derivation to trace
 * @param bindings when true, include the "was concluded by" line
 * @param indent base indentation (spaces) for this level
 * @param level recursion depth (0 at the top; affects wording only)
 * @param includeValue currently propagated but not otherwise used here
 * @return the formatted trace text
 */
protected String deepDerivationTrace(Derivation d, boolean bindings, int indent, int level, boolean includeValue) {
    if (d instanceof RuleDerivation) {
        StringBuilder sb = new StringBuilder();
        if (bindings) {
            sb = indent(sb, indent + 4);
            sb.append((level > 0 ? "which " : "") + "was concluded by: " + d.toString() + "\n");
        }
        // child derivations are indented an extra 12 columns
        int margin = indent + 12;
        List<Triple> matches = ((RuleDerivation) d).getMatches();
        if (matches != null && matches.size() > 0) {
            sb = indent(sb, indent + 8);
            sb.append("based on matching conditions:\n");
            for (int i = 0; i < matches.size(); i++) {
                Triple match = matches.get(i);
                Iterator<Derivation> derivations = getDerivation(match);
                if (derivations == null || !derivations.hasNext()) {
                    // leaf condition: no further derivation recorded
                    sb = indent(sb, indent + 12);
                    if (match == null) {
                        // A primitive
                        ClauseEntry term = ((RuleDerivation) d).getRule().getBodyElement(i);
                        if (term instanceof Functor) {
                            // render the builtin call as name(arg1, arg2, ...)
                            sb.append(((Functor)term).getName() + "(");
                            com.hp.hpl.jena.graph.Node[] args = ((Functor)term).getArgs();
                            for (int j = 0; j < args.length; j++) {
                                String nstr;
                                if (args[j].isURI()) {
                                    nstr = args[j].getLocalName();
                                }
                                else {
                                    nstr = args[j].toString();
                                }
                                if (nstr.startsWith("?")) {
                                    nstr = nstr.substring(1);
                                }
                                if (j > 0) {
                                    sb.append(", ");
                                }
                                sb.append(nstr);
                            }
                            sb.append(")\n");
                        } else {
                            sb.append("call to built in");
                        }
                    } else {
                        sb.append(tripleToString(match));
                        sb.append("\n");
                    }
                } else {
                    // condition has its own derivations: print it, then recurse into each
                    sb = indent(sb, indent + 12);
                    sb.append(tripleToString(match));
                    if (derivations.hasNext()) {
                        // de-duplicate derivations by string rendering
                        HashMap<String, Derivation> dvs = new HashMap<String, Derivation>();
                        while (derivations.hasNext()) {
                            Derivation derivation = derivations.next();
                            dvs.put(derivation.toString(), derivation);
                        }
                        Iterator<Derivation> ditr = dvs.values().iterator();
                        while (ditr.hasNext()) {
                            Derivation derivation = ditr.next();
                            // Historical cycle-/duplicate-detection code, retained for reference:
//							if (!derivationAlreadyShown(derivation, seen, out, margin)) {
//							if (seen != null && derivation instanceof RuleDerivation && seen.contains(derivation)) {
//							PrintUtil.printIndent(out, margin + 4);
////							out.println(tripleToString(match) + " - already shown");
//							out.println("- explanation already shown");
//							} else {
//							if (seen != null && !seen.contains(derivation)) {
//							if (seen == null) {
//							seen = new HashSet();
//							seen.add(derivation);
                            sb.append("\n");
                            sb.append(deepDerivationTrace(derivation, bindings, margin, level + 4, includeValue));
                        }
                    }
                }
            }
        }
        return sb.toString();
    }
    else {
        return d.toString();
    }
}
/** Render a triple as "subject predicate object" using short node forms. */
protected String tripleToString(Triple t) {
    String subj = tripleSubjectToString(t.getSubject());
    String pred = t.getPredicate().getLocalName();
    String obj = tripleValueToString(t.getObject());
    return subj + " " + pred + " " + obj;
}
/** URI subjects render as their local name; all other nodes via toString(). */
protected String tripleSubjectToString(com.hp.hpl.jena.graph.Node s) {
    return s.isURI() ? s.getLocalName() : s.toString();
}
/**
 * Render a triple's object node: string literals are double-quoted, other
 * literals use their lexical form, URIs their local name, and anything else
 * falls back to toString().
 */
public String tripleValueToString(com.hp.hpl.jena.graph.Node n) {
    if (n.isLiteral()) {
        String lexical = n.getLiteralLexicalForm();
        if (n.getLiteralValue() instanceof String) {
            return "\"" + lexical + "\"";
        }
        return lexical;
    }
    if (n.isURI()) {
        return n.getLocalName();
    }
    return n.toString();
}
/**
 * Ensure {@code infModel} exists and reflects the latest inputs: a new
 * inference model is built when none exists or when new input has arrived;
 * otherwise the cached model is reused. Clears {@code newInputFlag} on exit.
 *
 * @throws ConfigurationException if the reasoner cannot be configured
 */
protected void prepareInfModel() throws ConfigurationException {
    getReasonerOnlyWhenNeeded();
    if (infModel == null || newInputFlag == true) {
        if (schemaModelIsCachedInferredModel) {
            // the schema model is already the (cached) inferred model; use it directly
            infModel = schemaModel;
        }
        else {
            synchronized(ReasonerFamily) {
                long t1 = System.currentTimeMillis();
                generateTboxModelWithSpec();
                logger.debug("In prepareInfModel, modelSpec: "+modelSpec.toString());
                logger.debug("In prepareInfModel, reasoner rule count: "+getReasonerOnlyWhenNeeded().getRules().size());
                infModel = ModelFactory.createInfModel(reasoner, tboxModelWithSpec);
                if (collectTimingInfo) {
                    infModel.size(); // this forces instantiation of the inference model
                    long t2 = System.currentTimeMillis();
                    timingInfo.add(new ReasonerTiming(TIMING_PREPARE_INFMODEL, "prepare inference model", t2 - t1));
                }
            }
        }
    }
    // NOTE(review): the branch below is unreachable -- the first condition
    // already captures every case where newInputFlag is true, so an existing
    // infModel with new input is always rebuilt rather than rebound. Confirm
    // whether the first condition was intended to be just (infModel == null).
    else if(newInputFlag == true) {
        logger.debug("In prepareInfModel, reusing infModel with newInputFlag is true");
        if (infModel instanceof InfModel) {
            synchronized(ReasonerFamily) {
                logger.debug("In prepareInfModel, reusing infModel, rebinding existing infModel");
                ((InfModel) infModel).rebind();
                infModel.size(); // force re-instantiation?
            }
        }
    } else {
        logger.debug("In prepareInfModel, reusing infModel without any changes, newInputFlag is false");
    }
    newInputFlag = false;
}
/**
 * Populate {@code tboxModelWithSpec} from the data model when present,
 * otherwise the schema model. When the schema model is itself the cached
 * inferred model the base model is used directly; otherwise it is wrapped
 * in a new OntModel with the configured spec.
 */
private void generateTboxModelWithSpec() {
    OntModel base = (dataModel != null) ? dataModel : schemaModel;
    if (schemaModelIsCachedInferredModel) {
        // don't need a model spec / new OntModel; use the base model directly
        tboxModelWithSpec = base;
    }
    else {
        tboxModelWithSpec = ModelFactory.createOntologyModel(modelSpec, base);
    }
}
/**
 * Load the ".rules" file associated with the given model URI and with each
 * of its imports, tracking loaded rule files so none is loaded twice.
 * Failures are reported via {@link #addError} rather than thrown, since a
 * model legitimately may have no rules file.
 *
 * @param m the ontology model (not used directly; kept for interface compatibility)
 * @param modelName public URI of the model whose rules should be loaded
 */
protected void loadRules(OntModel m, String modelName) {
    try {
        String altUrl = configurationMgr.getAltUrlFromPublicUri(modelName);
        if (altUrl == null) {
            throw new ConfigurationException("Model URI '" + modelName + "' not found in mappings!");
        }
        // altUrl is guaranteed non-null here (the throw above), so the
        // original redundant "if (altUrl != null)" re-check was removed.
        String rulefn = altUrl.substring(0, altUrl.lastIndexOf(".")) + ".rules";
        if (!ruleFilesLoaded.contains(rulefn)) {
            if (loadRules(rulefn)) {
                ruleFilesLoaded.add(rulefn);
            }
        }
        if (imports != null) {
            for (int i = 0; i < imports.size(); i++) {
                ImportMapping impMap = imports.get(i);
                String impUri = impMap.getPublicURI();
                altUrl = impMap.getActualURL();
                if (altUrl == null) {
                    altUrl = configurationMgr.getAltUrlFromPublicUri(impUri);
                }
                if (altUrl != null) {
                    rulefn = altUrl.substring(0, altUrl.lastIndexOf(".")) + ".rules";
                    if (!ruleFilesLoaded.contains(rulefn)) {
                        if (loadRules(rulefn)) {
                            ruleFilesLoaded.add(rulefn);
                        }
                    }
                }
            }
        }
    } catch (Throwable e) {
        // missing or unreadable rule files are reported as model errors, not rethrown
        addError(new ModelError(e.getMessage(), ErrorType.ERROR));
    }
}
/**
 * Register a rule-engine builtin, given by fully-qualified class name, with
 * the Jena BuiltinRegistry. Classes in Jena's own builtin package are skipped
 * (Jena registers those itself). CancellableBuiltin instances additionally
 * receive the configuration manager.
 *
 * @param name builtin name (trimmed; registration uses the builtin's own name)
 * @param fullyQualifiedClassName builtin class to instantiate; needs a no-arg constructor
 * @param configMgr configuration manager handed to cancellable builtins
 * @return true if the class was instantiated (and, if a Builtin, registered);
 *         false for Jena-package builtins or on any reflection failure
 */
protected static synchronized boolean addBuiltin(String name, String fullyQualifiedClassName, IConfigurationManager configMgr) {
    name = name.trim();
    fullyQualifiedClassName = fullyQualifiedClassName.trim();
    // BUG FIX: Package.toString() returns "package <name>[, <spec title>...]",
    // so comparing it to the bare package name could never match and Jena's
    // own builtins were never filtered out. Package.getName() is the correct
    // comparison target. (Product is one of Jena's bundled builtins.)
    if (fullyQualifiedClassName.substring(0, fullyQualifiedClassName.lastIndexOf('.')).equals(Product.class.getPackage().getName())) {
        // don't need to register Jena builtins
        return false;
    }
    try {
        Class<?> builtinCls = Class.forName(fullyQualifiedClassName);
        Constructor<?> c = builtinCls.getConstructor();
        Object javaInstance = c.newInstance();
        if (javaInstance instanceof Builtin) {
            BuiltinRegistry.theRegistry.register((Builtin) javaInstance);
            if (javaInstance instanceof CancellableBuiltin) {
                ((CancellableBuiltin)javaInstance).setConfigMgr(configMgr);
            }
        }
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
        return false;
    } catch (SecurityException e) {
        e.printStackTrace();
        return false;
    } catch (NoSuchMethodException e) {
        e.printStackTrace();
        return false;
    } catch (IllegalArgumentException e) {
        e.printStackTrace();
        return false;
    } catch (InstantiationException e) {
        e.printStackTrace();
        return false;
    } catch (IllegalAccessException e) {
        e.printStackTrace();
        return false;
    } catch (InvocationTargetException e) {
        e.printStackTrace();
        return false;
    }
    return true;
}
/**
 * Apply a configuration item: items in the builtin category register a rule
 * builtin (requires both "name" and "class" values); all other categories
 * have their name/value pairs stored in the configuration map.
 *
 * @param configItem the configuration item to apply
 * @return true if applied; false if the category is missing or a builtin
 *         registration lacked required values or failed
 */
public boolean configure(ConfigurationItem configItem) {
    String category = configItem.getLeafCategory();
    if (category == null) {
        return false;
    }
    List<NameValuePair> nvpList = configItem.getNameValuePairs();
    if (IConfigurationManager.BuiltinCategory.equals(category)) {
        Object nameObj = configItem.getNamedValue("name");
        if (nameObj != null) {
            Object clssObj = configItem.getNamedValue("class");
            if (clssObj != null) {
                return addBuiltin(nameObj.toString(), clssObj.toString(), configurationMgr);
            }
        }
        return false;
    }
    if (configuration == null) {
        configuration = new HashMap<String, Object>();
    }
    if (nvpList != null) {
        for (NameValuePair nvp : nvpList) {
            configuration.put(nvp.getName(), nvp.getValue());
        }
    }
    return true;
}
/**
 * Resolve the three string arguments of addTriple into a Jena Statement,
 * validating that each position resolved to a usable RDF node.
 *
 * @param sub subject name/URI
 * @param pred predicate name/URI
 * @param obj object name/URI or literal value
 * @return the constructed statement
 * @throws InvalidNameException if subject, predicate, or object cannot be resolved
 * @throws MalformedURLException if a resource URI is malformed
 */
protected Statement prepareStatement(String sub, String pred, String obj) throws InvalidNameException, MalformedURLException {
    RDFNode[] spo = prepareSubjectPredicateObject(sub, pred, obj);
    if (spo != null && spo.length == 3) {
        Resource r = (Resource) spo[0];
        Property p = (Property) spo[1];
        RDFNode n = spo[2];
        Statement s = null;
        if (r == null) {
            throw new InvalidNameException("Not able to resolve triple subject '" + sub + "'.");
        }
        else if (p == null) {
            throw new InvalidNameException("Not able to resolve triple predicate '" + pred + "'.");
        }
        else if (n == null) {
            // NOTE(review): dumping both models to System.out on failure looks
            // like leftover debug output -- consider removing or routing to the logger.
            if (schemaModel != null) {
                schemaModel.write(System.out, "N-TRIPLE");
            }
            if (dataModel != null) {
                dataModel.write(System.out, "N-TRIPLE");
            }
            throw new InvalidNameException("Not able to resolve triple object '" + obj + "'.");
        }
        else {
            s = ResourceFactory.createStatement(r, p, n);
        }
        return s;
    }
    throw new InvalidNameException("Unexpected error resolving triple <" + sub + ", " + pred + ", " + obj + ">");
}
private RDFNode[] prepareSubjectPredicateObject(String sub, String pred,
String obj) throws InvalidNameException, MalformedURLException {
Property p = null;
if (pred != null) {
p = ResourceFactory.createProperty(pred);
}
RDFNode n = null;
if (obj != null) {
if (obj.startsWith("http://") && obj.contains("
// this looks like a URI -- this would fail if there were a string value assigned that looked like a URI...
n = getOntResource(obj);
if (n == null) {
throw new InvalidNameException("Resource name '" + obj + "' not found in models.");
}
}
else {
if (p.equals(RDF.type)) {
// this is an invalid object
throw new InvalidNameException("'" + obj + "' is not a valid class name.");
}
else if (p.canAs(OntProperty.class) && ((OntProperty)p.as(OntProperty.class)).isObjectProperty()) {
// this is an invalid object
throw new InvalidNameException("'" + obj + "' is not a valid value for property '" + pred + "'");
}
Object objVal = xsdStringToObject(obj);
if (objVal != null) {
n = schemaModel.createTypedLiteral(objVal);
}
else {
n = ResourceFactory.createResource(obj);
}
}
}
Resource r = null;
if (sub != null) {
r = getOntResource(sub);
if (r == null) {
if (dataModel != null && p.equals(RDF.type) && n.canAs(OntClass.class)) {
r = dataModel.createIndividual(sub, n.as(OntClass.class));
}
else {
r = ResourceFactory.createResource(sub);
}
}
}
RDFNode[] spo = new RDFNode[3];
spo[0] = r;
spo[1] = p;
spo[2] = n;
return spo;
}
/**
 * Look up a resource by URI: first in the schema model, then the data model,
 * then each imported model. If still not found and the t-box may be stale,
 * reload any imported files modified on disk since load and search the
 * imports once more.
 *
 * @param uri the resource URI to find
 * @return the resource, or null if not found anywhere
 * @throws MalformedURLException if an import's file URL is malformed
 */
private Resource getOntResource(String uri) throws MalformedURLException {
    Resource r = null;
    if (schemaModel.getOntClass(uri) != null || schemaModel.getIndividual(uri) != null) {
        r = schemaModel.getResource(uri);
    }
    else if (dataModel != null && (dataModel.getOntClass(uri) != null || dataModel.getIndividual(uri) != null)) {
        r = dataModel.getResource(uri);
    }
    if (r == null) {
        // look in imports
        if (imports != null) {
            for (int i = 0; i < imports.size(); i++) {
                ImportMapping im = imports.get(i);
                if (im != null) {
                    OntModel impModel = im.getModel();
                    if (impModel != null) {
                        r = impModel.getOntResource(uri);
                        if (r != null) {
                            break;
                        }
                    }
                }
            }
            // so we've failed to here; check to see if the t-box is stale
            // (an imported file changed on disk since it was loaded) and, if
            // so, reload the changed files and retry the imports
            if (tboxLoadTime > 0) {
                SadlUtils su = new SadlUtils();
                for (int i = 0; i < imports.size(); i++) {
                    ImportMapping im = imports.get(i);
                    if (im != null) {
                        String actualUrl = im.getActualURL();
                        if (actualUrl.startsWith("file:")) {
                            File impFile = new File(su.fileUrlToFileName(actualUrl));
                            if (impFile.exists()) {
                                if (impFile.lastModified() > tboxLoadTime) {
                                    // reload the file
                                    OntModel m = im.getModel();
                                    m.remove(m); // clear current contents before re-reading
                                    m.read(actualUrl);
                                }
                            }
                        }
                    }
                }
                // second pass over the imports now that stale models are reloaded
                for (int i = 0; i < imports.size(); i++) {
                    ImportMapping im = imports.get(i);
                    if (im != null) {
                        OntModel impModel = im.getModel();
                        if (impModel != null) {
                            r = impModel.getOntResource(uri);
                            if (r != null) {
                                break;
                            }
                        }
                    }
                }
            }
        }
    }
    return r;
}
/**
 * This method returns the category (name) of this specific reasoner.
 * This is a "root" category name, which will have sub-categories.
 *
 * @return the reasoner's root configuration category name
 */
public String getConfigurationCategory() {
    return ReasonerCategory;
}
/**
 * Build the catalog of user-configurable options for this reasoner, keyed by
 * option name: Jena model spec, query timeout, rule mode, OWL-to-rules
 * translation, transitive-closure caching, derivation logging level, rule
 * tracing, and the Lucene indexer toggle. Each option carries its default
 * value and, where applicable, its list of allowed values.
 *
 * @return map of option name to its ConfigurationOption descriptor
 */
public Map<String, ConfigurationOption> getReasonerConfigurationOptions() {
    Map<String, ConfigurationOption> map = new HashMap<String, ConfigurationOption>();
    String[] categoryHierarchy = {ReasonerCategory};
    // allowed values for the rule-engine execution strategy
    String[] ruleModeOptions = {GenericRuleReasoner.HYBRID.toString(),
            GenericRuleReasoner.FORWARD.toString(),
            GenericRuleReasoner.BACKWARD.toString(),
            GenericRuleReasoner.FORWARD_RETE.toString()};
    // allowed Jena OntModel specifications
    String[] modelSpecOptions = {OWL_MEM, OWL_MEM_RDFS, OWL_MEM_TRANS, OWL_MEM_RULE, OWL_MEM_MICRO_RULE, OWL_MEM_MINI_RULE,
            OWL_DL_MEM, OWL_DL_MEM_RDFS, OWL_DL_MEM_TRANS, OWL_DL_MEM_RULE, OWL_LITE_MEM, OWL_LITE_MEM_TRANS, OWL_LITE_MEM_RDFS,
            OWL_LITE_MEM_RULE, RDFS_MEM, RDFS_MEM_TRANS, RDFS_MEM_RDFS};
    String[] derivationOptions = {DERIVATION_NONE, DERIVATION_SHALLOW, DERIVATION_DEEP};
    map.put(pModelSpec,
            new ConfigurationOption(categoryHierarchy, pModelSpec, "Jena ontology model specification",
                    OWL_DL_MEM_RDFS, modelSpecOptions));
    map.put(pTimeOut,
            new ConfigurationOption(categoryHierarchy, pTimeOut, "Query timeout (seconds, -1 for no limit)", "-1", null));
    map.put(pRuleMode,
            new ConfigurationOption(categoryHierarchy, pRuleMode, "Jena reasoner mode", GenericRuleReasoner.HYBRID.toString(), ruleModeOptions ));
    map.put(pOWLTranslation,
            new ConfigurationOption(categoryHierarchy, pOWLTranslation , "Translate some OWL constructs (intersection) to rules", false, booleanOptions));
    map.put(pTransitiveClosureCaching,
            new ConfigurationOption(categoryHierarchy, pTransitiveClosureCaching , "Cache transitive inferences to improve performance (may not work with all rules sets)", false, booleanOptions));
    map.put(pDerivationLogging,
            new ConfigurationOption(categoryHierarchy, pDerivationLogging, "Track and display derivations", DERIVATION_NONE, derivationOptions));
    map.put(pTrace,
            new ConfigurationOption(categoryHierarchy, pTrace, "Log rule activity to startup console (run with -console option)", false, booleanOptions));
    map.put(pUseLuceneIndexer,
            new ConfigurationOption(categoryHierarchy, pUseLuceneIndexer, "Use Custom Lucene Indexer", false, booleanOptions));
    return map;
}
	/**
	 * Returns the configured derivation-logging level, defaulting to
	 * {@code DERIVATION_NONE} when no preference has been set.
	 */
	protected String getDerivationLevel() {
		return getStringConfigurationValue(null, pDerivationLogging, DERIVATION_NONE);
	}
protected OntModelSpec getModelSpec(List<ConfigurationItem> preferences) {
Object modelSpecID = findPreference(preferences, pModelSpec);
if (modelSpecID != null) {
configure(findConfigurationItem(preferences, pModelSpec));
}
if (modelSpecID == null && configuration != null) {
modelSpecID = configuration.get(pModelSpec);
}
if (modelSpecID != null) {
OntModelSpec ms = new OntModelSpec(getModelSpec(modelSpecID.toString()));
ms.setDocumentManager(configurationMgr.getJenaDocumentMgr());
return ms;
}
OntModelSpec ms = new OntModelSpec(OntModelSpec.OWL_MEM);
ms.setDocumentManager(configurationMgr.getJenaDocumentMgr());
return ms;
}
protected OntModelSpec getModelSpec(String modelSpecID) {
if (modelSpecID.equals(OWL_DL_MEM)) {
return OntModelSpec.OWL_DL_MEM;
}
else if (modelSpecID.equals(OWL_DL_MEM_RDFS)) {
return OntModelSpec.OWL_DL_MEM_RDFS_INF;
}
else if (modelSpecID.equals(OWL_DL_MEM_RULE)) {
return OntModelSpec.OWL_DL_MEM_RULE_INF;
}
else if (modelSpecID.equals(OWL_DL_MEM_TRANS)) {
return OntModelSpec.OWL_DL_MEM_TRANS_INF;
}
else if (modelSpecID.equals(OWL_LITE_MEM)) {
return OntModelSpec.OWL_LITE_MEM;
}
else if (modelSpecID.equals(OWL_LITE_MEM_RDFS)) {
return OntModelSpec.OWL_LITE_MEM_RDFS_INF;
}
else if (modelSpecID.equals(OWL_LITE_MEM_RULE)) {
return OntModelSpec.OWL_LITE_MEM_RULES_INF;
}
else if (modelSpecID.equals(OWL_LITE_MEM_TRANS)) {
return OntModelSpec.OWL_LITE_MEM_TRANS_INF;
}
else if (modelSpecID.equals(OWL_MEM)) {
return OntModelSpec.OWL_MEM;
}
else if (modelSpecID.equals(OWL_MEM_MICRO_RULE)) {
return OntModelSpec.OWL_MEM_MICRO_RULE_INF;
}
else if (modelSpecID.equals(OWL_MEM_MINI_RULE)) {
return OntModelSpec.OWL_MEM_MINI_RULE_INF;
}
else if (modelSpecID.equals(OWL_MEM_RDFS)) {
return OntModelSpec.OWL_MEM_RDFS_INF;
}
else if (modelSpecID.equals(OWL_MEM_RULE)) {
return OntModelSpec.OWL_MEM_RULE_INF;
}
else if (modelSpecID.equals(OWL_MEM_TRANS)) {
return OntModelSpec.OWL_MEM_TRANS_INF;
}
else if (modelSpecID.equals(RDFS_MEM)) {
return OntModelSpec.RDFS_MEM;
}
else if (modelSpecID.equals(RDFS_MEM_RDFS)) {
return OntModelSpec.RDFS_MEM_RDFS_INF;
}
else if (modelSpecID.equals(RDFS_MEM_TRANS)) {
return OntModelSpec.RDFS_MEM_TRANS_INF;
}
return OntModelSpec.OWL_MEM;
}
protected com.hp.hpl.jena.reasoner.rulesys.GenericRuleReasoner.RuleMode getRuleMode(List<ConfigurationItem> preferences) throws ConfigurationException {
Object ruleModeID = findPreference(preferences, pRuleMode);
if (ruleModeID != null) {
configure(findConfigurationItem(preferences, pRuleMode));
}
if (ruleModeID == null && configuration != null) {
ruleModeID = configuration.get(pRuleMode);
}
if (ruleModeID != null) {
return getRuleMode(ruleModeID.toString());
}
return GenericRuleReasoner.HYBRID;
}
protected com.hp.hpl.jena.reasoner.rulesys.GenericRuleReasoner.RuleMode getRuleMode(String ruleModeID) throws ConfigurationException {
if (ruleModeID.equals(GenericRuleReasoner.BACKWARD.toString())) {
return GenericRuleReasoner.BACKWARD;
}
else if (ruleModeID.equals(GenericRuleReasoner.FORWARD.toString())) {
return GenericRuleReasoner.FORWARD;
}
else if (ruleModeID.equals(GenericRuleReasoner.FORWARD_RETE.toString())) {
return GenericRuleReasoner.FORWARD_RETE;
}
else if (ruleModeID.equals(GenericRuleReasoner.HYBRID.toString())) {
return GenericRuleReasoner.HYBRID;
}
else {
throw new ConfigurationException("Invalid Jena Reasoner mode: " + ruleModeID);
}
}
public void enableExplanation(boolean bVal) {
if (reasoner != null) {
try {
if (getReasonerOnlyWhenNeeded() != null) {
getReasonerOnlyWhenNeeded().setDerivationLogging(bVal);
}
} catch (ConfigurationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
explanationsEnabled = true;
}
}
	/**
	 * @return true when the derivation level is anything other than "None".
	 */
	public boolean isExplanationEnabled() {
		return !getDerivationLevel().equals(DERIVATION_NONE);
	}
public boolean saveInferredModel(String filename, String modelname, boolean deductionsOnly) throws FileNotFoundException {
try {
prepareInfModel();
} catch (ConfigurationException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
if (infModel != null) {
OntModel m;
if (deductionsOnly && infModel instanceof InfModel) {
m = ModelFactory.createOntologyModel(configurationMgr.getOntModelSpec(null), ((InfModel) infModel).getDeductionsModel());
}
else {
m = ModelFactory.createOntologyModel(configurationMgr.getOntModelSpec(null), infModel);
}
String format = "RDF/XML-ABBREV";
FileOutputStream fps = new FileOutputStream(filename);
RDFWriter rdfw = m.getWriter(format);
rdfw.write(m, fps, modelname);
try {
fps.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return false;
}
// works 6/3/2014 12:48 p.m.
public boolean reset() {
// if (infModel != null) {
// System.out.println("Before rebind, infModel size is: " + infModel.size());
if (!initialized) {
return false;
}
// if (dataModel != null) {
// System.out.println("Before removeAll, dataModel size is: " + dataModel.size());
// dataModel.getBaseModel().removeAll();
// System.out.println("Before removeAll, tboxModelWithSpec size is: " + tboxModelWithSpec.size());
// tboxModelWithSpec.removeAll();
// System.out.println("After basemodel removeAll, dataModel size is: " + dataModel.size());
//// dataModel.removeAll();
//// System.out.println("After removeAll, dataModel size is: " + dataModel.size());
infModel = null;
dataModel = null;
tboxModelWithSpec = null;
// prepareInfModel();
if (infModel != null && infModel instanceof InfModel) {
((InfModel)infModel).rebind();
// System.out.println("After rebind, infModel size is: " + infModel.size());
}
// what else do we need to do?
return true;
}
// public boolean reset() {
// if (infModel != null) {
// System.out.println("On reset, infModel size is: " + infModel.size());
// if (!initialized) {
// return false;
// if (dataModel != null) {
// System.out.println("Before removeAll, dataModel size is: " + dataModel.size());
// dataModel.removeAll();
// System.out.println("After removeAll, dataModel size is: " + dataModel.size());
// infModel = null;
// dataModel = null;
//// prepareInfModel();
//// if (infModel != null) {
//// infModel.rebind();
// // what else do we need to do?
// return true;
public String objectValueToStringValue(Object objValue, String predicate) throws ConfigurationException {
getReasonerOnlyWhenNeeded();
if (schemaModel != null) {
OntProperty pred = null;
Property nonOntPred = null;
if (predicate != null) {
pred = schemaModel.getOntProperty(predicate);
if (pred == null) {
nonOntPred = schemaModel.getProperty(predicate);
if (nonOntPred != null) {
String msg = "Found predicate but it isn't an OntProperty";
logger.debug(msg); addError(new ModelError(msg, ErrorType.ERROR));
}
}
}
RDFNode val = null;
if (pred != null && pred.isDatatypeProperty()) {
if (pred.getRange() != null) {
try {
val = UtilsForJena.getLiteralMatchingDataPropertyRange(schemaModel, pred, objValue);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
else {
val = schemaModel.createTypedLiteral(objValue);
}
}
else {
if (pred == null && objValue instanceof String && !(((String)objValue).startsWith("http://" ) && ((String)objValue).contains("
val = schemaModel.createTypedLiteral(objValue);
}
else {
val = schemaModel.getIndividual(objValue.toString());
}
}
if (val != null) {
return val.toString();
}
}
return objValue.toString();
}
public String prepareQuery(String query) throws InvalidNameException, ConfigurationException {
getReasonerOnlyWhenNeeded();
OntModel model = null;
if (dataModel != null) {
model = dataModel;
}
else if (schemaModel != null) {
model = schemaModel;
}
if (model != null) {
ReasonerTiming rt = null;
long t1 = 0L;
if (collectTimingInfo) {
rt = new ReasonerTiming(TIMING_PREPARE_QUERY, "prepare query (" + query + ")", 0); // do this now to pick up query text before preparation
t1 = System.currentTimeMillis();
}
if (configurationMgr != null) {
ITranslator translator = configurationMgr.getTranslatorForReasoner(ReasonerCategory);
if (translator != null) {
translator.setConfigurationManager(configurationMgr);
query = translator.prepareQuery(model, query);
if (collectTimingInfo) {
long t2 = System.currentTimeMillis();
rt.setMilliseconds(t2 - t1);
timingInfo.add(rt);
}
}
else {
throw new ConfigurationException("Unable to obtain a translator.");
}
}
else {
throw new ConfigurationException("No ConfigurationManager availalble.");
}
}
return query;
}
	/** Returns the reasoner-family identifier shared by related reasoner plugins. */
	public String getReasonerFamily() {
		return ReasonerFamily;
	}
	/** Returns the base class that builtins usable with this reasoner must extend. */
	public Class<?> getBuiltinClass() {
		return Builtin.class;
	}
	/**
	 * Reflectively instantiates the given builtin class to read its name and
	 * argument count.
	 *
	 * @param bcls the builtin class to describe
	 * @return a BuiltinInfo for the class, or null if it cannot be instantiated
	 */
	public BuiltinInfo getBuiltinInfo(Class<?> bcls) {
		try {
			// Reload via this class's loader so plugin-supplied builtins resolve
			// in the plugin's classloading context.
			Builtin binst = (Builtin) this.getClass().getClassLoader().loadClass(bcls.getCanonicalName()).newInstance();
			return new BuiltinInfo(binst.getName(), bcls.getCanonicalName(), getReasonerFamily(),
					binst.getArgLength());
		} catch (InstantiationException e) {
			e.printStackTrace();
		} catch (IllegalAccessException e) {
			e.printStackTrace();
		} catch (ClassNotFoundException e) {
			e.printStackTrace();
		}
		// Any reflective failure is logged above and reported as "no info".
		return null;
	}
public List<BuiltinInfo> getImplicitBuiltins() {
List<BuiltinInfo> implbltins = new ArrayList<BuiltinInfo>();
String pkg = "com.hp.hpl.jena.reasoner.rulesys.builtins";
String[] impbuiltinnames = {
"AddOne", "Bound", "CountLiteralValues", "IsBNode", "IsDType",
"IsLiteral", "ListContains", "ListEntry", "ListEqual", "ListLength",
"ListMapAsObject", "ListMapAsSubject", "ListNotContains", "ListNotEqual",
"NotBNode", "NotDType", "NotLiteral", "Now", "Regex", "StrConcat", "Table",
"TableAll", "Unbound", "UriConcat"};
for (int i = 0; i < impbuiltinnames.length; i++) {
String fqn = pkg + "." + impbuiltinnames[i];
try {
Class<?> builtinCls = Class.forName(fqn, true, Thread.currentThread().getContextClassLoader());
Constructor<?> c = builtinCls.getConstructor();
Object javaInstance = c.newInstance();
if (javaInstance instanceof Builtin) {
BuiltinInfo bi = new BuiltinInfo(((Builtin)javaInstance).getName(), fqn, getReasonerFamily(), ((Builtin)javaInstance).getArgLength());
implbltins.add(bi);
}
} catch (ClassNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (SecurityException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NoSuchMethodException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalArgumentException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (InstantiationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalAccessException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (InvocationTargetException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
return implbltins;
}
	/**
	 * Collects the reasoner's deduced statements together with their derivation
	 * traces into a text/plain DataSource.
	 *
	 * @return the derivations as a DataSource, or null when derivation logging
	 *         is off, preparation fails, or there are no statements to report
	 */
	public DataSource getDerivations() throws InvalidDerivationException, ConfigurationException {
		getReasonerOnlyWhenNeeded();
		// Derivations are only tracked when the configured level is not "None".
		if (getDerivationLevel().equals(DERIVATION_NONE)){
			return null;
		}
		try {
			prepareInfModel();
			StmtIterator sitr;
			// Prefer only the deduced statements when an inference model exists.
			if (infModel instanceof InfModel) {
				sitr = ((InfModel) infModel).getDeductionsModel().listStatements();
			}
			else {
				sitr = infModel.listStatements();
			}
			if (sitr.hasNext()) {
				StringWriter swriter = new StringWriter();
				PrintWriter out = new PrintWriter(swriter);
				out.println("Derivations from instance data combined with model '" + tbox + "', " + now() + "\n");
				writeStatementDerivations(out, null, sitr);
				String derivations = swriter.toString();
				out.close();
				StringDataSource ds = new StringDataSource(derivations, "text/plain");
				ds.setName("Derivations");
				return ds;
			}
		} catch (ConfigurationException e) {
			// Best-effort: a preparation failure is logged and null returned.
			e.printStackTrace();
		}
		return null;
	}
	/**
	 * Writes each statement from the iterator, followed (when derivation
	 * logging is enabled) by its shallow or deep derivation trace.
	 *
	 * @param out  destination writer
	 * @param seen derivations already printed; may be null and is created
	 *             lazily the first time a deep trace is emitted
	 * @param sitr statements to report
	 */
	protected void writeStatementDerivations(PrintWriter out, HashSet<Derivation> seen, StmtIterator sitr) throws InvalidDerivationException {
		while (sitr.hasNext()) {
			Statement s = sitr.nextStatement();
			out.println(tripleToString(s));
//			out.println(s.getSubject().getURI() + " " + s.getPredicate().getURI() + " " + objVal);
			if (!getDerivationLevel().equals(DERIVATION_NONE)) {
				int cnt = 0;
				//Iterator itr = infModel.getDerivation(s);
				Iterator<Derivation> itr = getDerivation(s.asTriple());
				if (itr != null) {
					while (itr.hasNext()) {
						Derivation d = (Derivation) itr.next();
						if (getDerivationLevel().equals(DERIVATION_SHALLOW)) {
							printShallowDerivationTrace(infModel.getGraph(), d, out, 0, 0, false);
						}
						else {
							// Deep level: skip derivations already printed for an earlier statement.
							if (!derivationAlreadyShown(d, seen, out, 0)) {
								// must be DERIVATION_DEEP
								if (seen == null) {
									seen = new HashSet<Derivation>();
								}
								printDeepDerivationTrace(infModel.getGraph(), d, out, true, 0, 0, seen, false);
							}
						}
						cnt++;
					}
					// Blank line between statements that had at least one derivation.
					if (cnt > 0) {
						out.print("\n");
					}
				}
			}
		}
	}
public void printShallowDerivationTrace(Graph graph, Derivation d, PrintWriter out, int indent, int level, boolean includeValue) throws InvalidDerivationException {
if (includeValue) {
out.print(derivationValueToString(d));
}
out.println(shallowDerivationToString(d, indent, level, includeValue));
}
public String shallowDerivationToString(Derivation d, int indent, int level, boolean includeValue) throws InvalidDerivationException {
StringBuffer sb = new StringBuffer();
for (int i = 0; i < indent; i++) {
sb.append(" ");
}
if (includeValue) {
sb.append(derivationValueToString(d));
sb.append(" ");
}
sb.append(" set by ");
sb.append(d.toString());
sb.append("\n");
return sb.toString();
}
	/**
	 * Recursively prints the full derivation tree for a deduced triple: the rule
	 * that concluded it, then each matched condition and (recursively) that
	 * condition's own derivations.
	 *
	 * @param infGraph     the inference graph (passed through recursion)
	 * @param d            the derivation to print
	 * @param out          destination writer
	 * @param bindings     when true, print the "was concluded by" rule line
	 * @param indent       current indentation
	 * @param level        recursion depth (0 at the root)
	 * @param seen         derivations already printed, to avoid repeating subtrees
	 * @param includeValue passed through to nested traces
	 */
	public void printDeepDerivationTrace(Graph infGraph, Derivation d, PrintWriter out, boolean bindings, int indent,
			int level, HashSet<Derivation> seen, boolean includeValue) throws InvalidDerivationException {
		if (d instanceof RuleDerivation) {
			if (bindings) {
				PrintUtil.printIndent(out, indent + 2);
				out.println((level > 0 ? "which " : "") + "was concluded by: " + d.toString() + "\n");
			}
			int margin = indent + 4;
			List<Triple> matches = ((RuleDerivation) d).getMatches();
			if (matches != null && matches.size() > 0) {
				PrintUtil.printIndent(out, indent + 2);
				out.println("based on matching conditions:\n");
				for (int i = 0; i < matches.size(); i++) {
					Triple match = matches.get(i);
					Iterator<Derivation> derivations = getDerivation(match);
					if (derivations == null || !derivations.hasNext()) {
						// Leaf condition: no further derivations behind this match.
						PrintUtil.printIndent(out, indent + 4);
						if (match == null) {
							// A primitive
							ClauseEntry term = ((RuleDerivation) d).getRule().getBodyElement(i);
							if (term instanceof Functor) {
								out.println(((Functor)term).getName() + "()");
							} else {
								out.println("call to built in");
							}
						} else {
							out.println(tripleToString(match));
						}
					} else {
						// Derived condition: print it, then recurse into its derivations.
						PrintUtil.printIndent(out, indent + 4);
						out.println(tripleToString(match));
						while (derivations.hasNext()) {
							Derivation derivation = (Derivation)derivations.next();
							if (!derivationAlreadyShown(derivation, seen, out, margin)) {
								// Record before recursing so shared subtrees print only once.
								if (seen != null && !seen.contains(derivation)) {
									seen.add(derivation);
								}
								printDeepDerivationTrace(infGraph, derivation, out, bindings, margin, level + 1, seen, includeValue);
							}
						}
					}
				}
			}
		}
		else {
			// Non-rule derivation: just print its description.
			PrintUtil.printIndent(out, indent + 2);
			out.println(d.toString());
		}
	}
public boolean derivationAlreadyShown(Derivation derivation, HashSet<Derivation> seen, PrintWriter out, int margin) {
if (seen != null && derivation instanceof RuleDerivation && seen.contains(derivation)) {
PrintUtil.printIndent(out, margin + 4);
out.println("- explanation already shown");
return true;
}
return false;
}
public String tripleToString(Statement t) {
StringBuffer sb = new StringBuffer();
sb.append(tripleSubjectToString(t.getSubject()));
sb.append(" ");
Property p = t.getPredicate();
if (p.isProperty()) {
sb.append(p.getLocalName());
}
else {
sb.append("[unexpected non-property]: " + p.toString());
}
sb.append(" ");
RDFNode n = t.getObject();
sb.append(tripleValueToString(n));
return sb.toString();
}
public String tripleSubjectToString(Resource s) {
if (!s.isAnon()) {
return s.getLocalName();
}
else {
return s.toString();
}
}
public String tripleValueToString(RDFNode n) {
StringBuffer sb = new StringBuffer();
if (n.canAs(Literal.class)) {
Object val = ((Literal)n.as(Literal.class)).getValue();
if (val instanceof String) {
sb.append("\"");
sb.append(val.toString());
sb.append("\"");
}
else {
sb.append(val.toString());
}
}
else if (n.canAs(Resource.class)){
if (n.isAnon()) {
// sb.append("<blank node>");
sb.append(n.toString());
}
else {
sb.append(((Resource)n.as(Resource.class)).getLocalName());
}
}
else {
sb.append(n.toString());
}
return sb.toString();
}
public String derivationValueToString(Derivation d) throws InvalidDerivationException {
if (d instanceof RuleDerivation) {
return tripleValueToString(((RuleDerivation)d).getConclusion().getObject());
}
throw new InvalidDerivationException("Unexpected Derivation type: " + d.toString());
}
	/**
	 * Call to begin tracing rule activity.
	 * @return true if tracing was started; currently always false because the
	 *         appender wiring below is commented out (tracing not implemented)
	 */
	private boolean startTrace() {
		// NOTE(review): the original log-appender implementation is retained
		// below, commented out, for reference.
//		if (traceAppender != null) {
//			LoggerFactory lf;
//			Logger logger = logger.getRootLogger();
//			logger.addAppender(traceAppender);
//		    LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory();
//		    try {
//		      JoranConfigurator configurator = new JoranConfigurator();
//		      configurator.setContext(lc);
//		      // the context was probably already configured by default configuration
//		      // rules
//		      lc.reset();
//		      configurator.doConfigure(args[0]);
//		    } catch (JoranException je) {
//		       je.printStackTrace();
//			return true;
		return false;
	}
	/**
	 * Call to end tracing.
	 *
	 * @return true if tracing was stopped; currently always false because the
	 *         appender wiring below is commented out (tracing not implemented)
	 */
	private boolean endTrace() {
//		if (traceAppender != null) {
////			logger.removeAppender(appender);
//			return true;
		return false;
	}
	/** Returns the timing records collected while timing collection is enabled. */
	public List<ReasonerTiming> getTimingInformation() {
		return timingInfo;
	}
	/**
	 * Enables or disables collection of timing information.
	 *
	 * @param bCollect the new setting
	 * @return the previous setting
	 */
	public boolean collectTimingInformation(boolean bCollect) {
		boolean oldVal = collectTimingInfo;
		collectTimingInfo = bCollect;
		return oldVal;
	}
	/** Returns this reasoner plugin's version string. */
	public String getReasonerVersion() {
		return version;
	}
public void setOutputFormat(String outputFmt) {
if (outputFmt != null &&
(outputFmt.equals(IConfigurationManager.N3_FORMAT) ||
outputFmt.equals(IConfigurationManager.N_TRIPLE_FORMAT) ||
outputFmt.equals(IConfigurationManager.RDF_XML_ABBREV_FORMAT) ||
outputFmt.equals(IConfigurationManager.RDF_XML_FORMAT))) {
this.outputFormat = outputFmt;
}
}
	/** Returns the currently selected RDF serialization format. */
	public String getOutputFormat() {
		return outputFormat;
	}
	/** Sets the name of the model being reasoned over. */
	protected void setModelName(String modelName) {
		this.modelName = modelName;
	}
	/** Returns the name of the model being reasoned over. */
	protected String getModelName() {
		return modelName;
	}
	/** Sets the namespace used for instance data. */
	protected void setInstanceDataNS(String instDataNS) {
		this.instDataNS = instDataNS;
	}
	/** Returns the namespace used for instance data. */
	protected String getInstanceDataNS() {
		return instDataNS;
	}
protected String getInstanceModelName() {
if (instDataNS != null) {
if (instDataNS.endsWith("
return instDataNS.substring(0, instDataNS.length() - 1);
}
return instDataNS;
}
return null;
}
	/** Public alias for {@code setInstanceDataNS(String)}. */
	public void setInstanceDataNamespace(String ns) {
		setInstanceDataNS(ns);
	}
	/** Public alias for {@code getInstanceDataNS()}. */
	public String getInstanceDataNamespace() {
		return getInstanceDataNS();
	}
	/** Not supported by this reasoner; intentionally a no-op. */
	public void setModelInputFormat(String owlModelFormat) {
		// TODO Auto-generated method stub
	}
	/**
	 * Sets the query timeout.
	 * NOTE(review): the log message says "ms" but the related configuration
	 * option is documented in seconds -- confirm which unit callers pass.
	 */
	public void setQueryTimeout(long timeout) {
		logger.info("Setting query timeout to "+timeout+" ms.");
		this.queryTimeout = timeout;
	}
public Model getInferredModel(boolean deductionsOnly) throws ConfigurationException {
prepareInfModel();
if (deductionsOnly && infModel instanceof InfModel) {
return ((InfModel)infModel).getDeductionsModel();
}
return infModel;
}
	/** No cache is maintained by this reasoner, so there is nothing to clear. */
	public boolean clearCache() throws InvalidNameException {
		return true;
	}
	/**
	 * Returns and clears the accumulated model errors (drain semantics: a
	 * second call returns null until new errors are added).
	 */
	@Override
	public List<ModelError> getErrors() {
		List<ModelError> returning = newErrors;
		newErrors = null;
		return returning;
	}
	/** Appends an error to the pending list, creating the list lazily. */
	private void addError(ModelError newError) {
		if (newErrors == null) {
			newErrors = new ArrayList<ModelError>();
		}
		newErrors.add(newError);
	}
}
|
package org.jboss.as.controller;
import java.util.ResourceBundle;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import org.jboss.as.controller.client.helpers.MeasurementUnit;
import org.jboss.as.controller.descriptions.ModelDescriptionConstants;
import org.jboss.as.controller.operations.validation.ParameterValidator;
import org.jboss.dmr.ModelNode;
import org.jboss.dmr.ModelType;
/**
 * Describes an attribute of a management resource: its name, XML form, type,
 * default value, validation rules, and how it is rendered in resource and
 * operation descriptions.
 * <p>
 * Instances are immutable; the stored default value is protected so callers
 * cannot mutate it.
 */
public abstract class AttributeDefinition {

    private final String name;
    private final String xmlName;
    private final ModelType type;
    private final boolean allowNull;
    private final boolean allowExpression;
    private final ModelNode defaultValue;
    private final MeasurementUnit measurementUnit;
    private final String[] alternatives;
    private final ParameterValidator validator;

    protected AttributeDefinition(String name, String xmlName, final ModelNode defaultValue, final ModelType type,
                                  final boolean allowNull, final boolean allowExpression, final MeasurementUnit measurementUnit,
                                  final ParameterValidator validator, final String[] alternatives) {
        this.name = name;
        this.xmlName = xmlName;
        this.type = type;
        this.allowNull = allowNull;
        this.allowExpression = allowExpression;
        // Copy and protect the default so the caller's node cannot mutate ours.
        this.defaultValue = new ModelNode();
        if (defaultValue != null) {
            this.defaultValue.set(defaultValue);
        }
        this.defaultValue.protect();
        this.measurementUnit = measurementUnit;
        this.alternatives = alternatives;
        this.validator = validator;
    }

    /** Returns the attribute's name in the management model. */
    public String getName() {
        return name;
    }

    /** Returns the attribute's name as used in XML. */
    public String getXmlName() {
        return xmlName;
    }

    /** Returns the attribute's DMR type. */
    public ModelType getType() {
        return type;
    }

    /** Returns whether an undefined value is acceptable. */
    public boolean isAllowNull() {
        return allowNull;
    }

    /** Returns whether expression strings are acceptable values. */
    public boolean isAllowExpression() {
        return allowExpression;
    }

    /** Returns the default value, or {@code null} if no default is defined. */
    public ModelNode getDefaultValue() {
        return defaultValue.isDefined() ? defaultValue : null;
    }

    /** Returns the unit of measurement, or {@code null}/{@code NONE} when not applicable. */
    public MeasurementUnit getMeasurementUnit() {
        return measurementUnit;
    }

    /** Returns the validator applied to values of this attribute. */
    public ParameterValidator getValidator() {
        return validator;
    }

    /** Returns attribute names that are mutually exclusive with this one, or {@code null}. */
    public String[] getAlternatives() {
        return alternatives;
    }

    /**
     * Gets whether the given {@code resourceModel} has a value for this attribute that should be marshalled to XML.
     * <p>
     * This is the same as {@code isMarshallable(resourceModel, true)}.
     * </p>
     *
     * @param resourceModel the model, a non-null node of {@link ModelType#OBJECT}.
     *
     * @return {@code true} if the given {@code resourceModel} has a defined value under this attribute's {@link #getName() name}.
     */
    public boolean isMarshallable(final ModelNode resourceModel) {
        return isMarshallable(resourceModel, true);
    }

    /**
     * Gets whether the given {@code resourceModel} has a value for this attribute that should be marshalled to XML.
     *
     * @param resourceModel the model, a non-null node of {@link ModelType#OBJECT}.
     * @param marshallDefault {@code true} if the value should be marshalled even if it matches the default value
     *
     * @return {@code true} if the given {@code resourceModel} has a defined value under this attribute's {@link #getName() name}
     * and {@code marshallDefault} is {@code true} or that value differs from this attribute's {@link #getDefaultValue() default value}.
     */
    public boolean isMarshallable(final ModelNode resourceModel, final boolean marshallDefault) {
        return resourceModel.hasDefined(name) && (marshallDefault || !resourceModel.get(name).equals(defaultValue));
    }

    /**
     * Finds a value in the given {@code operationObject} whose key matches this attribute's {@link #getName() name} and
     * validates it using this attribute's {@link #getValidator() validator}.
     *
     * @param operationObject model node of type {@link ModelType#OBJECT}, typically representing an operation request
     *
     * @return the value (possibly undefined; the default is validated but not returned)
     * @throws OperationFailedException if the value is not valid
     */
    public final ModelNode validateOperation(final ModelNode operationObject) throws OperationFailedException {
        ModelNode node = new ModelNode();
        if (operationObject.has(name)) {
            node.set(operationObject.get(name));
        }
        // When no value was provided, validate the default in its place.
        if (!node.isDefined() && defaultValue.isDefined()) {
            validator.validateParameter(name, defaultValue);
        } else {
            validator.validateParameter(name, node);
        }
        return node;
    }

    /**
     * Finds a value in the given {@code operationObject} whose key matches this attribute's {@link #getName() name},
     * validates it using this attribute's {@link #getValidator() validator}, and stores it under this attribute's name in the given {@code model}.
     *
     * @param operationObject model node of type {@link ModelType#OBJECT}, typically representing an operation request
     * @param model model node in which the value should be stored
     *
     * @throws OperationFailedException if the value is not valid
     */
    public final void validateAndSet(final ModelNode operationObject, final ModelNode model) throws OperationFailedException {
        ModelNode node = validateOperation(operationObject);
        model.get(name).set(node);
    }

    /**
     * Finds a value in the given {@code operationObject} whose key matches this attribute's {@link #getName() name},
     * resolves it and validates it using this attribute's {@link #getValidator() validator}. If the value is
     * undefined and a {@link #getDefaultValue() default value} is available, the default value is used.
     *
     * @param operationObject model node of type {@link ModelType#OBJECT}, typically representing an operation request
     *
     * @return the resolved value, possibly the default value if the operation does not have a defined value matching
     * this attribute's name
     * @throws OperationFailedException if the value is not valid
     */
    public final ModelNode validateResolvedOperation(final ModelNode operationObject) throws OperationFailedException {
        ModelNode node = new ModelNode();
        if (operationObject.has(name)) {
            node.set(operationObject.get(name));
        }
        if (!node.isDefined() && defaultValue.isDefined()) {
            node.set(defaultValue);
        }
        final ModelNode resolved = node.resolve();
        validator.validateParameter(name, resolved);
        return resolved;
    }

    /**
     * Returns {@code false} when the operation already defines one of this
     * attribute's mutually exclusive alternatives.
     */
    public boolean isAllowed(final ModelNode operationObject) {
        if (alternatives != null) {
            for (final String alternative : alternatives) {
                if (operationObject.has(alternative)) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * A non-nullable attribute is required unless one of its alternatives is present.
     */
    public boolean isRequired(final ModelNode operationObject) {
        // Simplified from "required ? !hasAlternative(...) : required" -- same logic.
        return !allowNull && !hasAlternative(operationObject);
    }

    /** Returns whether the operation defines any of this attribute's alternatives. */
    public boolean hasAlternative(final ModelNode operationObject) {
        if (alternatives != null) {
            for (final String alternative : alternatives) {
                if (operationObject.has(alternative)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Marshalls the value from the given {@code resourceModel} as an xml element, if it
     * {@link #isMarshallable(org.jboss.dmr.ModelNode, boolean) is marshallable}.
     *
     * @param resourceModel the model, a non-null node of {@link org.jboss.dmr.ModelType#OBJECT}.
     * @param writer stream writer to use for writing the attribute
     * @throws javax.xml.stream.XMLStreamException if thrown by {@code writer}
     */
    public abstract void marshallAsElement(final ModelNode resourceModel, final XMLStreamWriter writer) throws XMLStreamException;

    /**
     * Creates and returns a basic model node describing the attribute, after attaching it to the given overall resource
     * description model node. The node describing the attribute is returned to make it easy to perform further
     * modification.
     *
     * @param bundle resource bundle to use for text descriptions
     * @param prefix prefix to prepend to the attribute name key when looking up descriptions
     * @param resourceDescription the overall resource description
     * @return the attribute description node
     */
    public ModelNode addResourceAttributeDescription(final ResourceBundle bundle, final String prefix, final ModelNode resourceDescription) {
        final ModelNode attr = new ModelNode();
        attr.get(ModelDescriptionConstants.TYPE).set(type);
        attr.get(ModelDescriptionConstants.DESCRIPTION).set(getAttributeTextDescription(bundle, prefix));
        attr.get(ModelDescriptionConstants.EXPRESSIONS_ALLOWED).set(isAllowExpression());
        attr.get(ModelDescriptionConstants.NILLABLE).set(isAllowNull());
        if (defaultValue != null && defaultValue.isDefined()) {
            attr.get(ModelDescriptionConstants.DEFAULT).set(defaultValue);
        }
        // Bug fix: the constructor permits a null measurementUnit, so guard
        // against NPE before calling getName().
        if (measurementUnit != null && measurementUnit != MeasurementUnit.NONE) {
            attr.get(ModelDescriptionConstants.UNIT).set(measurementUnit.getName());
        }
        if (alternatives != null) {
            for (final String alternative : alternatives) {
                attr.get(ModelDescriptionConstants.ALTERNATIVES).add(alternative);
            }
        }
        final ModelNode result = resourceDescription.get(ModelDescriptionConstants.ATTRIBUTES, getName()).set(attr);
        return result;
    }

    /**
     * Creates and returns a basic model node describing a parameter that sets this attribute, after attaching it to the
     * given overall operation description model node. The node describing the parameter is returned to make it easy
     * to perform further modification.
     *
     * @param bundle resource bundle to use for text descriptions
     * @param prefix prefix to prepend to the attribute name key when looking up descriptions
     * @param operationDescription the overall resource description
     * @return the attribute description node
     */
    public ModelNode addOperationParameterDescription(final ResourceBundle bundle, final String prefix, final ModelNode operationDescription) {
        final ModelNode param = new ModelNode();
        param.get(ModelDescriptionConstants.TYPE).set(type);
        param.get(ModelDescriptionConstants.DESCRIPTION).set(getAttributeTextDescription(bundle, prefix));
        param.get(ModelDescriptionConstants.EXPRESSIONS_ALLOWED).set(isAllowExpression());
        param.get(ModelDescriptionConstants.REQUIRED).set(!isAllowNull());
        param.get(ModelDescriptionConstants.NILLABLE).set(isAllowNull());
        // Bug fix: guard against a null measurementUnit (see constructor).
        if (measurementUnit != null && measurementUnit != MeasurementUnit.NONE) {
            param.get(ModelDescriptionConstants.UNIT).set(measurementUnit.getName());
        }
        if (alternatives != null) {
            for (final String alternative : alternatives) {
                param.get(ModelDescriptionConstants.ALTERNATIVES).add(alternative);
            }
        }
        final ModelNode result = operationDescription.get(ModelDescriptionConstants.REQUEST_PROPERTIES, getName()).set(param);
        return result;
    }

    /** Looks up this attribute's description text, optionally under a key prefix. */
    public String getAttributeTextDescription(final ResourceBundle bundle, final String prefix) {
        final String bundleKey = prefix == null ? name : (prefix + "." + name);
        return bundle.getString(bundleKey);
    }
}
|
package org.koala.runnersFramework.runners.bot;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.Timer;
import java.util.TimerTask;
import ibis.ipl.ConnectionFailedException;
import ibis.ipl.IbisIdentifier;
import ibis.ipl.ReadMessage;
import ibis.ipl.ReceiveTimedOutException;
import ibis.ipl.SendPort;
import ibis.ipl.SendPortIdentifier;
import ibis.ipl.WriteMessage;
import org.koala.runnersFramework.runners.bot.listener.BatsServiceApiImpl;
public class ExecutionPhaseMaster extends Master {
	Schedule schedule;            // schedule selected at the end of the sampling phase
	long timeOfLastSchedule;      // NOTE(review): not assigned in this chunk -- confirm use
	private long actualStartTime; // NOTE(review): not assigned in this chunk -- confirm use
	private long lastReconfigTime; // millis of the last reconfiguration (set in ctor)
	private double va;            // NOTE(review): meaning not evident from this chunk
	private double ca;            // NOTE(review): meaning not evident from this chunk
	Timer timer;                  // created in the constructor; usage not visible here
	String electionName;          // Ibis election name, copied from the BoT configuration
	/**
	 * Sets up the execution-phase master from the schedule selected after
	 * sampling: computes the subset length, seeds per-cluster worker maps and
	 * node counts from the schedule, and records the starting budget.
	 *
	 * @param aBot             the overall bag-of-tasks run configuration
	 * @param selectedSchedule the schedule chosen during the sampling phase
	 */
	protected ExecutionPhaseMaster(BoTRunner aBot, Schedule selectedSchedule) throws Exception {
		super(aBot);
		electionName=aBot.electionName;
		schedule = selectedSchedule;
		// Statistical subset size derived from the zeta/delta parameters.
		double zeta_sq = bot.zeta * bot.zeta;
		bot.subsetLength = (int) Math.ceil(bot.tasks.size() * zeta_sq
				/ (zeta_sq + 2 * (bot.tasks.size() - 1) * bot.delta * bot.delta));
		/*initialize the clusters with subset jobs results obtained from LR+Sampling
		 * to smooth the merge*/
		System.out.println("Subset length is " + bot.subsetLength + " totalNumberTasks: " + totalNumberTasks);
		//bot.noInitialWorkers = bot.noSampleJobs ;
		//bot.noInitialWorkers = 1;
		Collection<Cluster> clusters = bot.Clusters.values();
		for (Cluster cluster : clusters) {
			// Fresh per-cluster worker-statistics map.
			HashMap<String, WorkerStats> workersCluster = new HashMap<String, WorkerStats>();
			workers.put(cluster.alias, workersCluster);
			cluster.setCrtNodes(0);
			cluster.setPendingNodes(0);
			// Number of machines the schedule allocates to this cluster (0 if none).
			int necn = 0;
			if (schedule.machinesPerCluster.containsKey(cluster.alias)){
				necn = schedule.machinesPerCluster.get(cluster.alias);
			}
			cluster.setNecNodes(necn);
			cluster.noATUPlan=schedule.atus;
			cluster.firstStats = true;
			cluster.sampleSetDone();
			cluster.initialTi = cluster.Ti;
			System.out.println("cluster " + cluster.alias + " : \t Ti " + cluster.Ti);
		}
		bot.budget = schedule.budget;
		timer = new Timer();
		lastReconfigTime = System.currentTimeMillis();
	}
	/**
	 * Re-queues the job of every worker whose connection was lost mid-execution:
	 * the job goes back into the task bag, the worker is marked finished, and
	 * the cluster's current-node count is decremented.
	 */
	@Override
	protected void handleLostConnections() {
		String cluster;
		String node;
		for(SendPortIdentifier lost : masterRP.lostConnections()) {
			// Cluster and node identity are encoded in the Ibis location hierarchy.
			cluster = lost.ibisIdentifier().location().getParent().toString();
			node = lost.ibisIdentifier().location().getLevel(0);
			// Only act on workers we know about that had not already finished.
			if(( workers.get(cluster).get(node) != null) && (! workers.get(cluster).get(node).isFinished())) {
				for(Job j : bot.Clusters.get(cluster).subsetJobs.values())
					if (j.getNode().compareTo(node)==0) {
						// Return the in-flight job to the bag and retire the worker.
						// (break immediately after remove avoids ConcurrentModificationException)
						bot.Clusters.get(cluster).subsetJobs.remove(j.getJobID());
						bot.tasks.add(j);
						workers.get(cluster).get(j.getNode()).workerFinished(System.currentTimeMillis());
						bot.Clusters.get(cluster).setCrtNodes(bot.Clusters.get(cluster).getCrtNodes()-1);
						Date d = new Date();
						System.err.println(d.toString() + ": Node " + node + " in cluster " + cluster +
								" failed during execution of job " + j.jobID +
								" ; cost: "
								+ (Math.ceil((double)workers.get(cluster).get(j.getNode()).getUptime() / 60000 / bot.Clusters.get(cluster).timeUnit)
								* bot.Clusters.get(cluster).costUnit));
						break;
					}
			}
		}
	}
@Override
protected boolean areWeDone() {
    /* Decide whether the whole bag of tasks has completed. If so, print the
     * final cost/time report, close the receive port and shut ibis down. */
    handleLostConnections();

    /*speed up*/
    if (bot.tasks.size() != 0) {
        return false;
    }
    Collection<Cluster> clusters = bot.Clusters.values();
    if (jobsDone != totalNumberTasks) {
        return false;
    }

    /*disable connections*/
    masterRP.disableConnections();
    /*first check whether more workers are connected*/
    for (SendPortIdentifier spi : masterRP.connectedTo()) {
        String node = spi.ibisIdentifier().location().getLevel(0);
        String cl = spi.ibisIdentifier().location().getParent().toString();
        WorkerStats ws = workers.get(cl).get(node);
        /*node connected but didn't manage to send a job request, either because it died or because it
         * was slower than the other nodes; or node did not report job result back yet*/
        if (ws == null || ws.isFinished() == false) {
            timeout = 1;
            return false;
        }
    }

    try {
        double price = 0;
        for (Cluster cluster : clusters) {
            System.out.println("Cluster " + cluster.hostname + " stats =>");
            for (WorkerStats ws : workers.get(cluster.alias).values()) {
                ws.printStats();
                // Each started ATU is charged in full.
                price += Math.ceil((double) ws.getUptime() / 60000 / cluster.timeUnit) * cluster.costUnit;
            }
        }
        System.out.println("Due amount " + price);
        long totalTime = (System.currentTimeMillis() - actualStartTime) / 1000;
        System.out.println("Application took " + totalTime + " (sec), which is about " + totalTime / 60 + "m" + totalTime % 60 + "s");
        System.out.println("Hurray! I'm done with " + jobsDone + " jobs!!!");
        masterRP.close();
        System.out.println("Hurray! I shut down masterRP!!!");
        myIbis.end();
        System.out.println("Hurray! I shut down ibis!!!");
    } catch (IOException e) {
        // Shutdown is best-effort; we still report completion below.
        e.printStackTrace();
    }
    System.out.println("Good bye!");
    return true;
}
/**
 * Recomputes each cluster's job-time estimate (Ti) and builds the knapsack
 * item list: one item per available machine of every cluster with Ti > 0.
 * Also refreshes bot.minCostATU / bot.maxCostATU and advances the
 * allStatsReady / firstTimeAllStatsReady flags once every cluster has stats.
 *
 * @param debug when true, log how many items were added per cluster
 * @return the list of knapsack items (with a sentinel entry at index 0)
 */
private ArrayList<Item> updateClusterStats(boolean debug) {
    ArrayList<Item> candidateItems = new ArrayList<Item>();
    candidateItems.add(new Item(0, 0, ""));
    bot.minCostATU = Integer.MAX_VALUE;
    bot.maxCostATU = 0;
    boolean everyClusterHasStats = true;
    for (Cluster cluster : bot.Clusters.values()) {
        /*DEBUG*/
        System.err.println("decide(): cluster " + cluster.alias);
        System.err.println("decide(): in total " + (cluster.noDoneJobs + cluster.subsetJobs.size()));
        /*added after Maricel's split!!!*/
        cluster.updateTi();
        if (cluster.Ti != 0) {
            bot.minCostATU = (int) Math.min(bot.minCostATU, cluster.costUnit);
            bot.maxCostATU += cluster.maxNodes * cluster.costUnit;
            // One knapsack item per machine: value = speed (jobs/min), weight = ATU cost.
            for (int machine = 0; machine < cluster.maxNodes; machine++) {
                candidateItems.add(new Item(1 / cluster.Ti,
                        (int) cluster.costUnit,
                        cluster.alias));
            }
            if (debug) System.err.println("Added machines from cluster " + cluster.alias + "; Items has now " + candidateItems.size());
            System.err.println("cluster " + cluster.alias + ": Ti= " + cluster.Ti);
        }
        // True only while every visited cluster has produced first statistics.
        everyClusterHasStats = everyClusterHasStats && cluster.firstStats;
    }
    if (everyClusterHasStats) {
        if ((!bot.allStatsReady) && (!bot.firstTimeAllStatsReady)) {
            bot.firstTimeAllStatsReady = true;
            bot.allStatsReady = true;
        } else if (bot.firstTimeAllStatsReady) {
            bot.allStatsReady = true;
            bot.firstTimeAllStatsReady = false;
            System.err.println("No longer first time stats");
        }
    }
    return candidateItems;
}
private void decideWithMovingAverage(boolean socketTimeout) {
// TODO Auto-generated method stub
/*compute averages*/
boolean dueTimeout = false;
long timeRightNow = System.currentTimeMillis();
if ( (timeRightNow - timeOfLastSchedule) >= timeout )
dueTimeout = true;
if(socketTimeout && (!dueTimeout)) return;
Collection<Cluster> clusters = bot.Clusters.values();
va = 0;
ca = 0;
double consumedBudget = 0;
int virtualJobsDone = 0;
double virtualJobsDoneDouble = 0.0;
double potentialJobsDone = 0.0;
ArrayList<Item> items = updateClusterStats(dueTimeout);
/*is it time to verify the configuration?*/
if((dueTimeout && bot.allStatsReady) || bot.firstTimeAllStatsReady) {
if(dueTimeout)
{System.out.println("Due timeout");}
if(bot.firstTimeAllStatsReady)
{System.out.println("First time ready!");}
System.out.println("Last reconfiguration time=" + (double)lastReconfigTime/60000);
for(Cluster cluster : clusters) {
/*compute the actual speed of this cluster using the worker stats*/
for(WorkerStats ws : workers.get(cluster.alias).values()) {
if(!ws.isFinished()) {
/*worker is idle*/
long crtUptime = ws.getUptime();
double tmp1;
int pottmp=0;
int atusBeforeReconfig = 0;
double v;
int tmp;
int noATUsPaidFor;
System.out.println("lastReconfigTime="+ lastReconfigTime + " ; startTime=" + ws.getStartTime());
if(ws.getStartTime() < lastReconfigTime) {
atusBeforeReconfig = (int) (Math.ceil((double)(lastReconfigTime-ws.getStartTime())/60000/cluster.timeUnit));
}
if(ws.getLatestJobStartTime() == 0) {
v = (double) (ws.getNoJobs()*60000000000L)/((double)ws.getRuntime());
noATUsPaidFor = (int) Math.ceil((double)crtUptime/ 60000L / cluster.timeUnit);
if(!ws.isMarked()) {
/*check if i should terminate this ATU*/
if((cluster.noATUPlan-(noATUsPaidFor-atusBeforeReconfig)) == 0) {
terminateWorker(cluster, ws, " current ATU is last");
cluster.necNodes
consumedBudget +=
noATUsPaidFor * cluster.costUnit;
long timeLeftATU = cluster.timeUnit*60000 - crtUptime%(cluster.timeUnit*60000);
tmp1=((double)timeLeftATU/60000)*v;
tmp = (int) Math.floor(((double)timeLeftATU/60000)*v);
ws.setTimeLeftATU(timeLeftATU);
System.out.println("(marked now) node " + ws.getIbisIdentifier().location().toString() +
": currentPaidForATU=" + noATUsPaidFor +
" ; timeLeftATU=" + timeLeftATU +
" ; atusBeforeReconfig=" + atusBeforeReconfig +
" ; virtualJobsDone="+tmp);
} else {
va += v;
consumedBudget += noATUsPaidFor * cluster.costUnit;
long currentPosition = crtUptime%(cluster.timeUnit*60000);
long timeLeftATU = cluster.timeUnit*60000 - currentPosition;
tmp1= ((double)timeLeftATU/60000)*v;
tmp = (int) Math.floor(((double)timeLeftATU/60000)*v);
ws.setTimeLeftATU(timeLeftATU);
ws.setSpeed(v);
ws.setFuturePoint((double)(cluster.timeUnit*60000 - currentPosition - (double)60000*tmp/v)/60000);
pottmp = (int)Math.floor(((double)(cluster.noATUPlan-(noATUsPaidFor-atusBeforeReconfig))*cluster.timeUnit
+ws.getFuturePoint())*v);
ws.setLastEstimatedUptime(noATUsPaidFor*cluster.timeUnit - ws.getFuturePoint());
System.out.println("node " + ws.getIbisIdentifier().location().toString() +
": currentPaidForATU=" + noATUsPaidFor +
" ; timeLeftATU=" + timeLeftATU +
" ; atusBeforeReconfig=" + atusBeforeReconfig +
" ; virtualJobsDone="+tmp +
" ; potentialJobsDone="+pottmp+
" ; futurePoint="+ws.getFuturePoint());
}
} else {
/*already marked*/
consumedBudget +=
noATUsPaidFor * cluster.costUnit;
long timeLeftATU = cluster.timeUnit*60000 - crtUptime%(cluster.timeUnit*60000);
tmp1=((double)timeLeftATU/60000)*v;
tmp = (int) Math.floor(((double)timeLeftATU/60000)*v);
ws.setTimeLeftATU(timeLeftATU);
System.out.println("(marked) node " + ws.getIbisIdentifier().location().toString() +
": currentPaidForATU=" + noATUsPaidFor +
" ; timeLeftATU=" + timeLeftATU +
" ; atusBeforeReconfig=" + atusBeforeReconfig +
" ; virtualJobsDone="+tmp);
}
virtualJobsDone += tmp;
virtualJobsDoneDouble += tmp1;
potentialJobsDone += pottmp;
} else {
//underExec ++;
long taui = System.nanoTime() - ws.getLatestJobStartTime();
double tauiToMin = (double)taui/60000000000L;
double intermET = cluster.estimateIntermediary(taui) / 60000000000L;
v = (double) (ws.getNoJobs()+1)/(intermET + (double)ws.getRuntime()/60000000000L);
long timeLeftATU;
long currentPosition;
if(!ws.isMarked()) {
noATUsPaidFor = (int) Math.ceil(((double)crtUptime / 60000 + intermET - tauiToMin)
/ cluster.timeUnit);
/*check if i should terminate this ATU*/
if((cluster.noATUPlan-((int) Math.ceil((double)crtUptime / 60000 / cluster.timeUnit)-atusBeforeReconfig)) == 0) {
terminateWorker(cluster, ws, " current ATU is last");
cluster.necNodes
consumedBudget += (int)(Math.ceil((double)crtUptime / 60000 / cluster.timeUnit)) * cluster.costUnit;
if((int)(Math.ceil((double)crtUptime / 60000 / cluster.timeUnit)) < noATUsPaidFor) {
timeLeftATU = 0;
noATUsPaidFor = (int) Math.ceil((double)crtUptime / 60000L / cluster.timeUnit);
} else {
timeLeftATU = cluster.timeUnit*60000 - (crtUptime + (long)((intermET-tauiToMin)*60000))%(cluster.timeUnit*60000);
}
tmp1=((double)timeLeftATU/60000)*(double) (ws.getNoJobs()+1)/(intermET + (double)ws.getRuntime()/60000000000L);
tmp = (int) Math.floor(((double)timeLeftATU/60000)*(double) (ws.getNoJobs()+1)/(intermET + (double)ws.getRuntime()/60000000000L));
System.out.println("(marked now) node " + ws.getIbisIdentifier().location().toString() +
": currentPaidForATU=" + noATUsPaidFor +
" ; timeLeftATU=" + timeLeftATU +
" ; atusBeforeReconfig=" + atusBeforeReconfig +
" ; virtualJobsDone="+tmp);
} else {
va += v;
consumedBudget += noATUsPaidFor * cluster.costUnit;
currentPosition = (crtUptime + (long)((intermET-tauiToMin)*60000))%(cluster.timeUnit*60000);
timeLeftATU = cluster.timeUnit*60000 - currentPosition;
tmp1=((double)timeLeftATU/60000)*v;
tmp = (int) Math.floor(((double)timeLeftATU/60000)*v);
ws.setTimeLeftATU(timeLeftATU);
ws.setSpeed(v);
ws.setFuturePoint((double)(timeLeftATU - (double)60000*tmp/v)/60000);
pottmp = (int)Math.floor(((double)(cluster.noATUPlan-(noATUsPaidFor-atusBeforeReconfig))*cluster.timeUnit
+ws.getFuturePoint())*v);
ws.setLastEstimatedUptime(noATUsPaidFor*cluster.timeUnit - ws.getFuturePoint());
System.out.println("node " + ws.getIbisIdentifier().location().toString() +
": currentPaidForATU=" + noATUsPaidFor +
" ; timeLeftATU=" + timeLeftATU +
" ; atusBeforeReconfig=" + atusBeforeReconfig +
" ; virtualJobsDone="+tmp +
" ; potentialJobsDone="+pottmp+
" ; futurePoint="+ws.getFuturePoint() + "; v="+v);
}
} else {
/*already marked*/
noATUsPaidFor = (int) Math.ceil((double)crtUptime/ 60000L / cluster.timeUnit);
consumedBudget += noATUsPaidFor * cluster.costUnit;
if(noATUsPaidFor < (int) Math.ceil(((double)crtUptime / 60000 + intermET - tauiToMin)
/ cluster.timeUnit)) {
timeLeftATU = 0;
} else {
timeLeftATU = cluster.timeUnit*60000 - (crtUptime + (long)((intermET-tauiToMin)*60000))%(cluster.timeUnit*60000);
}
tmp1=((double)timeLeftATU/60000)*(double) (ws.getNoJobs()+1)/(intermET + (double)ws.getRuntime()/60000000000L);
tmp = (int) Math.floor(((double)timeLeftATU/60000)*(double) (ws.getNoJobs()+1)/(intermET + (double)ws.getRuntime()/60000000000L));
ws.setTimeLeftATU(timeLeftATU);
System.out.println("(marked) node " + ws.getIbisIdentifier().location().toString() +
": currentPaidForATU=" + noATUsPaidFor +
" ; timeLeftATU=" + timeLeftATU +
" ; atusBeforeReconfig=" + atusBeforeReconfig +
" ; virtualJobsDone="+tmp);
}
virtualJobsDone += tmp;
virtualJobsDoneDouble += tmp1;
potentialJobsDone += pottmp;
}
} else {
consumedBudget += Math.ceil((double)ws.getUptime() / 60000 / cluster.timeUnit) * cluster.costUnit;
}
}
System.out.println("cluster " + cluster.alias + ": va=" + va +
"; virtualJobsDone=" + virtualJobsDone
+"; virtualJobsDoneDouble=" + virtualJobsDoneDouble
+"; potentialJobsDoneDouble=" + potentialJobsDone);
System.out.println("cluster " + cluster.alias
+ ": prevNecNodes=" + cluster.prevNecNodes
+ "; necNodes="+cluster.necNodes
+ "; crtNodes="+cluster.crtNodes
+ "; pendingNodes="+cluster.pendingNodes);
}
/*how many more minutes*/
int jobsLeft = bot.tasks.size() - /*(int)Math.floor(virtualJobsDoneDouble)*/virtualJobsDone ;
double minSinceLastReconfig = (double)(System.currentTimeMillis() - lastReconfigTime)/60000;
double etLeft = jobsLeft / va;
if(etLeft<0) etLeft=0;
/*how much more money*/
double ebLeft = 0.0; /*Math.ceil(etLeft/bot.timeUnit) * ca;*/
System.out.println("Total execution time since last reconfiguration: " + Math.ceil((minSinceLastReconfig+etLeft)/bot.timeUnit));
/* for(Cluster cluster : clusters) {
compute the actual speed of this cluster using the worker stats
for(WorkerStats ws : workers.get(cluster.alias).values()) {
if(!ws.isFinished()) {
if(!ws.isMarked()) {
expressed in ATU
int timeLeftWorker; etLeft - ws.getOffset();||cluster.Ti*ws.getSpeed()*jobsLeft/va;||etLeft-ws.getFuturePoint();
int atusBeforeReconfig=0;
!!!!might be better to subtract and then compute ceil
if(ws.getStartTime() < lastReconfigTime) {
atusBeforeReconfig = (int) (Math.ceil((double)(lastReconfigTime-ws.getStartTime())/60000)/cluster.timeUnit);
}
timeLeftWorker = cluster.noATUPlan - ((int)(
Math.ceil(ws.getLastEstimatedUptime()/cluster.timeUnit)) -
atusBeforeReconfig);
double costWorker;
if(timeLeftWorker <= 0) {
costWorker = 0;
terminateNode(cluster, ws, " current ATU is last");
cluster.necNodes --;
might need to deal with cluster.nec nodes and so on
}
else {
costWorker = timeLeftWorker*cluster.costUnit;
}
ebLeft += costWorker;
System.out.println("worker " + ws.getIbisIdentifier().location().toString()
+ ": ATUs before reconfig " + atusBeforeReconfig
+ "; estimated future cost= " + costWorker
+ "; timeLeftWorker " + timeLeftWorker);
}
}
}
}
*/
/*DEBUG*/
System.out.println("estimated number of jobs left: " + jobsLeft
+ " ; estimated time needed: " + etLeft
//+ " ; estimated budget needed: " + ebLeft
+ " ; estimated number of potential jobs: " + potentialJobsDone
+ " ; consumed budget: " + consumedBudget);
if(((jobsLeft>potentialJobsDone)/*(ebLeft > 1.0*(bot.budget-consumedBudget))*/ && bot.allStatsReady)) {
if(items.size() > 1) {
Knapsack moo = new Knapsack(items.toArray(new Item[0]),
(long)bot.budget-(long)consumedBudget, jobsLeft, bot.minCostATU,
bot.maxCostATU,(int)bot.timeUnit);
System.out.println("budget available: " + (bot.budget-consumedBudget) +
" ; number of jobs to go: " + jobsLeft +
" ; minCostATU: " + bot.minCostATU + " ; maxCostATU: " + bot.maxCostATU);
/*ItemType[] itemTypes = prepContKnap();
ContKnap moo = new ContKnap(itemTypes, bot.budget, bot.timeUnit);*/
HashMap<String, Integer> machinesPerCluster = moo.findSol();
if((!moo.success) && (schedule.extraBudget)) {
bot.budget += schedule.bDeltaN;
schedule.extraBudget = false;
moo = new Knapsack(items.toArray(new Item[0]),
(long)bot.budget-(long)consumedBudget, jobsLeft, bot.minCostATU,
bot.maxCostATU,(int)bot.timeUnit);
System.out.println("unable to find schedule with initial budget; we have to use the extra budget!");
System.out.println("budget available: " + (bot.budget-consumedBudget) +
" ; out of which bDeltaN: " + schedule.bDeltaN +
" ; number of jobs to go: " + jobsLeft +
" ; minCostATU: " + bot.minCostATU + " ; maxCostATU: " + bot.maxCostATU);
/*ItemType[] itemTypes = prepContKnap();
ContKnap moo = new ContKnap(itemTypes, bot.budget, bot.timeUnit);*/
machinesPerCluster = moo.findSol();
} else {
if(moo.success && schedule.extraBudget) {
int nnec;
boolean decreaseJobsLeftDeltaN = false;
for(Cluster cluster : clusters) {
if(machinesPerCluster.get(cluster.alias)==null)
nnec = 0;
else nnec = machinesPerCluster.get(cluster.alias).intValue();
if(cluster.necNodes > nnec) {
decreaseJobsLeftDeltaN = true;
break;
}
}
if(decreaseJobsLeftDeltaN) {
System.out.println("had to find new schedule due to jobsLeft higher than potential jobs done;"
+ "within initial budget, but less machines; we have to check effect of deltaN jobs!");
if((jobsLeft - schedule.deltaN) > potentialJobsDone) {
System.out.println("deltaN jobs doesn't cover discrepancy!!! compute new schedule!");
moo = new Knapsack(items.toArray(new Item[0]),
(long)bot.budget-(long)consumedBudget, jobsLeft, bot.minCostATU,
bot.maxCostATU,(int)bot.timeUnit);
System.out.println("budget available: " + (bot.budget-consumedBudget) +
" ; number of jobs to go: " + jobsLeft +
" ; out of which deltaN: " + schedule.deltaN +
" ; minCostATU: " + bot.minCostATU + " ; maxCostATU: " + bot.maxCostATU);
machinesPerCluster = moo.findSol();
if(!moo.success) {
System.out.println("Could not find schedule within initial budget; must use extra budget!");
bot.budget += schedule.bDeltaN;
schedule.extraBudget = false;
moo = new Knapsack(items.toArray(new Item[0]),
(long)bot.budget-(long)consumedBudget, jobsLeft, bot.minCostATU,
bot.maxCostATU,(int)bot.timeUnit);
System.out.println("budget available: " + (bot.budget-consumedBudget) +
" ; out of which bDeltaN: " + schedule.bDeltaN +
" ; number of jobs to go: " + jobsLeft +
" ; out of which deltaN: " + schedule.deltaN +
" ; minCostATU: " + bot.minCostATU + " ; maxCostATU: " + bot.maxCostATU);
}
} else {
System.out.println("Nothing changed! " +
"deltaN jobs responsible for discrepancy! " +
"budget available: " + (bot.budget-consumedBudget) +
" ; number of jobs to go: " + jobsLeft +
" ; out of which deltaN: " + schedule.deltaN +
" ; minCostATU: " + bot.minCostATU + " ; maxCostATU: " + bot.maxCostATU);
timeOfLastSchedule = System.currentTimeMillis();
return;
}
}
}
}
System.out.println("NoATUsPlan=" + moo.noATUPlan);
for(Cluster cluster : clusters) {
Integer Mi = machinesPerCluster.get(cluster.alias);
int moreWorkers = 0;
cluster.noATUPlan = moo.noATUPlan;
if(Mi == null) {
if(cluster.Ti!=0) {
Mi = new Integer(0);
} else {
continue;
}
}
lastReconfigTime = System.currentTimeMillis();
cluster.prevNecNodes = cluster.necNodes;
System.out.println("cluster " + cluster.alias
+ ": prevNecNodes=" + cluster.prevNecNodes
+ "; necNodes="+Mi.intValue()
+ "; crtNodes="+cluster.crtNodes
+ "; pendingNodes="+cluster.pendingNodes);
if(Mi.intValue() > cluster.prevNecNodes) {
if(Mi.intValue() > cluster.crtNodes + cluster.pendingNodes) {
moreWorkers = Math.min(cluster.maxNodes, Mi.intValue())
- cluster.crtNodes - cluster.pendingNodes;
if(cluster.crtNodes > cluster.prevNecNodes) {
int marked = cluster.crtNodes - cluster.prevNecNodes;
moreWorkers += marked;
}
ArrayList<WorkerStats> orderedByTimeLeftATU = new ArrayList<WorkerStats>(workers.get(cluster.alias).values());
Collections.sort(orderedByTimeLeftATU, new Comparator<WorkerStats>(){
public int compare(WorkerStats a, WorkerStats b) {
if(a.isMarked() && b.isMarked()) {
return a.timeLeftATU - b.timeLeftATU > 0 ? 1 : -1;
} else
if (a.isMarked()) return -1;
else
if (b.isMarked()) return 1;
else return a.timeLeftATU - b.timeLeftATU > 0 ? 1 : -1;
}
});
for(int i=0; i < orderedByTimeLeftATU.size(); i++) {
WorkerStats ws = orderedByTimeLeftATU.get(i);
if(ws.isMarked() && (!ws.isFinished()) && (ws.killingMe!=null)) {
if(ws.killingMe.cancel()) {
ws.unmarkTerminated();
moreWorkers
System.out.println("Will not terminate node: "
+ ws.getIbisIdentifier().location().toString());
if(moreWorkers == 0) break;
}
}
}
if(moreWorkers != 0) {
System.out.println("Could not resurect enough workers; will acquire more!");
cluster.startNodes("12:45:00", moreWorkers, bot.electionName, bot.poolName, bot.serverAddress);
cluster.setPendingNodes(cluster.pendingNodes + moreWorkers);
/*DEBUG*/
System.out.println("Cluster " + cluster.alias + ": started " + moreWorkers + " more workers.");
}
}
/*!in testing!*/
else {
int keepWorkers = Mi.intValue() - cluster.prevNecNodes;
ArrayList<WorkerStats> orderedByTimeLeftATU = new ArrayList<WorkerStats>(workers.get(cluster.alias).values());
Collections.sort(orderedByTimeLeftATU, new Comparator<WorkerStats>(){
public int compare(WorkerStats a, WorkerStats b) {
if(a.isMarked() && b.isMarked()) {
return a.timeLeftATU - b.timeLeftATU > 0 ? 1 : -1;
} else
if (a.isMarked()) return -1;
else
if (b.isMarked()) return 1;
else return a.timeLeftATU - b.timeLeftATU > 0 ? 1 : -1;
}
});
for(int i=0; i < orderedByTimeLeftATU.size(); i++) {
WorkerStats ws = orderedByTimeLeftATU.get(i);
if(ws.isMarked() && (!ws.isFinished()) && (ws.killingMe!=null)) {
if(ws.killingMe.cancel()) {
ws.unmarkTerminated();
keepWorkers
System.out.println("Will not terminate node: "
+ ws.getIbisIdentifier().location().toString());
if(keepWorkers == 0) break;
}
}
}
if(keepWorkers != 0) {
System.out.println("Trouble!!!!!! Could not resurect enough workers; should reacquire them!");
}
}
cluster.necNodes = Mi.intValue();
} else if(Mi.intValue() < cluster.prevNecNodes) {
cluster.necNodes = Mi.intValue();
if(cluster.necNodes < cluster.crtNodes + cluster.pendingNodes) {
System.out.println("Terminate nodes on cluster " + cluster.alias);
selectTerminatedWorkers(cluster);
}
}
/*DEBUG*/
System.out.println("cluster " + cluster.alias +
"-> new necessary number of workers: " + cluster.necNodes);
}
}
else System.out.println("No cluster stats available yet");
}
/*DEBUG*/
else System.out.println("Nothing changed");
timeOfLastSchedule = System.currentTimeMillis();
}
}
@SuppressWarnings("unchecked")
private void selectTerminatedWorkers(Cluster cluster) {
if(cluster.crtNodes <= cluster.necNodes) {
/*i need to get rid of only extra pending nodes*/
cluster.pendingNodes = cluster.necNodes - cluster.crtNodes;
return;
}
int howManyCrtNodes = cluster.prevNecNodes - cluster.necNodes;
ArrayList<WorkerStats> orderedByArrival = new ArrayList(workers.get(cluster.alias).values());
Collections.sort(orderedByArrival, new Comparator<WorkerStats>(){
public int compare(WorkerStats a, WorkerStats b) {
return a.timestamp - b.timestamp;
}
});
for(int i=orderedByArrival.size()-1; i>=0; i
WorkerStats ws = orderedByArrival.get(i);
/*could select the terminated workers based on other criterion*/
if((!ws.isFinished()) && (!ws.isMarked())) {
/*5*60000 should be replaced by a function of Ti*/
/*long timeLeftATU = cluster.timeUnit*60000 - ws.getUptime()%(cluster.timeUnit*60000) - 5*60000;
if(timeLeftATU < 0) {
try {
myIbis.registry().signal("die", ws.getIbisIdentifier());
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else {
timer.schedule(new MyTimerTask(myIbis,ws.getIbisIdentifier()),timeLeftATU);
}
ws.markTerminated();
System.out.println("Selected node " + ws.getIbisIdentifier().location().toString() + " for termination");
*/
terminateWorker(cluster, ws, " selected by scheduler");
howManyCrtNodes
if(howManyCrtNodes == 0) break;
}
}
return;
}
/**
 * Arranges for the given worker to be shut down at the end of its current
 * paid accounting time unit, keeping a one-minute safety margin. If less
 * than a minute of the ATU remains, the node is terminated immediately;
 * otherwise a timer task is scheduled for the remaining time.
 *
 * @param cluster cluster owning the worker (provides timeUnit and termination)
 * @param ws      statistics record of the worker to retire
 * @param reason  human-readable reason, appended to the log line
 */
public void terminateWorker(Cluster cluster, WorkerStats ws, String reason) {
    long atuMillis = cluster.timeUnit * 60000;
    long timeLeftATU = atuMillis - ws.getUptime() % atuMillis - 60000;
    long now = 0;
    TimerTask killTask = null;
    if (timeLeftATU <= 0) {
        try {
            timeLeftATU = 0;
            now = System.currentTimeMillis();
            cluster.terminateNode(ws.getIbisIdentifier(), myIbis);
        } catch (IOException e) {
            // Best-effort termination; the worker is still marked below.
            e.printStackTrace();
        }
    } else {
        now = System.currentTimeMillis();
        killTask = new MyTimerTask(cluster, ws.getIbisIdentifier(), myIbis);
        timer.schedule(killTask, timeLeftATU);
    }
    ws.markTerminated(killTask);
    ws.setTimeToDie(now + timeLeftATU);
    System.out.println("Node " + ws.getIbisIdentifier().location().toString() + " will terminate after current ATU due to "
            + reason);
}
/**
 * Single scheduling-decision entry point; currently delegates to the
 * moving-average reconfiguration strategy.
 *
 * @param socketTimeout true when triggered by a receive timeout rather than a job result
 */
private void decide(boolean socketTimeout) {
decideWithMovingAverage(socketTimeout);
}
/**
 * Handles a job request from a newly connected (or reacquired) worker node:
 * registers or refreshes its statistics entry, updates the cluster's node
 * counters, and hands out the next job — or a NoJob when the worker is
 * surplus (no pending slot) or the task queue is empty.
 *
 * FIX vs. the original source: in the dismissal branch the WorkerStats for
 * the node may not exist yet (entries are only created below for accepted
 * workers), so it is null-checked before calling terminateWorker, which
 * would otherwise throw a NullPointerException.
 *
 * @param from identifier of the requesting worker's ibis instance
 * @return the next Job to execute, or a NoJob sentinel
 */
@Override
protected Job handleJobRequest(IbisIdentifier from) {
    /* For now, when a new worker shows up, if there are no more jobs just return nojob.
     *TODO: Should consider later addition of job replication. */
    String cluster = from.location().getParent().toString();
    String node = from.location().getLevel(0);
    /*DEBUG*/
    System.err.println("job request from node " + from.location().toString() + " in cluster " + cluster);
    /*might be changed to allow execution for one ATU, since it has already started
     * for now we do not consider it, and behave as if we do not pay*/
    if (bot.Clusters.get(cluster).pendingNodes == 0) {
        /*DEBUG*/
        System.err.println("node " + from.location().toString() + " in cluster " + cluster
                + " has been dismissed right after registration with master");
        /* we do not use the VM, so better immedeately remove it from the pool
         * and let the customer pay for this VM anyway
         */
        bot.decrementUserCredit(1.0);
        WorkerStats dismissed = workers.get(cluster).get(node);
        if (dismissed != null) { // FIX: unknown nodes have no stats entry yet
            terminateWorker(bot.Clusters.get(cluster), dismissed, " worker was dismissed");
        }
        return new NoJob();
    }
    Cluster owningCluster = bot.Clusters.get(cluster);
    WorkerStats reacquiredMachine = workers.get(cluster).get(node);
    long startTime = System.currentTimeMillis();
    if (reacquiredMachine == null) {
        // First request from this node: create and initialise its stats entry.
        WorkerStats stats = new WorkerStats(node, startTime, from);
        workers.get(cluster).put(node, stats);
        stats.setIbisIdentifier(from);
        owningCluster.timestamp++;
        if (owningCluster.timestamp > bot.noInitialWorkers)
            stats.setOffset((double) owningCluster.timeUnit - ((startTime - this.actualStartTime) % (owningCluster.timeUnit * 60000)) / 60000);
        else
            stats.setInitialWorker(true);
        stats.timestamp = owningCluster.timestamp;
        stats.noATUPlan = owningCluster.noATUPlan;
    } else {
        // Node seen before (released earlier, now reacquired): refresh its bookkeeping.
        reacquiredMachine.reacquire(owningCluster.timeUnit, startTime);
        reacquiredMachine.setIbisIdentifier(from);
        reacquiredMachine.setOffset((double) owningCluster.timeUnit - ((startTime - this.actualStartTime) % (owningCluster.timeUnit * 60000)) / 60000);
        owningCluster.timestamp++;
        reacquiredMachine.timestamp = owningCluster.timestamp;
        reacquiredMachine.noATUPlan = owningCluster.noATUPlan;
    }
    owningCluster.setCrtNodes(owningCluster.getCrtNodes() + 1);
    owningCluster.setPendingNodes(owningCluster.getPendingNodes() - 1);
    /*release unnecessary workers*/
    if (releaseNow(cluster, from)) {
        return new NoJob();
    }
    return findNextJob(cluster, from);
}
/**
 * Records a finished job reported by a worker, updates its statistics,
 * triggers a scheduling decision, and either releases the worker (when no
 * tasks remain) or hands it its next job.
 *
 * @param received the job result message
 * @param from     identifier of the reporting worker
 * @return the next Job, or a NoJob sentinel when the worker is released
 */
@Override
protected Job handleJobResult(JobResult received, IbisIdentifier from) {
    String clusterName = from.location().getParent().toString();
    System.err.println(from.location().toString() + " returned result of job " + received.getJobID() + " executed for (sec)" + received.getStats().getRuntime() / 1000000000);
    /* assumes jobs don't need to be replicated on the same cluster, except on failure */
    jobsDone++;
    workers.get(clusterName).get(from.location().getLevel(0)).addJobStats(received.getStats().getRuntime());
    // just counting the finished tasks so far.
    bot.finishedTasks.add(bot.Clusters.get(clusterName).getJob(received));
    bot.Clusters.get(clusterName).doneJob(received);
    decide(false);
    /*release unnecessary workers*/
    if (releaseNow(clusterName, from)) {
        return new NoJob();
    }
    return findNextJob(clusterName, from);
}
/**
 * Releases the given worker when the task queue is empty: marks it finished,
 * clears its in-flight job marker, and decrements the cluster's node count.
 *
 * @param cluster alias of the cluster the worker belongs to
 * @param to      identifier of the worker being considered for release
 * @return true when the worker was released (caller should send a NoJob)
 */
private boolean releaseNow(String cluster, IbisIdentifier to) {
    /*could check whether "black-listed" machine and, if too little of ATU is left
     * get rid of it now and maybe even cancel the respective timer*/
    /*if enough of ATU is left could replicate jobs sent to other "dying" workers*/
    if (bot.tasks.size() != 0) {
        return false;
    }
    System.err.println("We say goodbye to " + to.location().toString() + " from " + this.getClass().getName());
    String node = to.location().getLevel(0);
    WorkerStats stats = workers.get(cluster).get(node);
    stats.workerFinished(System.currentTimeMillis());
    stats.setLatestJobStartTime(0);
    Cluster owningCluster = bot.Clusters.get(cluster);
    owningCluster.setCrtNodes(owningCluster.getCrtNodes() - 1);
    return true;
}
/**
 * Picks a random pending task for the given worker, timestamps it, and
 * registers it as in-flight on the worker's cluster (also feeding the
 * sampling-points map while it is below the subset length).
 *
 * @param cluster alias of the cluster the worker belongs to
 * @param from    identifier of the worker that will run the job
 * @return the job handed out
 */
private Job findNextJob(String cluster, IbisIdentifier from) {
    int pick = random.nextInt(bot.tasks.size());
    Job nextJob = bot.tasks.remove(pick);
    /*the fact that pending jobs are timed from master side (hence including the latency to the worker) should
     * be mentioned and should also have some impact on the convergence speed of the histogram in those cases where
     * the job size is somewhat equal to this latency.
     * */
    nextJob.startTime = System.nanoTime();
    workers.get(cluster).get(from.location().getLevel(0)).setLatestJobStartTime(nextJob.startTime);
    Cluster owningCluster = bot.Clusters.get(cluster);
    owningCluster.subsetJobs.put(nextJob.jobID, nextJob);
    if (owningCluster.samplingPoints.size() < bot.subsetLength) {
        owningCluster.samplingPoints.put(nextJob.jobID, nextJob);
    }
    /* might be the case that even here I return sayGB() */
    return nextJob;
}
/**
 * Recomputes the money spent across all clusters (each started ATU charged
 * in full), pushes the total and the completed-task count into the front-end
 * service state, and charges the user for the delta since the last update.
 */
void updateFrontEndCache()
{
    double price = 0;
    for (Cluster cluster : bot.Clusters.values()) {
        for (WorkerStats ws : workers.get(cluster.alias).values()) {
            price += Math.ceil((double) ws.getUptime() / 60000 / cluster.timeUnit) * cluster.costUnit;
        }
    }
    double previouslySpent = BatsServiceApiImpl.serviceState.moneySpent;
    BatsServiceApiImpl.serviceState.moneySpent = price + BatsServiceApiImpl.serviceState.moneySpentSampling;
    bot.decrementUserCredit(BatsServiceApiImpl.serviceState.moneySpent - previouslySpent);
    BatsServiceApiImpl.serviceState.noCompletedTasks = bot.finishedTasks.size();
}
@Override
public void run() {
// TODO Auto-generated method stub
timeOfLastSchedule = System.currentTimeMillis();
timeout = 5* 60000; /*(long) (BoTRunner.INITIAL_TIMEOUT_PERCENT * bot.deadline * 60000);*/
System.err.println("Timeout is now " + timeout);
actualStartTime = System.currentTimeMillis();
boolean undone = true;
boolean socketTimeout = false;
while (undone) {
try {
socketTimeout = false;
ReadMessage rm = masterRP.receive(30000);
Object received = rm.readObject();
IbisIdentifier from = rm.origin().ibisIdentifier();
rm.finish();
Job nextJob = null;
if (received instanceof JobRequest) {
nextJob = handleJobRequest(from);
} else if (received instanceof JobResult) {
nextJob = handleJobResult((JobResult) received, from);
} else {
throw new RuntimeException("received "
+ "an object which is not JobRequest or JobResult:" + received);
}
nextJob.setNode(from.location().getLevel(0));
/*end for hpdc tests*/
/* !!! don't forget to decrease the number of crt nodes*/
Process p = c.startNodes(/* deadline2ResTime() */"12:45:00",
|
package org.hisp.dhis.datavalue.hibernate;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hibernate.Criteria;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Restrictions;
import org.hisp.dhis.common.IdentifiableObjectUtils;
import org.hisp.dhis.common.MapMap;
import org.hisp.dhis.commons.util.TextUtils;
import org.hisp.dhis.dataelement.CategoryOptionGroup;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryOption;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.dataelement.DataElementOperand;
import org.hisp.dhis.datavalue.DataValue;
import org.hisp.dhis.datavalue.DataValueStore;
import org.hisp.dhis.datavalue.DeflatedDataValue;
import org.hisp.dhis.jdbc.StatementBuilder;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodStore;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.system.util.DateUtils;
import org.hisp.dhis.system.util.MathUtils;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.support.rowset.SqlRowSet;
import com.google.api.client.util.Sets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.hisp.dhis.common.IdentifiableObjectUtils.getIdentifiers;
/**
* @author Torgeir Lorange Ostby
*/
/**
 * Hibernate/JDBC-backed implementation of {@link DataValueStore}.
 * <p>
 * Simple CRUD operations go through the Hibernate session; the bulk read
 * methods ({@link #getDeflatedDataValues}, {@link #sumRecursiveDeflatedDataValues},
 * {@link #getDataValueMapByAttributeCombo}) issue raw SQL through
 * {@link JdbcTemplate} for performance.
 * <p>
 * Soft deletion: rows carry a {@code deleted} flag; all read methods filter on
 * {@code deleted = false} unless explicitly fetching soft-deleted values.
 *
 * @author Torgeir Lorange Ostby
 */
public class HibernateDataValueStore
    implements DataValueStore
{
    private static final Log log = LogFactory.getLog( HibernateDataValueStore.class );

    // -------------------------------------------------------------------------
    // Dependencies (setter-injected by Spring)
    // -------------------------------------------------------------------------

    private SessionFactory sessionFactory;

    public void setSessionFactory( SessionFactory sessionFactory )
    {
        this.sessionFactory = sessionFactory;
    }

    private PeriodStore periodStore;

    public void setPeriodStore( PeriodStore periodStore )
    {
        this.periodStore = periodStore;
    }

    private JdbcTemplate jdbcTemplate;

    public void setJdbcTemplate( JdbcTemplate jdbcTemplate )
    {
        this.jdbcTemplate = jdbcTemplate;
    }

    private StatementBuilder statementBuilder;

    public void setStatementBuilder( StatementBuilder statementBuilder )
    {
        this.statementBuilder = statementBuilder;
    }

    // -------------------------------------------------------------------------
    // Basic DataValue
    // -------------------------------------------------------------------------

    /**
     * Persists the given data value. The period is re-attached to the current
     * session (and force-added if missing) before saving.
     */
    @Override
    public void addDataValue( DataValue dataValue )
    {
        dataValue.setPeriod( periodStore.reloadForceAddPeriod( dataValue.getPeriod() ) );

        sessionFactory.getCurrentSession().save( dataValue );
    }

    /**
     * Updates the given data value. The period is re-attached to the current
     * session (and force-added if missing) before updating.
     */
    @Override
    public void updateDataValue( DataValue dataValue )
    {
        dataValue.setPeriod( periodStore.reloadForceAddPeriod( dataValue.getPeriod() ) );

        sessionFactory.getCurrentSession().update( dataValue );
    }

    /**
     * Hard-deletes all data values registered for the given organisation unit.
     */
    @Override
    public void deleteDataValues( OrganisationUnit organisationUnit )
    {
        String hql = "delete from DataValue d where d.source = :source";

        sessionFactory.getCurrentSession().createQuery( hql ).
            setEntity( "source", organisationUnit ).executeUpdate();
    }

    /**
     * Hard-deletes all data values registered for the given data element.
     */
    @Override
    public void deleteDataValues( DataElement dataElement )
    {
        String hql = "delete from DataValue d where d.dataElement = :dataElement";

        sessionFactory.getCurrentSession().createQuery( hql )
            .setEntity( "dataElement", dataElement ).executeUpdate();
    }

    /**
     * Returns the single non-deleted data value matching the full logical key,
     * or null if no match exists or the period is not persisted.
     */
    @Override
    public DataValue getDataValue( DataElement dataElement, Period period, OrganisationUnit source,
        DataElementCategoryOptionCombo categoryOptionCombo, DataElementCategoryOptionCombo attributeOptionCombo )
    {
        Session session = sessionFactory.getCurrentSession();

        Period storedPeriod = periodStore.reloadPeriod( period );

        if ( storedPeriod == null )
        {
            return null; // Period not persisted, so no value can exist for it
        }

        return (DataValue) session.createCriteria( DataValue.class )
            .add( Restrictions.eq( "dataElement", dataElement ) )
            .add( Restrictions.eq( "period", storedPeriod ) )
            .add( Restrictions.eq( "source", source ) )
            .add( Restrictions.eq( "categoryOptionCombo", categoryOptionCombo ) )
            .add( Restrictions.eq( "attributeOptionCombo", attributeOptionCombo ) )
            .add( Restrictions.eq( "deleted", false ) )
            .uniqueResult();
    }

    /**
     * Returns the soft-deleted counterpart of the given data value (same
     * logical key, {@code deleted = true}), or null if none exists.
     */
    @Override
    public DataValue getSoftDeletedDataValue( DataValue dataValue )
    {
        Session session = sessionFactory.getCurrentSession();

        Period storedPeriod = periodStore.reloadPeriod( dataValue.getPeriod() );

        if ( storedPeriod == null )
        {
            return null;
        }

        return (DataValue) session.createCriteria( DataValue.class )
            .add( Restrictions.eq( "dataElement", dataValue.getDataElement() ) )
            .add( Restrictions.eq( "period", storedPeriod ) )
            .add( Restrictions.eq( "source", dataValue.getSource() ) )
            .add( Restrictions.eq( "categoryOptionCombo", dataValue.getCategoryOptionCombo() ) )
            .add( Restrictions.eq( "attributeOptionCombo", dataValue.getAttributeOptionCombo() ) )
            .add( Restrictions.eq( "deleted", true ) )
            .uniqueResult();
    }

    // -------------------------------------------------------------------------
    // Collections of DataValues
    // -------------------------------------------------------------------------

    /**
     * Returns all non-deleted data values.
     */
    @Override
    @SuppressWarnings( "unchecked" )
    public List<DataValue> getAllDataValues()
    {
        return sessionFactory.getCurrentSession()
            .createCriteria( DataValue.class )
            .add( Restrictions.eq( "deleted", false ) )
            .list();
    }

    /**
     * Returns non-deleted data values matching the given criteria. Each empty
     * criterion collection is ignored (no restriction); if all three are
     * empty, an empty list is returned rather than the whole table.
     */
    @Override
    @SuppressWarnings( "unchecked" )
    public List<DataValue> getDataValues( Collection<DataElement> dataElements,
        Collection<Period> periods, Collection<OrganisationUnit> organisationUnits )
    {
        // Re-attach periods to the current session; java.util.HashSet used
        // directly instead of the third-party Sets utility.
        Set<Period> storedPeriods = new HashSet<>();

        for ( Period period : periods )
        {
            storedPeriods.add( periodStore.reloadPeriod( period ) );
        }

        if ( dataElements.isEmpty() && storedPeriods.isEmpty() && organisationUnits.isEmpty() )
        {
            return new ArrayList<>();
        }

        Criteria criteria = sessionFactory.getCurrentSession()
            .createCriteria( DataValue.class )
            .add( Restrictions.eq( "deleted", false ) );

        if ( !dataElements.isEmpty() )
        {
            criteria.add( Restrictions.in( "dataElement", dataElements ) );
        }

        if ( !storedPeriods.isEmpty() )
        {
            criteria.add( Restrictions.in( "period", storedPeriods ) );
        }

        if ( !organisationUnits.isEmpty() )
        {
            criteria.add( Restrictions.in( "source", organisationUnits ) );
        }

        return criteria.list();
    }

    /**
     * Returns non-deleted data values for the given source, period, data
     * elements and attribute option combo. Returns an empty list if the
     * period is not persisted or no data elements are given.
     */
    @Override
    @SuppressWarnings( "unchecked" )
    public List<DataValue> getDataValues( OrganisationUnit source, Period period,
        Collection<DataElement> dataElements, DataElementCategoryOptionCombo attributeOptionCombo )
    {
        Period storedPeriod = periodStore.reloadPeriod( period );

        if ( storedPeriod == null || dataElements == null || dataElements.isEmpty() )
        {
            return new ArrayList<>();
        }

        Session session = sessionFactory.getCurrentSession();

        return session.createCriteria( DataValue.class )
            .add( Restrictions.in( "dataElement", dataElements ) )
            .add( Restrictions.eq( "period", storedPeriod ) )
            .add( Restrictions.eq( "source", source ) )
            .add( Restrictions.eq( "attributeOptionCombo", attributeOptionCombo ) )
            .add( Restrictions.eq( "deleted", false ) )
            .list();
    }

    /**
     * Returns deflated (lightweight, id-based) data values for the given data
     * element, optional category option combo, periods and sources, using raw
     * SQL for performance. Rows with a null value are skipped.
     * <p>
     * NOTE(review): the SQL is built by string concatenation. The interpolated
     * values are internally generated integer ids, so injection risk is low,
     * but a parameterized query would be safer and is preferred.
     */
    @Override
    public List<DeflatedDataValue> getDeflatedDataValues( DataElement dataElement, DataElementCategoryOptionCombo categoryOptionCombo,
        Collection<Period> periods, Collection<OrganisationUnit> sources )
    {
        List<DeflatedDataValue> result = new ArrayList<>();

        Collection<Integer> periodIdList = IdentifiableObjectUtils.getIdentifiers( periods );
        List<Integer> sourceIdList = IdentifiableObjectUtils.getIdentifiers( sources );

        Integer dataElementId = dataElement.getId();

        String sql = "select categoryoptioncomboid, attributeoptioncomboid, value, " +
            "sourceid, periodid, storedby, created, lastupdated, comment, followup " +
            "from datavalue " +
            "where dataelementid=" + dataElementId + " " +
            ( ( categoryOptionCombo == null ) ? "" : ( "and categoryoptioncomboid=" + categoryOptionCombo.getId() + " " ) ) +
            "and sourceid in (" + TextUtils.getCommaDelimitedString( sourceIdList ) + ") " +
            "and periodid in (" + TextUtils.getCommaDelimitedString( periodIdList ) + ") " +
            "and deleted is false";

        SqlRowSet rowSet = jdbcTemplate.queryForRowSet( sql );

        while ( rowSet.next() )
        {
            Integer categoryOptionComboId = rowSet.getInt( 1 );
            Integer attributeOptionComboId = rowSet.getInt( 2 );
            String value = rowSet.getString( 3 );
            Integer sourceId = rowSet.getInt( 4 );
            Integer periodId = rowSet.getInt( 5 );
            String storedBy = rowSet.getString( 6 );
            Date created = rowSet.getDate( 7 );
            Date lastUpdated = rowSet.getDate( 8 );
            String comment = rowSet.getString( 9 );
            boolean followup = rowSet.getBoolean( 10 );

            if ( value != null )
            {
                DeflatedDataValue dv = new DeflatedDataValue( dataElementId, periodId, sourceId,
                    categoryOptionComboId, attributeOptionComboId, value,
                    storedBy, created, lastUpdated,
                    comment, followup );

                result.add( dv );
            }
        }

        return result;
    }

    /**
     * Sums numeric data values for the given data element (and optional
     * category option combo) over the subtree rooted at {@code source},
     * grouped by category/attribute option combo and period. Uses the
     * organisation unit path prefix to match descendants in SQL.
     * <p>
     * NOTE(review): SQL built by concatenation; {@code sourcePrefix} is a
     * stored path string — assumed not to contain quote characters. A
     * parameterized query would be safer.
     */
    @Override
    public List<DeflatedDataValue> sumRecursiveDeflatedDataValues(
        DataElement dataElement, DataElementCategoryOptionCombo categoryOptionCombo,
        Collection<Period> periods, OrganisationUnit source )
    {
        List<DeflatedDataValue> result = new ArrayList<>();

        Collection<Integer> periodIdList = IdentifiableObjectUtils.getIdentifiers( periods );

        Integer dataElementId = dataElement.getId();
        String sourcePrefix = source.getPath();
        Integer sourceId = source.getId();

        // Cast is needed because the value column is stored as text.
        String castType = statementBuilder.getDoubleColumnType();

        String sql = "select dataelementid, categoryoptioncomboid, attributeoptioncomboid, periodid, " +
            "sum(cast(value as "+castType+")) as value " +
            "from datavalue, organisationunit " +
            "where dataelementid=" + dataElementId + " " +
            "and sourceid = organisationunitid " +
            ((categoryOptionCombo == null) ? "" :
                ("and categoryoptioncomboid=" + categoryOptionCombo.getId() + " ")) +
            "and path like '" + sourcePrefix + "%' " +
            "and periodid in (" + TextUtils.getCommaDelimitedString( periodIdList ) + ") " +
            "and deleted is false " +
            "group by dataelementid, categoryoptioncomboid, attributeoptioncomboid, periodid";

        SqlRowSet rowSet = jdbcTemplate.queryForRowSet( sql );

        while ( rowSet.next() )
        {
            Integer categoryOptionComboId = rowSet.getInt( 2 );
            Integer attributeOptionComboId = rowSet.getInt( 3 );
            Integer periodId = rowSet.getInt( 4 );
            String value = rowSet.getString( 5 );

            if ( value != null )
            {
                // All sums are attributed to the requested root source id.
                DeflatedDataValue dv = new DeflatedDataValue( dataElementId, periodId, sourceId,
                    categoryOptionComboId, attributeOptionComboId, value );

                result.add( dv );
            }
        }

        log.debug("sumRecursiveDeflatedDataValues: " + result.size() + " results from \"" + sql + "\"");

        return result;
    }

    /**
     * Returns the number of non-deleted data values updated on or after the
     * given date.
     */
    @Override
    public int getDataValueCountLastUpdatedAfter( Date date )
    {
        Criteria criteria = sessionFactory.getCurrentSession()
            .createCriteria( DataValue.class )
            .add( Restrictions.ge( "lastUpdated", date ) )
            .add( Restrictions.eq( "deleted", false ) )
            .setProjection( Projections.rowCount() );

        Number rs = (Number) criteria.uniqueResult();

        return rs != null ? rs.intValue() : 0;
    }

    /**
     * Returns a map of attribute option combo id to (operand, value) for data
     * values whose period contains the given date, applying the given
     * dimension constraints. When several periods of different lengths match,
     * the value for the shortest period wins. Optionally fills
     * {@code lastUpdatedMap} with the last-updated timestamp per entry.
     * <p>
     * NOTE(review): SQL built by concatenation from internal ids and a
     * formatted date; a parameterized query would be safer.
     */
    @Override
    public MapMap<Integer, DataElementOperand, Double> getDataValueMapByAttributeCombo( Collection<DataElement> dataElements, Date date,
        OrganisationUnit source, Collection<PeriodType> periodTypes, DataElementCategoryOptionCombo attributeCombo,
        Set<CategoryOptionGroup> cogDimensionConstraints, Set<DataElementCategoryOption> coDimensionConstraints,
        MapMap<Integer, DataElementOperand, Date> lastUpdatedMap )
    {
        MapMap<Integer, DataElementOperand, Double> map = new MapMap<>();

        // An empty (non-null) constraint set means "nothing allowed": return early.
        if ( dataElements.isEmpty() || periodTypes.isEmpty()
            || ( cogDimensionConstraints != null && cogDimensionConstraints.isEmpty() )
            || ( coDimensionConstraints != null && coDimensionConstraints.isEmpty() ) )
        {
            return map;
        }

        String joinCo = coDimensionConstraints == null && cogDimensionConstraints == null ? StringUtils.EMPTY :
            "join categoryoptioncombos_categoryoptions c_c on dv.attributeoptioncomboid = c_c.categoryoptioncomboid ";

        String joinCog = cogDimensionConstraints == null ? StringUtils.EMPTY :
            "join categoryoptiongroupmembers cogm on c_c.categoryoptionid = cogm.categoryoptionid ";

        String whereCo = coDimensionConstraints == null ? StringUtils.EMPTY :
            "and c_c.categoryoptionid in (" + TextUtils.getCommaDelimitedString( getIdentifiers( coDimensionConstraints ) ) + ") ";

        String whereCog = cogDimensionConstraints == null ? StringUtils.EMPTY :
            "and cogm.categoryoptiongroupid in (" + TextUtils.getCommaDelimitedString( getIdentifiers( cogDimensionConstraints ) ) + ") ";

        String whereCombo = attributeCombo == null ? StringUtils.EMPTY :
            "and dv.attributeoptioncomboid = " + attributeCombo.getId() + " ";

        String sql = "select de.uid, coc.uid, dv.attributeoptioncomboid, dv.value, dv.lastupdated, p.startdate, p.enddate " +
            "from datavalue dv " +
            "join dataelement de on dv.dataelementid = de.dataelementid " +
            "join categoryoptioncombo coc on dv.categoryoptioncomboid = coc.categoryoptioncomboid " +
            "join period p on p.periodid = dv.periodid " + joinCo + joinCog +
            "where dv.dataelementid in (" + TextUtils.getCommaDelimitedString( getIdentifiers( dataElements ) ) + ") " +
            "and dv.sourceid = " + source.getId() + " " +
            "and p.startdate <= '" + DateUtils.getMediumDateString( date ) + "' " +
            "and p.enddate >= '" + DateUtils.getMediumDateString( date ) + "' " +
            "and p.periodtypeid in (" + TextUtils.getCommaDelimitedString( getIds( periodTypes ) ) + ") " +
            "and dv.deleted is false " +
            whereCo + whereCog + whereCombo;

        SqlRowSet rowSet = jdbcTemplate.queryForRowSet( sql );

        // Tracks the period length already recorded per entry, so a longer
        // period never overwrites a shorter (more specific) one.
        MapMap<Integer, DataElementOperand, Long> checkForDuplicates = new MapMap<>();

        while ( rowSet.next() )
        {
            String dataElement = rowSet.getString( 1 );
            String categoryOptionCombo = rowSet.getString( 2 );
            Integer attributeOptionComboId = rowSet.getInt( 3 );
            Double value = MathUtils.parseDouble( rowSet.getString( 4 ) );
            Date lastUpdated = rowSet.getDate( 5 );
            Date periodStartDate = rowSet.getDate( 6 );
            Date periodEndDate = rowSet.getDate( 7 );
            long periodInterval = periodEndDate.getTime() - periodStartDate.getTime();

            if ( value != null )
            {
                DataElementOperand dataElementOperand = new DataElementOperand( dataElement, categoryOptionCombo );

                Long existingPeriodInterval = checkForDuplicates.getValue( attributeOptionComboId, dataElementOperand );

                if ( existingPeriodInterval != null && existingPeriodInterval < periodInterval )
                {
                    continue; // Do not overwrite the previous value if for a shorter interval
                }

                map.putEntry( attributeOptionComboId, dataElementOperand, value );

                if ( lastUpdatedMap != null )
                {
                    lastUpdatedMap.putEntry( attributeOptionComboId, dataElementOperand, lastUpdated );
                }

                checkForDuplicates.putEntry( attributeOptionComboId, dataElementOperand, periodInterval );
            }
        }

        return map;
    }

    /**
     * Collects the database ids of the given period types.
     */
    private Set<Integer> getIds( Collection<PeriodType> periodTypes )
    {
        Set<Integer> ids = new HashSet<>();

        for ( PeriodType pt : periodTypes )
        {
            ids.add( pt.getId() );
        }

        return ids;
    }
}
|
package liquibase.migrator.exception;
import liquibase.migrator.ChangeSet;
import liquibase.migrator.parser.ValidateChangeLogHandler;
import liquibase.migrator.preconditions.FailedPrecondition;
import java.io.PrintStream;
import java.util.List;
import java.util.Set;
public class ValidationFailedException extends MigrationFailedException {
private static final long serialVersionUID = 1L;
private List<ChangeSet> invalidMD5Sums;
private List<FailedPrecondition> failedPreconditions;
private Set<ChangeSet> duplicateChangeSets;
private List<SetupException> setupExceptions;
public ValidationFailedException(ValidateChangeLogHandler changeLogHandler) {
this.invalidMD5Sums = changeLogHandler.getInvalidMD5Sums();
this.failedPreconditions = changeLogHandler.getFailedPreconditions();
this.duplicateChangeSets = changeLogHandler.getDuplicateChangeSets();
this.setupExceptions = changeLogHandler.getSetupExceptions();
}
public String getMessage() {
StringBuffer message = new StringBuffer();
message.append("Validation Failed:");
if (invalidMD5Sums.size() > 0) {
message.append(invalidMD5Sums.size()).append(" change sets failed MD5Sum Check");
for (int i=0; i< invalidMD5Sums.size(); i++) {
if (i > 25) {
break;
}
ChangeSet invalid = invalidMD5Sums.get(i);
message.append(" ").append(invalid.toString(true));
}
}
if (failedPreconditions.size() > 0) {
message.append(failedPreconditions.size()).append(" preconditions failed");
for (FailedPrecondition invalid : failedPreconditions) {
message.append(" ").append(invalid.toString());
}
}
if (duplicateChangeSets.size() > 0) {
message.append(duplicateChangeSets.size()).append(" change sets had duplicate identifiers");
for (ChangeSet invalid : duplicateChangeSets) {
message.append(" ").append(invalid.toString(false));
}
}
if(setupExceptions.size() >0){
message.append(setupExceptions.size()).append(" changes have failures");
for (SetupException invalid : setupExceptions) {
message.append(" ").append(invalid.toString());
}
}
return message.toString();
}
public List<ChangeSet> getInvalidMD5Sums() {
return invalidMD5Sums;
}
public void printDescriptiveError(PrintStream out) {
out.println("Validation Error: ");
if (invalidMD5Sums.size() > 0) {
out.println(" "+invalidMD5Sums.size()+" change sets have changed since they were ran against the database");
for (ChangeSet changeSet : invalidMD5Sums) {
out.println(" "+changeSet.toString(false));
}
}
if (failedPreconditions.size() > 0) {
out.println(" "+failedPreconditions.size()+" preconditions failed");
for (FailedPrecondition failedPrecondition : failedPreconditions) {
out.println(" "+failedPrecondition.toString());
}
}
if (duplicateChangeSets.size() > 0) {
out.println(" "+duplicateChangeSets.size()+" change sets had duplicate identifiers");
for (ChangeSet duplicate : duplicateChangeSets) {
out.println(" "+duplicate.toString(false));
}
}
if(setupExceptions.size() >0) {
out.println(" "+setupExceptions.size()+" changes had errors");
for (SetupException setupEx : setupExceptions) {
out.println(" "+setupEx.getMessage());
}
}
}
}
|
package com.zaihuishou.expandablerecycleradapter.adapter;
import com.zaihuishou.expandablerecycleradapter.model.ExpandableListItem;
import com.zaihuishou.expandablerecycleradapter.viewholder.AbstractAdapterItem;
import com.zaihuishou.expandablerecycleradapter.viewholder.AbstractExpandableAdapterItem;
import com.zaihuishou.expandablerecycleradapter.viewholder.AdapterItemUtil;
import com.zaihuishou.expandablerecycleradapter.viewholder.BaseAdapterItem;
import android.support.annotation.NonNull;
import android.support.v4.util.ArrayMap;
import android.support.v7.widget.RecyclerView;
import android.view.ViewGroup;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* this adapter is implementation of RecyclerView.Adapter
* creater: zaihuishou
* create time: 7/13/16.
* author email:tanzhiqiang.cathy@gmail.com
*/
public abstract class BaseExpandableAdapter extends RecyclerView.Adapter implements AbstractExpandableAdapterItem.ParentListItemExpandCollapseListener {
protected List<Object> mDataList;
private Object mItemType;
private AdapterItemUtil mUtil = new AdapterItemUtil();
private ExpandCollapseListener mExpandCollapseListener;
private List<RecyclerView> mRecyclerViewList;
public void setExpandCollapseListener(ExpandCollapseListener expandCollapseListener) {
mExpandCollapseListener = expandCollapseListener;
}
protected BaseExpandableAdapter(List data) {
if (data == null) return;
this.mDataList = data;
checkDefaultExpand();
mRecyclerViewList = new ArrayList<>();
}
/**
* check has item is expanded by default
*/
private void checkDefaultExpand() {
ArrayMap<Object, List<?>> childArrayMap = new ArrayMap<>();
Iterator<Object> iterator = mDataList.iterator();
while (iterator.hasNext()) {
Object next = iterator.next();
if (next instanceof ExpandableListItem) {
ExpandableListItem expandableListItem = (ExpandableListItem) next;
if (expandableListItem.isExpanded()) {
List<?> childItemList = expandableListItem.getChildItemList();
if (childItemList != null && !childItemList.isEmpty()) {
childArrayMap.put(next, childItemList);
}
}
}
}
int size = childArrayMap.size();
if (size == 0) return;
for (int i = 0; i < size; i++) {
Object o = childArrayMap.keyAt(i);
List<?> objects = childArrayMap.valueAt(i);
int indexOf = mDataList.indexOf(o);
mDataList.addAll(indexOf + 1, objects);
}
}
@Override
public int getItemCount() {
return mDataList == null ? 0 : mDataList.size();
}
/**
* @return data list
*/
public List<?> getDataList() {
return mDataList;
}
/**
* notifyDataSetChanged
*
* @param data items
*/
public void updateData(@NonNull List<Object> data) {
if (data != null && !data.isEmpty()) {
mDataList = data;
checkDefaultExpand();
notifyDataSetChanged();
}
}
/**
* add an item
*
* @param position intem index
* @param o item
*/
public void addItem(int position, Object o) {
if (isDataListNotEmpty() && position >= 0) {
mDataList.add(position, o);
notifyItemInserted(position);
}
}
/**
* add an item
*
* @param o item object
*/
public void addItem(Object o) {
if (isDataListNotEmpty()) {
mDataList.add(o);
int size = mDataList.size();
notifyItemInserted(size - 1);
}
}
/**
* add items
*
* @param position index
* @param objects list objects
*/
public void addRangeItem(int position, List<Object> objects) {
if (isDataListNotEmpty() && position <= mDataList.size() && position >= 0) {
mDataList.addAll(position, objects);
notifyItemRangeInserted(position, position + objects.size());
}
}
/**
* modify an exit item
*
* @param position index
* @param newObj the new object
*/
public void modifyItem(int position, Object newObj) {
if (isDataListNotEmpty() && position < mDataList.size() && position >= 0) {
mDataList.set(position, newObj);
notifyItemChanged(position);
}
}
/**
* remove item
*
* @param position index
*/
public void removedItem(int position) {
if (isDataListNotEmpty() && position < mDataList.size() && position >= 0) {
Object o = mDataList.get(position);
for (int i = position - 1; i >= 0; i
Object o1 = mDataList.get(i);
if (o1 instanceof ExpandableListItem) {
List<?> childItemList = ((ExpandableListItem) o1).getChildItemList();
final int size = childItemList.size();
for (int j = 0; j < size; j++) {
Object o2 = childItemList.get(j);
if (o == o2) {
childItemList.remove(j);
break;
}
}
}
}
mDataList.remove(position);
notifyItemRemoved(position);
if (position != mDataList.size()) {
notifyItemRangeChanged(position - 1, mDataList.size() - 1);
}
}
}
private boolean isDataListNotEmpty() {
return mDataList != null && !mDataList.isEmpty();
}
@Override
public void onParentListItemCollapsed(int position) {
Object o = mDataList.get(position);
if (o instanceof ExpandableListItem) {
collapseParentListItem((ExpandableListItem) o, position, true);
}
}
/**
* expand parent item
*
* @param position The index of the item in the list being expanded
*/
@Override
public void onParentListItemExpanded(int position) {
try {
Object o = mDataList.get(position);
if (o instanceof ExpandableListItem) {
expandParentListItem((ExpandableListItem) o, position, true, false);
}
} catch (IndexOutOfBoundsException e) {
}
}
/**
* @param expandableListItem {@link ExpandableListItem}
* @param parentIndex item index
*/
private void collapseParentListItem(ExpandableListItem expandableListItem, int parentIndex, boolean collapseTriggeredByListItemClick) {
if (expandableListItem.isExpanded()) {
List<?> childItemList = expandableListItem.getChildItemList();
if (childItemList != null && !childItemList.isEmpty()) {
notifyItemExpandedOrCollapsed(parentIndex, false);
int childListItemCount = childItemList.size();
for (int i = childListItemCount - 1; i >= 0; i
int index = parentIndex + i + 1;
Object o = mDataList.get(index);
if (o instanceof ExpandableListItem) {
ExpandableListItem parentListItem;
try {
parentListItem = (ExpandableListItem) o;
collapseParentListItem(parentListItem, index, false);
} catch (Exception e) {
e.printStackTrace();
}
}
mDataList.remove(index);
}
notifyItemRangeRemoved(parentIndex + 1, childListItemCount);
expandableListItem.setExpanded(false);
notifyItemRangeChanged(parentIndex + 1, mDataList.size() - parentIndex - 1);
}
if (collapseTriggeredByListItemClick && mExpandCollapseListener != null) {
int expandedCountBeforePosition = getExpandedItemCount(parentIndex);
mExpandCollapseListener.onListItemCollapsed(parentIndex - expandedCountBeforePosition);
}
}
}
/**
* notify item state changed
*/
private void notifyItemExpandedOrCollapsed(int parentIndex, boolean isExpand) {
if (mRecyclerViewList != null && !mRecyclerViewList.isEmpty()) {
RecyclerView recyclerView = mRecyclerViewList.get(0);
BaseAdapterItem viewHolderForAdapterPosition = (BaseAdapterItem) recyclerView.findViewHolderForAdapterPosition(parentIndex);
try {
AbstractAdapterItem<Object> item = viewHolderForAdapterPosition.getItem();
if (item != null && item instanceof AbstractExpandableAdapterItem) {
AbstractExpandableAdapterItem abstractExpandableAdapterItem = (AbstractExpandableAdapterItem) item;
abstractExpandableAdapterItem.onExpansionToggled(isExpand);
}
} catch (Exception e) {
}
}
}
/**
* Collapses all parents in the list.
*/
public void collapseAllParents() {
if (mDataList != null && !mDataList.isEmpty()) {
ArrayList<Object> expandableListItems = getParents(true);
if (expandableListItems != null && !expandableListItems.isEmpty()) {
final int expandedItemSize = expandableListItems.size();
if (expandedItemSize > 0) {
for (int i = 0; i < expandedItemSize; i++) {
Object o = expandableListItems.get(i);
int indexOf = mDataList.indexOf(o);
if (indexOf >= 0)
collapseParentListItem((ExpandableListItem) o, indexOf, false);
}
}
}
}
}
/**
* @return return all parents
*/
@NonNull
private ArrayList<Object> getParents(boolean isExpanded) {
final int size = mDataList.size();
ArrayList<Object> expandableListItems = new ArrayList<>();
for (int i = 0; i < size; i++) {
Object o = mDataList.get(i);
if (o instanceof ExpandableListItem) {
ExpandableListItem expandableListItem = (ExpandableListItem) o;
if (isExpanded) {
if (expandableListItem.isExpanded())
expandableListItems.add(o);
} else {
if (!expandableListItem.isExpanded())
expandableListItems.add(o);
}
}
}
return expandableListItems;
}
/**
* expand index item
*
* @param parentIndex The index of the parent to collapse
*/
protected void expandParentListItem(ExpandableListItem expandableListItem, int parentIndex, boolean expansionTriggeredByListItemClick, boolean isExpandAllChildren) {
if (!expandableListItem.isExpanded()) {
List<?> childItemList = expandableListItem.getChildItemList();
if (childItemList != null && !childItemList.isEmpty()) {
expandableListItem.setExpanded(true);
int childListItemCount = childItemList.size();
for (int i = 0; i < childListItemCount; i++) {
Object o = childItemList.get(i);
int newIndex = parentIndex + i + 1;
mDataList.add(newIndex, o);
notifyItemInserted(newIndex);
if (isExpandAllChildren)
if (o instanceof ExpandableListItem) {
// notifyItemInserted(newIndex);
// if (parentIndex != mDataList.size() - 1)
// notifyItemRangeChanged(parentIndex + 1, mDataList.size() - parentIndex - 1);
expandParentListItem((ExpandableListItem) o, newIndex, expansionTriggeredByListItemClick, isExpandAllChildren);
}
}
// notifyItemRangeInserted(parentIndex + 1, childListItemCount);
int positionStart = parentIndex + childListItemCount;
if (parentIndex != mDataList.size() - 1)
notifyItemRangeChanged(positionStart, mDataList.size() - positionStart);
// notifyItemExpandedOrCollapsed(parentIndex, true);
}
if (expansionTriggeredByListItemClick && mExpandCollapseListener != null) {
int expandedCountBeforePosition = getExpandedItemCount(parentIndex);
mExpandCollapseListener.onListItemExpanded(parentIndex - expandedCountBeforePosition);
}
}
}
/**
* expand specified parent item
*
* @param parentIndex The index of the parent to expand
*/
public void expandParent(int parentIndex) {
if (mDataList != null && !mDataList.isEmpty() && parentIndex >= 0 && parentIndex < mDataList.size()) {
Object o = mDataList.get(parentIndex);
if (o instanceof ExpandableListItem) {
expandParentListItem((ExpandableListItem) o, parentIndex, false, false);
}
}
}
/**
* expand all parents item
*/
public void expandAllParents() {
ArrayList<Object> expandableListItems = getParents(false);
if (expandableListItems != null && !expandableListItems.isEmpty()) {
final int expandedItemSize = expandableListItems.size();
if (expandedItemSize > 0) {
for (int i = 0; i < expandedItemSize; i++) {
Object o = expandableListItems.get(i);
int indexOf = mDataList.indexOf(o);
if (indexOf >= 0)
expandParentListItem((ExpandableListItem) o, indexOf, false, true);
}
}
}
}
// private void expandOrCollaspeParents(boolean isExpand) {
/**
* Gets the number of expanded child list items before the specified position.
*
* @param position The index before which to return the number of expanded
* child list items
* @return The number of expanded child list items before the specified position
*/
private int getExpandedItemCount(int position) {
if (position == 0) {
return 0;
}
int expandedCount = 0;
for (int i = 0; i < position; i++) {
Object listItem = getListItem(i);
if (!(listItem instanceof ExpandableListItem)) {
expandedCount++;
}
}
return expandedCount;
}
/**
* Gets the list item held at the specified adapter position.
*
* @param position The index of the list item to return
* @return The list item at the specified position
*/
protected Object getListItem(int position) {
boolean indexInRange = position >= 0 && position < mDataList.size();
if (indexInRange) {
return mDataList.get(position);
} else {
return null;
}
}
@Override
public long getItemId(int position) {
return position;
}
/**
* instead by{@link #getItemViewType(Object)}
*
* @param position item index
* @return item view type
*/
@Deprecated
@Override
public int getItemViewType(int position) {
mItemType = getItemViewType(mDataList.get(position));
return mUtil.getIntType(mItemType);
}
public Object getItemViewType(Object t) {
return -1;
}
@NonNull
public abstract AbstractAdapterItem<Object> getItemView(Object type);
@Override
public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
return new BaseAdapterItem(parent.getContext(), parent, getItemView(mItemType));
}
@Override
public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) {
BaseAdapterItem rcvHolder = (BaseAdapterItem) holder;
Object object = mDataList.get(position);
if (object instanceof ExpandableListItem) {
AbstractExpandableAdapterItem abstractParentAdapterItem = (AbstractExpandableAdapterItem) rcvHolder.getItem();
abstractParentAdapterItem.setParentListItemExpandCollapseListener(this);
}
(rcvHolder).getItem().onUpdateViews(mDataList.get(position), position);
}
@Override
public void onAttachedToRecyclerView(RecyclerView recyclerView) {
super.onAttachedToRecyclerView(recyclerView);
mRecyclerViewList.add(recyclerView);
}
@Override
public void onDetachedFromRecyclerView(RecyclerView recyclerView) {
super.onDetachedFromRecyclerView(recyclerView);
mRecyclerViewList.remove(recyclerView);
}
public interface ExpandCollapseListener {
/**
* Called when a list item is expanded.
*
* @param position The index of the item in the list being expanded
*/
void onListItemExpanded(int position);
/**
* Called when a list item is collapsed.
*
* @param position The index of the item in the list being collapsed
*/
void onListItemCollapsed(int position);
}
}
|
package org.eclipse.persistence.testing.models.employee.relational;
import java.util.*;
import org.eclipse.persistence.testing.models.employee.domain.*;
import org.eclipse.persistence.descriptors.ClassDescriptor;
import org.eclipse.persistence.sessions.*;
import org.eclipse.persistence.tools.schemaframework.*;
import org.eclipse.persistence.queries.*;
import org.eclipse.persistence.expressions.*;
import org.eclipse.persistence.testing.framework.TestSystem;
/**
* <b>Purpose</b>: To define system behavior.
* <p><b>Responsibilities</b>: <ul>
* <li> Login and return an initialize database session.
* <li> Create and populate the database.
* </ul>
*/
public class EmployeeSystem extends TestSystem {
protected static boolean useFastTableCreatorAfterInitialCreate = Boolean
.getBoolean("eclipselink.test.toggle-fast-table-creator");
public org.eclipse.persistence.sessions.Project project;
/**
* Use the default EmployeeProject.
*/
/**
 * Use the default EmployeeProject.
 */
public EmployeeSystem() {
    this.project = new EmployeeProject();
}
/**
 * Recreates the employee schema on the given session: drops the existing
 * table constraints first, then drops and recreates all tables via
 * {@link EmployeeTableCreator#replaceTables}.
 */
public void createTables(DatabaseSession session) {
    dropTableConstraints(session);
    new EmployeeTableCreator().replaceTables(session);
}
/**
* Drop table constraints
*/
public void dropTableConstraints(Session session) {
if (!SchemaManager.FAST_TABLE_CREATOR && !useFastTableCreatorAfterInitialCreate) {
if (session.getLogin().getPlatform().isOracle()) {
try {
session.executeNonSelectingCall(new SQLCall("drop table PHONE CASCADE CONSTRAINTS"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("drop table RESPONS CASCADE CONSTRAINTS"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("drop table SALARY CASCADE CONSTRAINTS"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("drop table PROJ_EMP CASCADE CONSTRAINTS"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("drop table LPROJECT CASCADE CONSTRAINTS"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("drop table PROJECT CASCADE CONSTRAINTS"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("drop table EMPLOYEE CASCADE CONSTRAINTS"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("drop table ADDRESS CASCADE CONSTRAINTS"));
} catch (Exception e) {
}
} else {
try {
session.executeNonSelectingCall(new SQLCall("drop table PHONE"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("drop table RESPONS"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("drop table SALARY"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("drop table PROJ_EMP"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("drop table LPROJECT"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("drop table PROJECT"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("drop table EMPLOYEE"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("drop table ADDRESS"));
} catch (Exception e) {
}
}
} else {
try {
session.executeNonSelectingCall(new SQLCall("DELETE FROM PHONE"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("DELETE FROM RESPONS"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("DELETE FROM SALARY"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("DELETE FROM PROJ_EMP"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("DELETE FROM LPROJECT"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("DELETE FROM PROJECT"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("DELETE FROM EMPLOYEE"));
} catch (Exception e) {
}
try {
session.executeNonSelectingCall(new SQLCall("DELETE FROM ADDRESS"));
} catch (Exception e) {
}
}
}
/**
* This method demonstrates how a descriptor can be modified after being read with it's project (INI Files).
* The properties of the PhoneNumber's Descriptor provide this method name to be called after the descriptor is built.
* . Add a defined query which will retrieve all phone numbers with area code 613 (local Ottawa numbers).
*/
public static void modifyPhoneDescriptor(ClassDescriptor descriptor) {
// Add a predefined query for retrieving numbers with 613 area code.
ExpressionBuilder builder = new ExpressionBuilder();
ReadAllQuery query = new ReadAllQuery(PhoneNumber.class, builder);
Expression exp = builder.get("id").equal(builder.getParameter("ID"));
query.setSelectionCriteria(exp.and(builder.get("areaCode").equal("613")));
query.addArgument("ID");
descriptor.getQueryManager().addQuery("localNumbers", query);
}
public void addDescriptors(DatabaseSession session) {
if (project == null) {
project = new EmployeeProject();
}
session.addDescriptors(project);
}
/**
* This method will instantiate all of the example instances and insert them into the database
* using the given session.
*/
public void populate(DatabaseSession session) {
EmployeePopulator system = new EmployeePopulator();
UnitOfWork unitOfWork = session.acquireUnitOfWork();
system.buildExamples();
Vector allObjects = new Vector();
PopulationManager.getDefaultManager().addAllObjectsForClass(Employee.class, allObjects);
PopulationManager.getDefaultManager().addAllObjectsForClass(SmallProject.class, allObjects);
PopulationManager.getDefaultManager().addAllObjectsForClass(LargeProject.class, allObjects);
unitOfWork.registerAllObjects(allObjects);
unitOfWork.commit();
}
}
|
package com.orientechnologies.orient.core;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryNotificationInfo;
import java.lang.management.MemoryPoolMXBean;
import java.lang.management.MemoryType;
import java.util.Collection;
import java.util.concurrent.CopyOnWriteArrayList;
import javax.management.Notification;
import javax.management.NotificationEmitter;
import javax.management.NotificationListener;
import com.orientechnologies.common.io.OFileUtils;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.common.profiler.OProfiler;
import com.orientechnologies.common.profiler.OProfiler.OProfilerHookValue;
import com.orientechnologies.orient.core.OMemoryWatchDog.Listener.TYPE;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
/**
* This memory warning system will call the listener when we exceed the percentage of available memory specified. There should only
* be one instance of this object created, since the usage threshold can only be set to one number.
*/
public class OMemoryWatchDog {
  private final Collection<Listener>    listeners      = new CopyOnWriteArrayList<Listener>();
  private static final MemoryPoolMXBean tenuredGenPool = findTenuredGenPool();
  // volatile: incremented inside the (synchronized) notification handler but
  // read without synchronization by the profiler hook thread.
  private volatile int                  alertTimes     = 0;

  /** Callback invoked when memory usage crosses the configured threshold. */
  public interface Listener {
    public enum TYPE {
      OS, JVM
    }

    /**
     * Execute a soft free of memory resources.
     *
     * @param iType
     *          OS or JVM
     * @param iUsedMemory
     *          Current used memory
     * @param iMaxMemory
     *          Max memory
     */
    public void memoryUsageLow(TYPE iType, long iUsedMemory, long iMaxMemory);

    /**
     * Execute a hard free of memory resources.
     *
     * @param iType
     *          OS or JVM
     * @param iUsedMemory
     *          Current used memory
     * @param iMaxMemory
     *          Max memory
     */
    public void memoryUsageCritical(TYPE iType, long iUsedMemory, long iMaxMemory);
  }

  /**
   * Create the memory watch dog with the default memory threshold.
   * Registers a JMX notification listener on the tenured-generation pool and a
   * profiler hook exposing the number of alerts raised so far.
   *
   * @param iThreshold fraction (0..1] of tenured-pool capacity that triggers the alert
   */
  public OMemoryWatchDog(final float iThreshold) {
    OMemoryWatchDog.setPercentageUsageThreshold(iThreshold);
    final MemoryMXBean memBean = ManagementFactory.getMemoryMXBean();
    final NotificationEmitter memEmitter = (NotificationEmitter) memBean;
    memEmitter.addNotificationListener(new NotificationListener() {
      public synchronized void handleNotification(Notification n, Object hb) {
        if (n.getType().equals(MemoryNotificationInfo.MEMORY_THRESHOLD_EXCEEDED)) {
          alertTimes++;
          long maxMemory = tenuredGenPool.getUsage().getMax();
          long usedMemory = tenuredGenPool.getUsage().getUsed();
          long freeMemory = maxMemory - usedMemory;
          if (OLogManager.instance().isDebugEnabled())
            OLogManager.instance().debug(this,
                "Free memory is low %s %s%% (used %s of %s), calling listeners to free memory in SOFT way...",
                OFileUtils.getSizeAsString(freeMemory), freeMemory * 100 / maxMemory, OFileUtils.getSizeAsString(usedMemory),
                OFileUtils.getSizeAsString(maxMemory));
          final long timer = OProfiler.getInstance().startChrono();
          // First pass: ask every listener for a SOFT (cheap) release.
          for (Listener listener : listeners) {
            try {
              listener.memoryUsageLow(TYPE.JVM, usedMemory, maxMemory);
            } catch (Exception e) {
              e.printStackTrace();
            }
          }
          long threshold;
          do {
            // INVOKE GC AND WAIT A BIT
            freeMemory(100);
            // RECHECK IF MEMORY IS OK NOW
            maxMemory = tenuredGenPool.getUsage().getMax();
            usedMemory = tenuredGenPool.getUsage().getUsed();
            freeMemory = maxMemory - usedMemory;
            threshold = (long) (maxMemory * (1 - OGlobalConfiguration.MEMORY_OPTIMIZE_THRESHOLD.getValueAsFloat()));
            if (OLogManager.instance().isDebugEnabled())
              OLogManager.instance().debug(this, "Free memory now is %s %s%% (used %s of %s) with threshold for HARD clean is %s",
                  OFileUtils.getSizeAsString(freeMemory), freeMemory * 100 / maxMemory, OFileUtils.getSizeAsString(usedMemory),
                  OFileUtils.getSizeAsString(maxMemory), OFileUtils.getSizeAsString(threshold));
            if (freeMemory < threshold) {
              if (OLogManager.instance().isDebugEnabled())
                OLogManager
                    .instance()
                    .debug(
                        this,
                        "Free memory is low %s %s%% (used %s of %s) while the threshold is %s, calling listeners to free memory in HARD way...",
                        OFileUtils.getSizeAsString(freeMemory), freeMemory * 100 / maxMemory,
                        OFileUtils.getSizeAsString(usedMemory), OFileUtils.getSizeAsString(maxMemory),
                        OFileUtils.getSizeAsString(threshold));
              // Second pass: HARD release, repeated until under threshold.
              for (Listener listener : listeners) {
                try {
                  listener.memoryUsageCritical(TYPE.JVM, usedMemory, maxMemory);
                } catch (Exception e) {
                  e.printStackTrace();
                }
              }
            }
          } while (freeMemory < threshold);
          OProfiler.getInstance().stopChrono("OMemoryWatchDog.freeResources", timer);
        }
      }
    }, null, null);
    OProfiler.getInstance().registerHookValue("memory.alerts", new OProfilerHookValue() {
      public Object getValue() {
        return alertTimes;
      }
    });
  }

  public Collection<Listener> getListeners() {
    return listeners;
  }

  /** Registers the listener and returns it for call chaining. */
  public Listener addListener(Listener listener) {
    listeners.add(listener);
    return listener;
  }

  public boolean removeListener(Listener listener) {
    return listeners.remove(listener);
  }

  /**
   * Sets the usage threshold on the tenured pool as a fraction of its maximum.
   *
   * @throws IllegalArgumentException if percentage is not in (0, 1]
   */
  public static void setPercentageUsageThreshold(double percentage) {
    if (percentage <= 0.0 || percentage > 1.0) {
      throw new IllegalArgumentException("Percentage not in range");
    }
    long maxMemory = tenuredGenPool.getUsage().getMax();
    long warningThreshold = (long) (maxMemory * percentage);
    tenuredGenPool.setUsageThreshold(warningThreshold);
  }

  /**
   * Tenured Space Pool can be determined by it being of type HEAP and by it being possible to set the usage threshold.
   */
  private static MemoryPoolMXBean findTenuredGenPool() {
    for (MemoryPoolMXBean pool : ManagementFactory.getMemoryPoolMXBeans()) {
      // I don't know whether this approach is better, or whether
      // we should rather check for the pool name "Tenured Gen"?
      if (pool.getType() == MemoryType.HEAP && pool.isUsageThresholdSupported()) {
        return pool;
      }
    }
    throw new AssertionError("Could not find tenured space");
  }

  /**
   * Requests a GC and optionally sleeps to give it time to run.
   *
   * @param iDelayTime milliseconds to sleep after the GC request; 0 to skip
   */
  public static void freeMemory(final long iDelayTime) {
    // INVOKE GC AND WAIT A BIT
    System.gc();
    if (iDelayTime > 0)
      try {
        Thread.sleep(iDelayTime);
      } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can still observe the
        // interruption (it was previously swallowed silently).
        Thread.currentThread().interrupt();
      }
  }
}
|
package org.innovateuk.ifs.project.core.transactional;
import org.innovateuk.ifs.application.domain.Application;
import org.innovateuk.ifs.application.resource.FundingDecision;
import org.innovateuk.ifs.commons.error.Error;
import org.innovateuk.ifs.commons.service.BaseFailingOrSucceedingResult;
import org.innovateuk.ifs.commons.service.ServiceResult;
import org.innovateuk.ifs.fundingdecision.domain.FundingDecisionStatus;
import org.innovateuk.ifs.organisation.domain.Organisation;
import org.innovateuk.ifs.organisation.mapper.OrganisationMapper;
import org.innovateuk.ifs.organisation.resource.OrganisationResource;
import org.innovateuk.ifs.project.core.domain.PartnerOrganisation;
import org.innovateuk.ifs.project.core.domain.Project;
import org.innovateuk.ifs.project.core.domain.ProjectParticipantRole;
import org.innovateuk.ifs.project.core.domain.ProjectUser;
import org.innovateuk.ifs.project.core.workflow.configuration.ProjectWorkflowHandler;
import org.innovateuk.ifs.project.financechecks.transactional.FinanceChecksGenerator;
import org.innovateuk.ifs.project.financechecks.workflow.financechecks.configuration.EligibilityWorkflowHandler;
import org.innovateuk.ifs.project.financechecks.workflow.financechecks.configuration.ViabilityWorkflowHandler;
import org.innovateuk.ifs.project.grantofferletter.configuration.workflow.GrantOfferLetterWorkflowHandler;
import org.innovateuk.ifs.project.monitor.domain.ProjectMonitoringOfficer;
import org.innovateuk.ifs.project.monitor.repository.ProjectMonitoringOfficerRepository;
import org.innovateuk.ifs.project.projectdetails.workflow.configuration.ProjectDetailsWorkflowHandler;
import org.innovateuk.ifs.project.resource.ProjectResource;
import org.innovateuk.ifs.project.resource.ProjectUserResource;
import org.innovateuk.ifs.project.spendprofile.configuration.workflow.SpendProfileWorkflowHandler;
import org.innovateuk.ifs.project.spendprofile.transactional.CostCategoryTypeStrategy;
import org.innovateuk.ifs.user.domain.ProcessRole;
import org.innovateuk.ifs.user.domain.User;
import org.innovateuk.ifs.user.resource.Role;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Stream.concat;
import static org.innovateuk.ifs.commons.error.CommonErrors.badRequestError;
import static org.innovateuk.ifs.commons.error.CommonErrors.notFoundError;
import static org.innovateuk.ifs.commons.error.CommonFailureKeys.*;
import static org.innovateuk.ifs.commons.service.ServiceResult.*;
import static org.innovateuk.ifs.project.core.domain.ProjectParticipantRole.PROJECT_PARTNER;
import static org.innovateuk.ifs.util.CollectionFunctions.*;
import static org.innovateuk.ifs.util.EntityLookupCallbacks.find;
import static org.springframework.http.HttpStatus.NOT_FOUND;
@Service
public class ProjectServiceImpl extends AbstractProjectServiceImpl implements ProjectService {
    @Autowired
    private OrganisationMapper organisationMapper;
    @Autowired
    private ProjectDetailsWorkflowHandler projectDetailsWorkflowHandler;
    @Autowired
    private ViabilityWorkflowHandler viabilityWorkflowHandler;
    @Autowired
    private EligibilityWorkflowHandler eligibilityWorkflowHandler;
    @Autowired
    private GrantOfferLetterWorkflowHandler golWorkflowHandler;
    @Autowired
    private ProjectWorkflowHandler projectWorkflowHandler;
    @Autowired
    private CostCategoryTypeStrategy costCategoryTypeStrategy;
    @Autowired
    private FinanceChecksGenerator financeChecksGenerator;
    @Autowired
    private SpendProfileWorkflowHandler spendProfileWorkflowHandler;
    @Autowired
    private ProjectMonitoringOfficerRepository projectMonitoringOfficerRepository;

    /** Looks up a project by id and maps it to a resource. */
    @Override
    public ServiceResult<ProjectResource> getProjectById(Long projectId) {
        return getProject(projectId).andOnSuccessReturn(projectMapper::mapToResource);
    }

    /** Looks up the project created from the given application, if any. */
    @Override
    public ServiceResult<ProjectResource> getByApplicationId(Long applicationId) {
        return getProjectByApplication(applicationId).andOnSuccessReturn(projectMapper::mapToResource);
    }

    /**
     * Creates a project for every application whose funding decision is FUNDED.
     * Fails as a whole if any single project creation fails.
     */
    @Override
    @Transactional
    public ServiceResult<Void> createProjectsFromFundingDecisions(Map<Long, FundingDecision> applicationFundingDecisions) {
        List<ServiceResult<ProjectResource>> projectCreationResults = applicationFundingDecisions
                .keySet()
                .stream()
                .filter(d -> applicationFundingDecisions.get(d).equals(FundingDecision.FUNDED))
                .map(this::createSingletonProjectFromApplicationId)
                .collect(toList());
        boolean anyProjectCreationFailed = simpleAnyMatch(projectCreationResults, BaseFailingOrSucceedingResult::isFailure);
        return anyProjectCreationFailed ?
                serviceFailure(CREATE_PROJECT_FROM_APPLICATION_FAILS) : serviceSuccess();
    }

    /**
     * Returns all projects the user participates in, either as a project user
     * or as a monitoring officer (de-duplicated).
     */
    @Override
    @Transactional
    public ServiceResult<List<ProjectResource>> findByUserId(final Long userId) {
        List<ProjectUser> projectUsers = projectUserRepository.findByUserId(userId);
        List<ProjectMonitoringOfficer> monitoringOfficers = projectMonitoringOfficerRepository.findByUserId(userId);
        List<Project> projects = simpleMap(projectUsers, ProjectUser::getProcess);
        List<Project> monitoringOfficerProjects = simpleMap(monitoringOfficers, ProjectMonitoringOfficer::getProcess);
        return serviceSuccess(
                concat(projects.stream(), monitoringOfficerProjects.stream())
                        .distinct()
                        .map(projectMapper::mapToResource)
                        .collect(toList())
        );
    }

    @Override
    public ServiceResult<List<ProjectUserResource>> getProjectUsers(Long projectId) {
        return serviceSuccess(simpleMap(getProjectUsersByProjectId(projectId), projectUserMapper::mapToResource));
    }

    /**
     * Adds a user as a partner on a project, provided the given organisation
     * already belongs to the project. Idempotent for users who are already
     * partners (see addProjectPartner).
     */
    @Override
    @Transactional
    public ServiceResult<ProjectUser> addPartner(Long projectId, Long userId, Long organisationId) {
        return find(getProject(projectId), getOrganisation(organisationId), getUser(userId)).
                andOnSuccess((project, organisation, user) -> {
                    // Guard: the organisation must already be part of the project.
                    if (project.getOrganisations(o -> organisationId.equals(o.getId())).isEmpty()) {
                        return serviceFailure(badRequestError("project does not contain organisation"));
                    }
                    addProcessRoles(project, user, organisation);
                    return addProjectPartner(project, user, organisation);
                });
    }

    // Returns the existing ProjectUser when the user is already a partner,
    // otherwise creates a new (unsaved) PROJECT_PARTNER ProjectUser.
    private ServiceResult<ProjectUser> addProjectPartner(Project project, User user, Organisation organisation){
        List<ProjectUser> partners = project.getProjectUsersWithRole(PROJECT_PARTNER);
        Optional<ProjectUser> projectUser = simpleFindFirst(partners, p -> p.getUser().getId().equals(user.getId()));
        if (projectUser.isPresent()) {
            return serviceSuccess(projectUser.get()); // Already a partner
        } else {
            ProjectUser pu = new ProjectUser(user, project, PROJECT_PARTNER, organisation);
            return serviceSuccess(pu);
        }
    }

    // Gives the user a COLLABORATOR process role on the project's application.
    private void addProcessRoles(Project project, User user, Organisation organisation) {
        Application application = project.getApplication();
        ProcessRole processRole = new ProcessRole(user, application.getId(), Role.COLLABORATOR, organisation.getId());
        processRoleRepository.save(processRole);
    }

    /**
     * Finds the organisation through which the given user partners on the
     * given project, or a NOT_FOUND failure when no such partnership exists.
     */
    @Override
    public ServiceResult<OrganisationResource> getOrganisationByProjectAndUser(Long projectId, Long userId) {
        ProjectUser projectUser = projectUserRepository.findByProjectIdAndRoleAndUserId(projectId, PROJECT_PARTNER, userId);
        if (projectUser != null && projectUser.getOrganisation() != null) {
            return serviceSuccess(organisationMapper.mapToResource(organisationRepository.findById(projectUser.getOrganisation().getId()).orElse(null)));
        } else {
            return serviceFailure(new Error(CANNOT_FIND_ORG_FOR_GIVEN_PROJECT_AND_USER, NOT_FOUND));
        }
    }

    @Override
    public ServiceResult<List<ProjectResource>> findAll() {
        return serviceSuccess(projectsToResources(projectRepository.findAll()));
    }

    private List<ProjectResource> projectsToResources(List<Project> filtered) {
        return simpleMap(filtered, project -> projectMapper.mapToResource(project));
    }

    /**
     * Creates the project for an application, but only when the application's
     * funding decision is FUNDED.
     */
    @Override
    @Transactional
    public ServiceResult<ProjectResource> createProjectFromApplication(Long applicationId) {
        return getApplication(applicationId).andOnSuccess(application -> {
            if (FundingDecisionStatus.FUNDED.equals(application.getFundingDecision())) {
                return createSingletonProjectFromApplicationId(applicationId);
            } else {
                return serviceFailure(CREATE_PROJECT_FROM_APPLICATION_FAILS);
            }
        });
    }

    /** Moves the project into the withdrawn state via the project workflow. */
    @Override
    @Transactional
    public ServiceResult<Void> withdrawProject(long projectId) {
        return getProject(projectId).andOnSuccess(
                existingProject -> getCurrentlyLoggedInUser().andOnSuccess(user ->
                        projectWorkflowHandler.projectWithdrawn(existingProject, user) ?
                                serviceSuccess() : serviceFailure(PROJECT_CANNOT_BE_WITHDRAWN))
        );
    }

    /** Marks the project as being handled offline via the project workflow. */
    @Override
    @Transactional
    public ServiceResult<Void> handleProjectOffline(long projectId) {
        return getProject(projectId).andOnSuccess(
                existingProject -> getCurrentlyLoggedInUser().andOnSuccess(user ->
                        projectWorkflowHandler.handleProjectOffline(existingProject, user) ?
                                serviceSuccess() : serviceFailure(PROJECT_CANNOT_BE_HANDLED_OFFLINE)));
    }

    /** Marks an offline-handled project as completed via the project workflow. */
    @Override
    @Transactional
    public ServiceResult<Void> completeProjectOffline(long projectId) {
        return getProject(projectId).andOnSuccess(
                existingProject -> getCurrentlyLoggedInUser().andOnSuccess(user ->
                        projectWorkflowHandler.completeProjectOffline(existingProject, user) ?
                                serviceSuccess() : serviceFailure(PROJECT_CANNOT_BE_COMPLETED_OFFLINE)));
    }

    // Ensures at most one project per application: returns the existing
    // project when present, otherwise creates a new one.
    private ServiceResult<ProjectResource> createSingletonProjectFromApplicationId(final Long applicationId) {
        return checkForExistingProjectWithApplicationId(applicationId).handleSuccessOrFailure(
                failure -> createProjectFromApplicationId(applicationId),
                success -> serviceSuccess(success)
        );
    }

    private ServiceResult<ProjectResource> checkForExistingProjectWithApplicationId(Long applicationId) {
        return getByApplicationId(applicationId);
    }

    /**
     * Builds and persists a Project from the application: copies the basic
     * details, converts the lead applicant + collaborators into project users,
     * derives the partner organisations, then kicks off the workflow processes
     * and finance-check entities for the new project.
     */
    private ServiceResult<ProjectResource> createProjectFromApplicationId(final Long applicationId) {
        return getApplication(applicationId).andOnSuccess(application -> {
            Project project = new Project();
            project.setApplication(application);
            project.setDurationInMonths(application.getDurationInMonths());
            project.setName(application.getName());
            project.setTargetStartDate(application.getStartDate());
            // NOTE(review): assumes every funded application has a lead applicant role.
            ProcessRole leadApplicantRole = simpleFindFirst(application.getProcessRoles(), ProcessRole::isLeadApplicant).get();
            List<ProcessRole> collaborativeRoles = simpleFilter(application.getProcessRoles(), ProcessRole::isCollaborator);
            List<ProcessRole> allRoles = combineLists(leadApplicantRole, collaborativeRoles);
            List<ServiceResult<ProjectUser>> correspondingProjectUsers = simpleMap(allRoles,
                    role -> {
                        Organisation organisation = organisationRepository.findById(role.getOrganisationId()).orElse(null);
                        return createPartnerProjectUser(project, role.getUser(), organisation);
                    });
            ServiceResult<List<ProjectUser>> projectUserCollection = aggregate(correspondingProjectUsers);
            ServiceResult<Project> saveProjectResult = projectUserCollection.andOnSuccessReturn(projectUsers -> {
                // One PartnerOrganisation per distinct organisation among the users.
                List<Organisation> uniqueOrganisations =
                        removeDuplicates(simpleMap(projectUsers, ProjectUser::getOrganisation));
                List<PartnerOrganisation> partnerOrganisations = simpleMap(uniqueOrganisations, org ->
                        createPartnerOrganisation(application, project, org, leadApplicantRole));
                project.setProjectUsers(projectUsers);
                project.setPartnerOrganisations(partnerOrganisations);
                return projectRepository.save(project);
            });
            return saveProjectResult.
                    andOnSuccess(newProject -> createProcessEntriesForNewProject(newProject).
                            andOnSuccess(() -> generateFinanceCheckEntitiesForNewProject(newProject)).
                            andOnSuccessReturn(() -> projectMapper.mapToResource(newProject)));
        });
    }

    // The lead organisation is the one attached to the lead applicant's role;
    // the postcode is copied from the organisation's application finance, if any.
    private PartnerOrganisation createPartnerOrganisation(Application application, Project project, Organisation org, ProcessRole leadApplicantRole) {
        PartnerOrganisation partnerOrganisation = new PartnerOrganisation(project, org, org.getId().equals(leadApplicantRole.getOrganisationId()));
        simpleFindFirst(application.getApplicationFinances(), applicationFinance -> applicationFinance.getOrganisation().getId().equals(org.getId()))
                .ifPresent(applicationFinance -> partnerOrganisation.setPostcode(applicationFinance.getWorkPostcode()));
        return partnerOrganisation;
    }

    private ServiceResult<ProjectUser> createPartnerProjectUser(Project project, User user, Organisation organisation) {
        return createProjectUserForRole(project, user, organisation, PROJECT_PARTNER);
    }

    private ServiceResult<ProjectUser> createProjectUserForRole(Project project, User user, Organisation organisation, ProjectParticipantRole role) {
        return serviceSuccess(new ProjectUser(user, project, role, organisation));
    }

    /**
     * Starts every workflow process attached to a new project. All processes
     * are attempted; failures are aggregated rather than short-circuited.
     * The first project user is the lead applicant by construction
     * (see createProjectFromApplicationId, which puts the lead role first).
     */
    private ServiceResult<Void> createProcessEntriesForNewProject(Project newProject) {
        ProjectUser originalLeadApplicantProjectUser = newProject.getProjectUsers().get(0);
        ServiceResult<Void> projectDetailsProcess = createProjectDetailsProcess(newProject, originalLeadApplicantProjectUser);
        ServiceResult<Void> viabilityProcesses = createViabilityProcesses(newProject.getPartnerOrganisations(), originalLeadApplicantProjectUser);
        ServiceResult<Void> eligibilityProcesses = createEligibilityProcesses(newProject.getPartnerOrganisations(), originalLeadApplicantProjectUser);
        ServiceResult<Void> golProcess = createGOLProcess(newProject, originalLeadApplicantProjectUser);
        ServiceResult<Void> projectProcess = createProjectProcess(newProject, originalLeadApplicantProjectUser);
        ServiceResult<Void> spendProfileProcess = createSpendProfileProcess(newProject, originalLeadApplicantProjectUser);
        return processAnyFailuresOrSucceed(projectDetailsProcess, viabilityProcesses, eligibilityProcesses, golProcess, projectProcess, spendProfileProcess);
    }

    private ServiceResult<Void> createProjectDetailsProcess(Project newProject, ProjectUser originalLeadApplicantProjectUser) {
        if (projectDetailsWorkflowHandler.projectCreated(newProject, originalLeadApplicantProjectUser)) {
            return serviceSuccess();
        } else {
            return serviceFailure(PROJECT_SETUP_UNABLE_TO_CREATE_PROJECT_PROCESSES);
        }
    }

    // One viability process per partner organisation.
    private ServiceResult<Void> createViabilityProcesses(List<PartnerOrganisation> partnerOrganisations, ProjectUser originalLeadApplicantProjectUser) {
        List<ServiceResult<Void>> results = simpleMap(partnerOrganisations, partnerOrganisation ->
                viabilityWorkflowHandler.projectCreated(partnerOrganisation, originalLeadApplicantProjectUser) ?
                        serviceSuccess() :
                        serviceFailure(PROJECT_SETUP_UNABLE_TO_CREATE_PROJECT_PROCESSES));
        return aggregate(results).andOnSuccessReturnVoid();
    }

    // One eligibility process per partner organisation.
    private ServiceResult<Void> createEligibilityProcesses(List<PartnerOrganisation> partnerOrganisations, ProjectUser originalLeadApplicantProjectUser) {
        List<ServiceResult<Void>> results = simpleMap(partnerOrganisations, partnerOrganisation ->
                eligibilityWorkflowHandler.projectCreated(partnerOrganisation, originalLeadApplicantProjectUser) ?
                        serviceSuccess() :
                        serviceFailure(PROJECT_SETUP_UNABLE_TO_CREATE_PROJECT_PROCESSES));
        return aggregate(results).andOnSuccessReturnVoid();
    }

    private ServiceResult<Void> createGOLProcess(Project newProject, ProjectUser originalLeadApplicantProjectUser) {
        if (golWorkflowHandler.projectCreated(newProject, originalLeadApplicantProjectUser)) {
            return serviceSuccess();
        } else {
            return serviceFailure(PROJECT_SETUP_UNABLE_TO_CREATE_PROJECT_PROCESSES);
        }
    }

    private ServiceResult<Void> createProjectProcess(Project newProject, ProjectUser originalLeadApplicantProjectUser) {
        if (projectWorkflowHandler.projectCreated(newProject, originalLeadApplicantProjectUser)) {
            return serviceSuccess();
        } else {
            return serviceFailure(PROJECT_SETUP_UNABLE_TO_CREATE_PROJECT_PROCESSES);
        }
    }

    private ServiceResult<Void> createSpendProfileProcess(Project newProject, ProjectUser originalLeadApplicantProjectUser) {
        if (spendProfileWorkflowHandler.projectCreated(newProject, originalLeadApplicantProjectUser)) {
            return serviceSuccess();
        } else {
            return serviceFailure(PROJECT_SETUP_UNABLE_TO_CREATE_PROJECT_PROCESSES);
        }
    }

    // Creates the finance-check figures (plus MVP figures based on the cost
    // category type) for each organisation of the project; failures aggregate.
    private ServiceResult<Void> generateFinanceCheckEntitiesForNewProject(Project newProject) {
        List<Organisation> organisations = newProject.getOrganisations();
        List<ServiceResult<Void>> financeCheckResults = simpleMap(organisations, organisation ->
                financeChecksGenerator.createFinanceChecksFigures(newProject, organisation).andOnSuccess(() ->
                        costCategoryTypeStrategy.getOrCreateCostCategoryTypeForSpendProfile(newProject.getId(), organisation.getId()).andOnSuccess(costCategoryType ->
                                financeChecksGenerator.createMvpFinanceChecksFigures(newProject, organisation, costCategoryType))));
        return processAnyFailuresOrSucceed(financeCheckResults);
    }

    private ServiceResult<Project> getProjectByApplication(long applicationId) {
        return find(projectRepository.findOneByApplicationId(applicationId), notFoundError(Project.class, applicationId));
    }
}
|
package org.mustbe.consulo.ironPython.module.extension;
import org.consulo.module.extension.MutableModuleInheritableNamedPointer;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.mustbe.consulo.dotnet.module.extension.DotNetModuleExtension;
import org.mustbe.consulo.ironPython.module.IronPythonConfigurationLayer;
import org.mustbe.consulo.module.extension.ChildLayeredModuleExtensionImpl;
import org.mustbe.consulo.module.extension.ConfigurationLayer;
import org.mustbe.consulo.module.extension.LayeredModuleExtension;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.SdkType;
import com.intellij.openapi.roots.ModifiableRootModel;
import com.jetbrains.python.sdk.PythonSdkType;
/**
* @author VISTALL
* @since 11.02.14
*/
public class IronPythonModuleExtension extends ChildLayeredModuleExtensionImpl<IronPythonModuleExtension> implements
		BaseIronPythonModuleExtension<IronPythonModuleExtension>
{
	public IronPythonModuleExtension(@NotNull String id, @NotNull ModifiableRootModel modifiableRootModel)
	{
		super(id, modifiableRootModel);
	}

	/** This extension layers on top of the .NET module extension. */
	@NotNull
	@Override
	public Class<? extends LayeredModuleExtension> getHeadClass()
	{
		return DotNetModuleExtension.class;
	}

	/** Each configuration layer is an IronPython-specific layer owned by this extension. */
	@NotNull
	@Override
	protected ConfigurationLayer createLayer()
	{
		return new IronPythonConfigurationLayer(this);
	}

	// The current layer is always an IronPythonConfigurationLayer (see createLayer),
	// which holds the inheritable SDK pointer this extension delegates to.
	@NotNull
	@Override
	public MutableModuleInheritableNamedPointer<Sdk> getInheritableSdk()
	{
		return ((IronPythonConfigurationLayer) getCurrentLayer()).getInheritableSdk();
	}

	/** Resolves the SDK from the current layer's inheritable pointer; may be null. */
	@Nullable
	@Override
	public Sdk getSdk()
	{
		return getInheritableSdk().get();
	}

	/** Name of the SDK from the current layer's inheritable pointer; may be null. */
	@Nullable
	@Override
	public String getSdkName()
	{
		return getInheritableSdk().getName();
	}

	/** SDKs applicable to this extension are Python SDKs. */
	@NotNull
	@Override
	public Class<? extends SdkType> getSdkTypeClass()
	{
		return PythonSdkType.class;
	}
}
|
package org.eclipse.jetty.osgi.boot.utils.internal;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceEvent;
import org.osgi.framework.ServiceListener;
import org.osgi.framework.ServiceReference;
import org.osgi.service.packageadmin.PackageAdmin;
import org.osgi.service.startlevel.StartLevel;
/**
* When the PackageAdmin service is activated we can look for the fragments
* attached to this bundle and "activate" them.
*/
public class PackageAdminServiceTracker implements ServiceListener
{
private BundleContext _context;
private List<BundleActivator> _activatedFragments = new ArrayList<BundleActivator>();
private boolean _fragmentsWereActivated = false;
//Use the deprecated StartLevel to stay compatible with older versions of OSGi.
private StartLevel _startLevel;
private int _maxStartLevel = 6;
public static PackageAdminServiceTracker INSTANCE = null;
/**
 * Creates the tracker. If the PackageAdmin service is already available the
 * fragments are activated immediately; otherwise a service listener is
 * installed to activate them once PackageAdmin is registered.
 */
public PackageAdminServiceTracker(BundleContext context)
{
    // NOTE(review): publishes this instance via a mutable static before the
    // constructor finishes — presumably only one tracker is ever created.
    INSTANCE = this;
    _context = context;
    if (!setup())
    {
        try
        {
            // PackageAdmin not yet registered: wait for its registration event.
            _context.addServiceListener(this,"(objectclass=" + PackageAdmin.class.getName() + ")");
        }
        catch (InvalidSyntaxException e)
        {
            e.printStackTrace(); // won't happen
        }
    }
}
/**
 * Attempts to activate the fragments via the PackageAdmin service, and reads
 * the configured maximum start level from the StartLevel service if present.
 *
 * @return true if the fragments were activated by this method.
 */
private boolean setup()
{
    ServiceReference packageAdminRef = _context.getServiceReference(PackageAdmin.class.getName());
    _fragmentsWereActivated = (packageAdminRef != null);
    if (packageAdminRef != null)
    {
        invokeFragmentActivators(packageAdminRef);
    }
    ServiceReference startLevelRef = _context.getServiceReference(StartLevel.class.getName());
    if (startLevelRef != null)
    {
        _startLevel = (StartLevel) _context.getService(startLevelRef);
        try
        {
            _maxStartLevel = Integer.parseInt(System.getProperty("osgi.startLevel", "6"));
        }
        catch (Exception e)
        {
            // Malformed or missing property value: fall back to the default.
            _maxStartLevel = 6;
        }
    }
    return _fragmentsWereActivated;
}
/**
 * Reacts to the registration of the PackageAdmin service by invoking the
 * optional BundleActivator in each fragment. By convention the bundle
 * activator for a fragment must be in the package that is defined by the
 * symbolic name of the fragment and the name of the class must be
 * 'FragmentActivator'.
 *
 * @param event
 *            The <code>ServiceEvent</code> object.
 */
public void serviceChanged(ServiceEvent event)
{
    if (event.getType() == ServiceEvent.REGISTERED)
    {
        invokeFragmentActivators(event.getServiceReference());
    }
}
/**
 * Helper to access the PackageAdmin service and return the fragments hosted
 * by a bundle. Once support for older OSGi versions is dropped, this should
 * stop using the (deprecated) PackageAdmin service.
 *
 * @param bundle the host bundle
 * @return the attached fragments, or null when PackageAdmin is unavailable
 */
public Bundle[] getFragments(Bundle bundle)
{
    ServiceReference ref = _context.getServiceReference(PackageAdmin.class.getName());
    if (ref == null)
    {
        // We should never be here really: PackageAdmin is expected to exist.
        return null;
    }
    PackageAdmin packageAdmin = (PackageAdmin) _context.getService(ref);
    return packageAdmin.getFragments(bundle);
}
/**
* Returns the fragments and the required-bundles of a bundle.
* Recursively collect the required-bundles and fragment when the directive visibility:=reexport
* is added to a required-bundle.
* @param bundle
* @param webFragOrAnnotationOrResources
* @return
*/
public Bundle[] getFragmentsAndRequiredBundles(Bundle bundle)
{
ServiceReference sr = _context.getServiceReference(PackageAdmin.class.getName());
if (sr == null)
{//we should never be here really.
return null;
}
PackageAdmin admin = (PackageAdmin)_context.getService(sr);
LinkedHashMap<String,Bundle> deps = new LinkedHashMap<String,Bundle>();
collectFragmentsAndRequiredBundles(bundle, admin, deps, false);
return deps.values().toArray(new Bundle[deps.size()]);
}
/**
* Returns the fragments and the required-bundles. Collects them transitively when the directive 'visibility:=reexport'
* is added to a required-bundle.
* @param bundle
* @param webFragOrAnnotationOrResources
* @return
*/
protected void collectFragmentsAndRequiredBundles(Bundle bundle, PackageAdmin admin, Map<String,Bundle> deps, boolean onlyReexport)
{
Bundle[] fragments = admin.getFragments(bundle);
if (fragments != null)
{
//Also add the bundles required by the fragments.
//this way we can inject onto an existing web-bundle a set of bundles that extend it
for (Bundle f : fragments)
{
if (!deps.keySet().contains(f.getSymbolicName()))
{
deps.put(f.getSymbolicName(), f);
collectRequiredBundles(f, admin, deps, onlyReexport);
}
}
}
collectRequiredBundles(bundle, admin, deps, onlyReexport);
}
/**
* A simplistic but good enough parser for the Require-Bundle header.
* Parses the version range attribute and the visibility directive.
*
* @param onlyReexport true to collect resources and web-fragments transitively if and only if the directive visibility is reexport.
* @param bundle
* @return The map of required bundles associated to the value of the jetty-web attribute.
*/
protected void collectRequiredBundles(Bundle bundle, PackageAdmin admin, Map<String,Bundle> deps, boolean onlyReexport)
{
String requiredBundleHeader = (String)bundle.getHeaders().get("Require-Bundle");
if (requiredBundleHeader == null)
{
return;
}
StringTokenizer tokenizer = new ManifestTokenizer(requiredBundleHeader);
while (tokenizer.hasMoreTokens())
{
String tok = tokenizer.nextToken().trim();
StringTokenizer tokenizer2 = new StringTokenizer(tok, ";");
String symbolicName = tokenizer2.nextToken().trim();
if (deps.keySet().contains(symbolicName))
{
//was already added. 2 dependencies pointing at the same bundle.
continue;
}
String versionRange = null;
boolean reexport = false;
while (tokenizer2.hasMoreTokens())
{
String next = tokenizer2.nextToken().trim();
if (next.startsWith("bundle-version="))
{
if (next.startsWith("bundle-version=\"") || next.startsWith("bundle-version='"))
{
versionRange = next.substring("bundle-version=\"".length(), next.length()-1);
}
else
{
versionRange = next.substring("bundle-version=".length());
}
}
else if (next.equals("visibility:=reexport"))
{
reexport = true;
}
}
if (!reexport && onlyReexport)
{
return;
}
Bundle[] reqBundles = admin.getBundles(symbolicName, versionRange);
if (reqBundles != null && reqBundles.length != 0)
{
Bundle reqBundle = null;
for (Bundle b : reqBundles)
{
if (b.getState() == Bundle.ACTIVE || b.getState() == Bundle.STARTING)
{
reqBundle = b;
break;
}
}
if (reqBundle == null)
{
//strange? in OSGi with Require-Bundle,
//the dependent bundle is supposed to be active already
reqBundle = reqBundles[0];
}
deps.put(reqBundle.getSymbolicName(),reqBundle);
collectFragmentsAndRequiredBundles(reqBundle, admin, deps, true);
}
}
}
private void invokeFragmentActivators(ServiceReference sr)
{
PackageAdmin admin = (PackageAdmin)_context.getService(sr);
Bundle[] fragments = admin.getFragments(_context.getBundle());
if (fragments == null)
{
return;
}
for (Bundle frag : fragments)
{
// find a convention to look for a class inside the fragment.
try
{
String fragmentActivator = frag.getSymbolicName() + ".FragmentActivator";
Class<?> c = Class.forName(fragmentActivator);
if (c != null)
{
BundleActivator bActivator = (BundleActivator)c.newInstance();
bActivator.start(_context);
_activatedFragments.add(bActivator);
}
}
catch (NullPointerException e)
{
// e.printStackTrace();
}
catch (InstantiationException e)
{
// e.printStackTrace();
}
catch (IllegalAccessException e)
{
// e.printStackTrace();
}
catch (ClassNotFoundException e)
{
// e.printStackTrace();
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
public void stop()
{
INSTANCE = null;
for (BundleActivator fragAct : _activatedFragments)
{
try
{
fragAct.stop(_context);
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
/**
* @return true if the framework has completed all the start levels.
*/
public boolean frameworkHasCompletedAutostarts()
{
return _startLevel == null ? true : _startLevel.getStartLevel() >= _maxStartLevel;
}
private static class ManifestTokenizer extends StringTokenizer {
public ManifestTokenizer(String header) {
super(header, ",");
}
@Override
public String nextToken() {
String token = super.nextToken();
while (hasOpenQuote(token) && hasMoreTokens()) {
token += "," + super.nextToken();
}
return token;
}
private boolean hasOpenQuote(String token) {
int i = -1;
do {
int quote = getQuote(token, i+1);
if (quote < 0) {
return false;
}
i = token.indexOf(quote, i+1);
i = token.indexOf(quote, i+1);
} while (i >= 0);
return true;
}
private int getQuote(String token, int offset) {
int i = token.indexOf('"', offset);
int j = token.indexOf('\'', offset);
if (i < 0) {
if (j < 0) {
return -1;
} else {
return '\'';
}
}
if (j < 0) {
return '"';
}
if (i < j) {
return '"';
}
return '\'';
}
}
}
|
package org.jboss.osgi.framework.internal;
import static org.jboss.osgi.framework.internal.FrameworkLogger.LOGGER;
import static org.jboss.osgi.framework.internal.FrameworkMessages.MESSAGES;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import org.jboss.msc.service.ServiceController.Mode;
import org.jboss.msc.service.ServiceName;
import org.jboss.osgi.deployment.deployer.Deployment;
import org.jboss.osgi.deployment.interceptor.LifecycleInterceptorException;
import org.jboss.osgi.framework.StorageState;
import org.jboss.osgi.framework.internal.AbstractBundleState.BundleLock.Method;
import org.jboss.osgi.framework.internal.BundleStoragePlugin.InternalStorageState;
import org.jboss.osgi.metadata.ActivationPolicyMetaData;
import org.jboss.osgi.metadata.OSGiMetaData;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleEvent;
import org.osgi.framework.BundleException;
import org.osgi.framework.Constants;
import org.osgi.framework.wiring.BundleRevision;
import org.osgi.framework.wiring.BundleWiring;
import org.osgi.resource.Wire;
import org.osgi.service.resolver.ResolutionException;
import org.osgi.service.startlevel.StartLevel;
/**
* Represents the INSTALLED state of a host bundle.
*
* @author thomas.diesler@jboss.com
* @author <a href="david@redhat.com">David Bosschaert</a>
*/
final class HostBundleState extends UserBundleState {
    // Guards against re-entrant starts; true only while startInternal() runs.
    private final AtomicBoolean alreadyStarting = new AtomicBoolean();
    // True when the bundle declared lazy activation and has not been activated yet.
    private final AtomicBoolean awaitLazyActivation = new AtomicBoolean();
    // The activator instance created in transitionToActive(); reused by stopInternal().
    private BundleActivator bundleActivator;
    HostBundleState(FrameworkState frameworkState, HostBundleRevision revision, ServiceName serviceName) {
        super(frameworkState, revision, serviceName);
    }
    /**
     * Casts the given bundle to a HostBundleState, asserting the runtime type.
     */
    static HostBundleState assertBundleState(Bundle bundle) {
        AbstractBundleState bundleState = AbstractBundleState.assertBundleState(bundle);
        assert bundleState instanceof HostBundleState : "Not a HostBundleState: " + bundleState;
        return (HostBundleState) bundleState;
    }
    /** Arms lazy activation if the bundle declares the lazy activation policy. */
    void initLazyActivation() {
        awaitLazyActivation.set(isActivationLazy());
    }
    @Override
    HostBundleContext createContextInternal() {
        return new HostBundleContext(this);
    }
    @Override
    HostBundleRevision createUpdateRevision(Deployment dep, OSGiMetaData metadata, InternalStorageState storageState) throws BundleException {
        return new HostBundleRevision(getFrameworkState(), dep, metadata, storageState);
    }
    // Invalid discovery of Bundle.getBundleContext() method
    // NOTE(review): redeclared as a plain delegation, presumably to work around a
    // tooling/visibility issue — confirm before removing.
    public BundleContext getBundleContext() {
        return super.getBundleContext();
    }
    @Override
    public boolean isFragment() {
        // A host bundle is by definition not a fragment.
        return false;
    }
    /** Returns this bundle's persisted start level. */
    int getStartLevel() {
        return getStorageState().getStartLevel();
    }
    /** Persists a new start level for this bundle. */
    void setStartLevel(int level) {
        LOGGER.debugf("Setting bundle start level %d for: %s", level, this);
        InternalStorageState storageState = getStorageState();
        storageState.setStartLevel(level);
    }
    @Override
    public HostBundleRevision getBundleRevision() {
        return (HostBundleRevision) super.getBundleRevision();
    }
    /** True if the persistent autostart setting is 'started'. */
    boolean isPersistentlyStarted() {
        StorageState storageState = getStorageState();
        return storageState.isPersistentlyStarted();
    }
    /** True if the bundle declares Bundle-ActivationPolicy: lazy. */
    boolean isActivationLazy() {
        ActivationPolicyMetaData activationPolicy = getActivationPolicy();
        String policyType = (activationPolicy != null ? activationPolicy.getType() : null);
        return Constants.ACTIVATION_LAZY.equals(policyType);
    }
    ActivationPolicyMetaData getActivationPolicy() {
        return getOSGiMetaData().getBundleActivationPolicy();
    }
    /** True while the bundle is armed for lazy activation but not yet activated. */
    boolean awaitLazyActivation() {
        return awaitLazyActivation.get();
    }
    /**
     * Performs the deferred start of a lazily-activated bundle. The armed flag is
     * consumed atomically so concurrent triggers start the bundle at most once.
     * The start is skipped (flag still consumed) if the framework start level
     * does not yet permit it.
     */
    void activateLazily() throws BundleException {
        if (awaitLazyActivation.getAndSet(false)) {
            if (startLevelValidForStart() == true) {
                int options = START_TRANSIENT;
                if (isBundleActivationPolicyUsed()) {
                    options |= START_ACTIVATION_POLICY;
                }
                LOGGER.debugf("Lazy activation of: %s", this);
                startInternal(options);
            }
        }
    }
    /**
     * Loads a class through the bundle class loader while tracking the load, so
     * a class load can trigger lazy activation of this bundle.
     */
    @Override
    public Class<?> loadClass(String className) throws ClassNotFoundException {
        LazyActivationTracker.startTracking(this, className);
        try {
            Class<?> loadedClass = super.loadClass(className);
            LazyActivationTracker.processLoadedClass(loadedClass);
            return loadedClass;
        } finally {
            LazyActivationTracker.stopTracking(this, className);
        }
    }
    // True when the framework's current start level permits starting this bundle.
    private boolean startLevelValidForStart() {
        StartLevel startLevelPlugin = getCoreServices().getStartLevel();
        return getStartLevel() <= startLevelPlugin.getStartLevel();
    }
    private boolean isBundleActivationPolicyUsed() {
        StorageState storageState = getStorageState();
        return storageState.isBundleActivationPolicyUsed();
    }
    private void setBundleActivationPolicyUsed(boolean usePolicy) {
        InternalStorageState storageState = getStorageState();
        storageState.setBundleActivationPolicyUsed(usePolicy);
    }
    /** True while startInternal() is executing on some thread. */
    boolean isAlreadyStarting() {
        return alreadyStarting.get();
    }
    /**
     * Returns the user bundles this bundle is wired to (its providers), derived
     * from the resolved wiring. Empty when the bundle is not resolved.
     */
    Set<UserBundleState> getDependentBundles() {
        Set<UserBundleState> result = new HashSet<UserBundleState>();
        if (isResolved() == true) {
            BundleWiring wiring = getBundleRevision().getWiring();
            List<Wire> wires = wiring.getRequiredResourceWires(null);
            for (Wire wire : wires) {
                BundleRevision brev = (BundleRevision) wire.getProvider();
                Bundle bundle = brev.getBundle();
                if (bundle instanceof UserBundleState)
                    result.add((UserBundleState) bundle);
            }
        }
        return result;
    }
    /**
     * Implements Bundle.start(options) per the OSGi core specification; the
     * numbered comments below reference the spec steps for starting a bundle.
     *
     * @param options bitwise OR of START_TRANSIENT and START_ACTIVATION_POLICY
     * @throws BundleException if the bundle cannot be resolved or started
     */
    void startInternal(int options) throws BundleException {
        // Assert the required start conditions
        assertStartConditions();
        LOGGER.debugf("Starting bundle: %s", this);
        // If the Framework's current start level is less than this bundle's start level
        if (startLevelValidForStart() == false) {
            // If the START_TRANSIENT option is set, then a BundleException is thrown
            // indicating this bundle cannot be started due to the Framework's current start level
            if ((options & START_TRANSIENT) != 0)
                throw MESSAGES.cannotStartBundleDueToStartLevel();
            LOGGER.debugf("Start level [%d] not valid for: %s", getStartLevel(), this);
            // Set this bundle's autostart setting
            persistAutoStartSettings(options);
            return;
        }
        // #1 If this bundle is in the process of being activated or deactivated
        // then this method must wait for activation or deactivation to complete before continuing.
        // If this does not occur in a reasonable time, a BundleException is thrown
        aquireBundleLock(Method.START);
        alreadyStarting.set(true);
        try {
            // #2 If this bundle's state is ACTIVE then this method returns immediately.
            if (getState() == ACTIVE)
                return;
            // #3 Set this bundle's autostart setting
            persistAutoStartSettings(options);
            // #4 If this bundle's state is not RESOLVED, an attempt is made to resolve this bundle.
            // If the Framework cannot resolve this bundle, a BundleException is thrown.
            if (ensureResolved(true) == false) {
                ResolutionException resex = getLastResolutionException();
                throw MESSAGES.cannotResolveBundle(resex, this);
            }
            // The BundleContext object is valid during STARTING, STOPPING, and ACTIVE
            if (getBundleContextInternal() == null)
                createBundleContext();
            // #5 If the START_ACTIVATION_POLICY option is set and this bundle's declared activation policy is lazy
            boolean useActivationPolicy = (options & START_ACTIVATION_POLICY) != 0;
            if (awaitLazyActivation.get() == true && useActivationPolicy == true) {
                transitionToStarting(options);
            } else {
                transitionToActive(options);
            }
        } finally {
            alreadyStarting.set(false);
            releaseBundleLock(Method.START);
        }
    }
    /**
     * Verifies that one of the bundle's required execution environments is
     * provided by the framework; throws otherwise. Bundles without the header
     * pass unconditionally.
     */
    private void assertStartConditions() throws BundleException {
        // The service platform may run this bundle if any of the execution environments named in the
        // Bundle-RequiredExecutionEnvironment header matches one of the execution environments it implements.
        List<String> requiredEnvs = getOSGiMetaData().getRequiredExecutionEnvironment();
        if (requiredEnvs != null) {
            boolean foundSupportedEnv = false;
            String frameworkEnvProp = (String) getBundleManager().getProperty(Constants.FRAMEWORK_EXECUTIONENVIRONMENT);
            List<String> availableEnvs = Arrays.asList(frameworkEnvProp.split("[,\\s]+"));
            for (String aux : requiredEnvs) {
                if (availableEnvs.contains(aux)) {
                    foundSupportedEnv = true;
                    break;
                }
            }
            if (foundSupportedEnv == false)
                throw MESSAGES.unsupportedExecutionEnvironment(requiredEnvs, availableEnvs);
        }
    }
    private void persistAutoStartSettings(int options) {
        // The Framework must set this bundle's persistent autostart setting to
        // Started with declared activation if the START_ACTIVATION_POLICY option is set or
        // Started with eager activation if not set.
        if ((options & START_TRANSIENT) == 0) {
            setPersistentlyStarted(true);
            boolean activationPolicyUsed = (options & START_ACTIVATION_POLICY) != 0;
            setBundleActivationPolicyUsed(activationPolicyUsed);
        }
    }
    private void setPersistentlyStarted(boolean started) {
        InternalStorageState storageState = getStorageState();
        storageState.setPersistentlyStarted(started);
    }
    /** Moves a lazily-activated bundle into STARTING and fires LAZY_ACTIVATION. */
    private void transitionToStarting(int options) throws BundleException {
        // #5.1 If this bundle's state is STARTING then this method returns immediately.
        if (getState() == STARTING)
            return;
        // #5.2 This bundle's state is set to STARTING.
        // #5.3 A bundle event of type BundleEvent.LAZY_ACTIVATION is fired
        changeState(STARTING, BundleEvent.LAZY_ACTIVATION);
    }
    /**
     * Drives the bundle through STARTING to ACTIVE, invoking the declared
     * BundleActivator; rolls back to RESOLVED on activator failure per spec.
     */
    private void transitionToActive(int options) throws BundleException {
        // #6 This bundle's state is set to STARTING.
        // #7 A bundle event of type BundleEvent.STARTING is fired.
        try {
            changeState(STARTING);
        } catch (LifecycleInterceptorException ex) {
            throw MESSAGES.cannotTransitionToStarting(ex, this);
        }
        // #8 The BundleActivator.start(BundleContext) method of this bundle is called
        String className = getOSGiMetaData().getBundleActivator();
        if (className != null) {
            ClassLoader tccl = Thread.currentThread().getContextClassLoader();
            try {
                // Run the activator without a TCCL; restored in the finally block.
                Thread.currentThread().setContextClassLoader(null);
                // A pre-built activator may be attached to the deployment (e.g. by the container).
                bundleActivator = getDeployment().getAttachment(BundleActivator.class);
                if (bundleActivator == null) {
                    Object result = loadClass(className).newInstance();
                    if (result instanceof BundleActivator) {
                        bundleActivator = (BundleActivator) result;
                    } else {
                        throw MESSAGES.invalidBundleActivator(className);
                    }
                }
                if (bundleActivator != null) {
                    bundleActivator.start(getBundleContext());
                }
            }
            // If the BundleActivator is invalid or throws an exception then
            catch (Throwable th) {
                // #8.1 This bundle's state is set to STOPPING
                // #8.2 A bundle event of type BundleEvent.STOPPING is fired
                changeState(STOPPING);
                // #8.3 Any services registered by this bundle must be unregistered.
                // #8.4 Any services used by this bundle must be released.
                // #8.5 Any listeners registered by this bundle must be removed.
                removeServicesAndListeners();
                // The BundleContext object is valid during STARTING, STOPPING, and ACTIVE
                destroyBundleContext();
                // #8.6 This bundle's state is set to RESOLVED
                // #8.7 A bundle event of type BundleEvent.STOPPED is fired
                changeState(RESOLVED);
                // #8.8 A BundleException is then thrown
                if (th instanceof BundleException)
                    throw (BundleException) th;
                throw MESSAGES.cannotStartBundle(th, this);
            } finally {
                Thread.currentThread().setContextClassLoader(tccl);
            }
        }
        // #9 If this bundle's state is UNINSTALLED, because this bundle was uninstalled while
        // the BundleActivator.start method was running, a BundleException is thrown
        if (getState() == UNINSTALLED)
            throw MESSAGES.uninstalledDuringActivatorStart(this);
        // #10 This bundle's state is set to ACTIVE.
        // #11 A bundle event of type BundleEvent.STARTED is fired
        changeState(ACTIVE);
        // Activate the service that represents bundle state ACTIVE
        getBundleManager().setServiceMode(getServiceName(ACTIVE), Mode.ACTIVE);
        LOGGER.infoBundleStarted(this);
    }
    /**
     * Implements Bundle.stop(options) per the OSGi core specification; the
     * numbered comments below reference the spec steps for stopping a bundle.
     * An activator failure is recorded and rethrown only after the remaining
     * steps complete, as the spec requires.
     */
    @Override
    void stopInternal(int options) throws BundleException {
        // #2 If this bundle is in the process of being activated or deactivated
        // then this method must wait for activation or deactivation to complete before continuing.
        // If this does not occur in a reasonable time, a BundleException is thrown to indicate this bundle was unable to be
        // stopped
        aquireBundleLock(Method.STOP);
        try {
            // A concurrent thread may have uninstalled the bundle
            if (getState() == UNINSTALLED)
                return;
            // #3 If the STOP_TRANSIENT option is not set then then set this bundle's persistent autostart setting to Stopped.
            // When the Framework is restarted and this bundle's autostart setting is Stopped, this bundle must not be
            // automatically started.
            if ((options & STOP_TRANSIENT) == 0) {
                setPersistentlyStarted(false);
                setBundleActivationPolicyUsed(false);
            }
            // #4 If this bundle's state is not STARTING or ACTIVE then this method returns immediately
            int priorState = getState();
            if (priorState != STARTING && priorState != ACTIVE)
                return;
            // #5 This bundle's state is set to STOPPING
            // #6 A bundle event of type BundleEvent.STOPPING is fired
            changeState(STOPPING);
            // #7 If this bundle's state was ACTIVE prior to setting the state to STOPPING,
            // the BundleActivator.stop(org.osgi.framework.BundleContext) method of this bundle's BundleActivator, if one is
            // specified, is called.
            // If that method throws an exception, this method must continue to stop this bundle and a BundleException must be
            // thrown after completion
            // of the remaining steps.
            Throwable rethrow = null;
            if (priorState == ACTIVE) {
                if (bundleActivator != null) {
                    try {
                        bundleActivator.stop(getBundleContext());
                    } catch (Throwable t) {
                        rethrow = t;
                    }
                }
            }
            // #8 Any services registered by this bundle must be unregistered.
            // #9 Any services used by this bundle must be released.
            // #10 Any listeners registered by this bundle must be removed.
            removeServicesAndListeners();
            // #11 If this bundle's state is UNINSTALLED, because this bundle was uninstalled while the
            // BundleActivator.stop method was running, a BundleException must be thrown
            if (getState() == UNINSTALLED)
                throw MESSAGES.uninstalledDuringActivatorStop(this);
            // The BundleContext object is valid during STARTING, STOPPING, and ACTIVE
            destroyBundleContext();
            // #12 This bundle's state is set to RESOLVED
            // #13 A bundle event of type BundleEvent.STOPPED is fired
            changeState(RESOLVED, BundleEvent.STOPPED);
            // Deactivate the service that represents bundle state ACTIVE
            getBundleManager().setServiceMode(getServiceName(ACTIVE), Mode.NEVER);
            LOGGER.infoBundleStopped(this);
            if (rethrow != null)
                throw MESSAGES.errorDuringActivatorStop(rethrow, this);
        } finally {
            releaseBundleLock(Method.STOP);
        }
    }
    private void removeServicesAndListeners() {
        // Any services registered by this bundle must be unregistered.
        // Any services used by this bundle must be released.
        for (ServiceState serviceState : getRegisteredServicesInternal()) {
            serviceState.unregisterInternal();
        }
        // Any listeners registered by this bundle must be removed
        FrameworkEventsPlugin eventsPlugin = getFrameworkState().getFrameworkEventsPlugin();
        eventsPlugin.removeBundleListeners(this);
    }
}
|
package org.eclipse.persistence.testing.tests.wdf.jpa1.query;
import org.eclipse.persistence.testing.framework.wdf.Skip;
import org.eclipse.persistence.testing.framework.wdf.ToBeInvestigated;
import org.junit.Test;
/**
 * JPQL validity tests for ORDER BY, GROUP BY and HAVING clauses, including
 * group-by in subqueries and in constructor expressions. Each test asserts
 * only whether the literal query parses/validates (or, where noted, executes).
 */
public class TestGroupByOrderByHaving extends QueryTest {
    // --- ORDER BY: ordering terms must be selected (or reachable) and sortable ---
    @Test
    @ToBeInvestigated
    public void testOrderBy0() {
        // Ordering by a non-selected basic attribute of the selected entity.
        assertInvalidQuery("SELECT c FROM City c order by c.type");
    }
    @Test
    public void testOrderBy1() {
        assertValidQuery("SELECT c FROM City c order by c.id");
    }
    @Test
    public void testOrderBy2() {
        // 'cops' is a collection-valued path; not usable as an ordering term.
        assertInvalidQuery("SELECT c FROM City c order by c.cops");
    }
    @Test
    public void testOrderBy3() {
        // Mixing an entity with an aggregate in SELECT invalidates the ORDER BY.
        assertInvalidQuery("SELECT c, max(c.id) FROM City c order by c.id");
    }
    @Test
    @ToBeInvestigated
    public void testOrderBy4() {
        assertInvalidQuery("SELECT c, new org.eclipse.persistence.testing.models.wdf.jpa1.jpql.Holder(c.id) FROM City c order by c.id");
    }
    @Test
    @ToBeInvestigated
    public void testOrderBy5() {
        assertInvalidQuery("select c, p.string from Person p, Cop c order by p.integer");
    }
    @Test
    public void testOrderBy6() {
        assertValidQuery("select c, p from Person p, Cop c order by p.integer");
    }
    @Test
    public void testOrderBy7() {
        assertValidQuery("select c.partner, p from Person p, Cop c order by c.partner.id");
    }
    @Test
    public void testOrderBy8() {
        assertValidQuery("select c, p from Person p, Cop c order by c.partner.id");
    }
    @Test
    public void testOrderBy9() {
        // Executed against the database, not only validated.
        assertValidQueryExecution("select t from Task t order by t.projectId");
    }
    // --- GROUP BY: grouping over entities and paths, with and without aggregates ---
    @Test
    public void testGroupBy0() {
        assertValidQuery("select c, p.id from Person p, Cop c group by c, p.id");
    }
    @Test
    public void testGroupBy1() {
        assertValidQuery("select c, p.id from Person p, Cop c group by c");
    }
    @Test
    public void testGroupBy2() {
        assertValidQuery("select c, p.id from Person p, Cop c group by p.id");
    }
    @Test
    public void testGroupBy3() {
        assertValidQuery("select p.id from Person p, Cop c group by c, p.id");
    }
    @Test
    public void testGroupBy4() {
        assertValidQuery("select c from Person p, Cop c group by c, p.id");
    }
    @Test
    public void testGroupBy5() {
        assertValidQuery("select max(p.integer), c, min(p.string), p.id from Person p, Cop c group by c, p.id");
    }
    @Test
    public void testGroupBy6() {
        // Executed against the database, not only validated.
        assertValidQueryExecution("SELECT p,stadt FROM Person p join p.city AS stadt ORDER BY p.id DESC, stadt.name ASC");
    }
    @Test
    public void testGroupBy7() {
        // TODO check if query is meaningful
        assertValidQuery("SELECT c, new org.eclipse.persistence.testing.models.wdf.jpa1.jpql.Holder(c.id) FROM City c group by c.id");
    }
    @Test
    @ToBeInvestigated
    public void testGroupBy8() {
        // Grouping by a collection-valued path ('informers') should be rejected.
        assertInvalidQuery("select c.partner.informers, p.id from Person p, Cop c group by c.partner.informers, p.id");
    }
    @Test
    public void testGroupBy9() {
        assertValidQuery("select c, p.id from Person p, Cop c group by c, p.id having p.id = 5 order by p.id");
    }
    // --- GROUP BY inside EXISTS subqueries ---
    @Test
    public void testSubQueryGroupBy0() {
        assertValidQuery("select _city from City _city where exists(select c from Cop c group by c, c.id, c.tesla)");
    }
    @Test
    public void testSubQueryGroupBy1() {
        assertValidQuery("select _city from City _city where exists(select c from Cop c group by c, c, c having c.tesla is not null)");
    }
    @Test
    public void testSubQueryGroupBy2() {
        assertValidQuery("select _city from City _city where exists(select c.id from Cop c group by c.id having c.partner.id = 5)");
    }
    @Test
    @Skip(databaseNames="org.eclipse.persistence.platform.database.MaxDBPlatform")
    /*
     * On MaxDB, the query maps to
     * "SELECT t0.ID, t0.COOL, t0.NAME, t0.TYPE, t0.CITY_ENUM, t0.CITY_TESLA_INT, t0.CITY_TESLA_BLOB FROM TMP_CITY t0 WHERE EXISTS (SELECT 1 FROM TMP_COP t2, TMP_COP t1 WHERE (t2.ID = t1.PARTNER_ID) GROUP BY t1.ID HAVING (t2.ID = 5))"
     * . The query is invalid (as expected) and should fail on the database as
     * t2.ID is no grouping column and must not be used in HAVING.
     */
    public void testSubQueryGroupBy3() {
        assertInvalidQuery("select _city from City _city where exists(select max(c.id) from Cop c group by c.id having c.partner.id = 5)");
    }
    @Test
    public void testSubQueryGroupBy4() {
        assertValidQuery("select _city from City _city where exists(select c from Cop c group by c.id)");
    }
    @Test
    @ToBeInvestigated
    public void testSubQueryGroupBy5() {
        assertInvalidQuery("select _city from City _city where exists(select c.tesla from Cop c group by c.id)");
    }
    // --- GROUP BY combined with constructor expressions in SELECT ---
    @Test
    @ToBeInvestigated
    public void testConstructorGroupBy0() {
        assertValidQuery("SELECT new org.eclipse.persistence.testing.models.wdf.jpa1.jpql.Holder(count(p)) FROM City c, Person p group by c.id");
    }
    @Test
    public void testConstructorGroupBy1() {
        assertValidQuery("SELECT new org.eclipse.persistence.testing.models.wdf.jpa1.jpql.Holder(max(c.id)) FROM City c group by c.id");
    }
}
|
package com.rho.connectionchecking;
import com.rhomobile.rhodes.api.RhoApiFactory;
public class ConnectionCheckingFactory implements IConnectionCheckingFactory {

    /** Lazily created singleton, shared by all callers of {@link #getApiSingleton()}. */
    private ConnectionCheckingSingleton singleton;

    /**
     * Returns the API singleton, instantiating it on first access.
     * Not thread-safe; matches the factory's single-threaded usage.
     */
    @Override
    public IConnectionCheckingSingleton getApiSingleton() {
        if (singleton == null) {
            singleton = new ConnectionCheckingSingleton(this);
        }
        return singleton;
    }

    /**
     * Per-id API objects are not provided by this factory; always returns null.
     */
    @Override
    public IConnectionChecking getApiObject(String id) {
        return null;
    }
}
|
package org.zstack.core.cloudbus;
import org.apache.commons.lang.StringUtils;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.message.GsonTransient;
import org.zstack.header.message.NeedJsonSchema;
import org.zstack.header.message.NoJsonSchema;
import org.zstack.header.search.Inventory;
import org.zstack.utils.FieldUtils;
import org.zstack.utils.TypeUtils;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
public class MessageJsonSchemaBuilder {
private static Map<Field, Field> skipMap = new ConcurrentHashMap<Field, Field>();
private static boolean isSkip(Field f) {
if (skipMap.containsKey(f)) {
return true;
}
if (TypeUtils.isPrimitiveOrWrapper(f.getType())) {
skipMap.put(f, f);
return true;
}
if (f.isAnnotationPresent(NoJsonSchema.class)) {
skipMap.put(f, f);
return true;
}
if (Modifier.isStatic(f.getModifiers())) {
skipMap.put(f, f);
return true;
}
if (f.isAnnotationPresent(GsonTransient.class)) {
skipMap.put(f, f);
return true;
}
return false;
}
private static Object getValue(Field f, Object obj) throws IllegalAccessException {
f.setAccessible(true);
return f.get(obj);
}
private static boolean isNullValue(Field f, Object obj) throws IllegalAccessException {
return getValue(f, obj) == null;
}
private static void buildSchema(Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
List<Field> fs = FieldUtils.getAllFields(obj.getClass());
for (Field f : fs) {
if (isSkip(f)) {
continue;
}
if (Map.class.isAssignableFrom(f.getType())) {
schemaMap(f, obj, schema, trace, paths);
continue;
}
if (Collection.class.isAssignableFrom(f.getType())) {
Class genericType = FieldUtils.getGenericType(f);
if (genericType != null && TypeUtils.isPrimitiveOrWrapper(genericType)) {
continue;
}
if (!List.class.isAssignableFrom(f.getType())) {
throw new CloudRuntimeException(String.format("the collection type in message can only be List, but %s.%s is %s",
f.getDeclaringClass().getName(), f.getName(), f.getType().getName()));
}
schemaList(f, obj, schema, trace, paths);
continue;
}
schemaObject(f, obj, schema, trace, paths);
}
}
private static void schemaList(Field f, Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
if (isNullValue(f, obj)) {
return;
}
Object value = getValue(f, obj);
if (paths.contains(value)) {
paths.push(value);
throw new CloudRuntimeException(String.format("recursive object graph: %s", StringUtils.join(paths, "
}
paths.push(value);
List col = (List) value;
for (Object item : col) {
String itemName = String.format("%s[%s]", f.getName(), col.indexOf(item));
if (isObjectNeedSchema(item)) {
addToSchema(item.getClass(), itemName, schema, trace);
}
trace.push(itemName);
buildSchema(item, schema, trace, paths);
trace.pop();
}
paths.pop();
}
public static Map<String, List<String>> buildSchema(Object msg) {
try {
Stack<Object> paths = new Stack<Object>();
Stack<String> trace = new Stack<String>();
Map<String, List<String>> schema = new LinkedHashMap<String, List<String>>();
buildSchema(msg, schema, trace, paths);
return schema;
} catch (Exception e) {
throw new CloudRuntimeException(e);
}
}
private static void schemaObject(Field f, Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
if (isNullValue(f, obj)) {
return;
}
Object value = getValue(f, obj);
if (paths.contains(value)) {
paths.push(value);
throw new CloudRuntimeException(String.format("recursive object graph: %s", StringUtils.join(paths, "
}
if (isObjectNeedSchema(value)) {
addToSchema(value.getClass(), f.getName(), schema, trace);
}
paths.push(value);
trace.push(f.getName());
buildSchema(value, schema, trace, paths);
trace.pop();
paths.pop();
}
private static void addToSchema(Class<?> realClass, String name, Map<String, List<String>> schema, Stack<String> trace) {
String base = StringUtils.join(trace, ".");
List<String> path = schema.get(realClass.getName());
if (path == null) {
path = new ArrayList<String>();
schema.put(realClass.getName(), path);
}
if (base.equals("")) {
path.add(name);
} else {
path.add(String.format("%s.%s", base, name));
}
}
private static boolean isObjectNeedSchema(Object obj) {
return obj != null && (obj.getClass().isAnnotationPresent(Inventory.class) || obj.getClass().isAnnotationPresent(NeedJsonSchema.class));
}
private static void schemaMap(Field f, Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
Class genericType = FieldUtils.getGenericType(f);
if (genericType != null && TypeUtils.isPrimitiveOrWrapper(genericType)) {
return;
}
if (isNullValue(f, obj)) {
return;
}
Object value = getValue(f, obj);
if (paths.contains(value)) {
paths.push(value);
throw new CloudRuntimeException(String.format("recursive object graph: %s", StringUtils.join(paths, "
}
paths.push(obj);
Map map = (Map) value;
Iterator<Entry> it = map.entrySet().iterator();
while (it.hasNext()) {
Entry e = it.next();
String key = e.getKey().toString();
Object item = e.getValue();
String itemName = String.format("%s[\"%s\"]", f.getName(), key);
if (isObjectNeedSchema(item)) {
addToSchema(item.getClass(), itemName, schema, trace);
}
trace.push(itemName);
buildSchema(item, schema, trace, paths);
trace.pop();
}
paths.pop();
}
}
|
package com.rubentxu.juegos.core.controladores;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.utils.GdxNativesLoader;
import com.rubentxu.juegos.core.controladores.WorldController.Keys;
import com.rubentxu.juegos.core.managers.RubentxuManager;
import com.rubentxu.juegos.core.modelo.Rubentxu;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class WorldControllerTest {

    WorldController worldController;

    @Before
    public void testSetup() {
        worldController = new WorldController();
    }

    /** Asserts that the shared key map reports {@code key} as pressed. */
    private void assertWorldContainsKey(Keys key) {
        assertTrue(WorldController.keys.get(key));
    }

    /** Asserts that the shared key map reports {@code key} as released. */
    private void assertWorldLacksKey(Keys key) {
        assertFalse(WorldController.keys.get(key));
    }

    @Test
    public void testLeftPressed() {
        worldController.leftPressed();
        assertWorldContainsKey(Keys.LEFT);
    }

    @Test
    public void testLeftReleased() {
        worldController.leftReleased();
        assertWorldLacksKey(Keys.LEFT);
    }

    @Test
    public void testRightPressed() {
        worldController.rightPressed();
        assertWorldContainsKey(Keys.RIGHT);
    }

    @Test
    public void testRightReleased() {
        worldController.rightReleased();
        assertWorldLacksKey(Keys.RIGHT);
    }

    @Test
    public void testJumpPressed() {
        worldController.jumpPressed();
        assertWorldContainsKey(Keys.JUMP);
    }

    @Test
    public void testJumpReleased() {
        worldController.jumpReleased();
        assertWorldLacksKey(Keys.JUMP);
    }

    @Test
    public void testFirePressed() {
        worldController.firePressed();
        assertWorldContainsKey(Keys.FIRE);
    }

    @Test
    public void testFireReleased() {
        worldController.fireReleased();
        assertWorldLacksKey(Keys.FIRE);
    }
}
|
package org.navalplanner.web.planner.allocation.streches;
import static org.navalplanner.web.I18nHelper._;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Date;
import java.util.List;
import org.apache.commons.lang.Validate;
import org.navalplanner.business.common.exceptions.ValidationException;
import org.navalplanner.business.planner.entities.AssignmentFunction;
import org.navalplanner.business.planner.entities.ResourceAllocation;
import org.navalplanner.business.planner.entities.Stretch;
import org.navalplanner.business.planner.entities.StretchesFunction;
import org.navalplanner.business.planner.entities.Task;
import org.navalplanner.business.planner.entities.StretchesFunction.Type;
import org.navalplanner.web.common.Util;
import org.zkoss.zk.ui.Component;
import org.zkoss.zk.ui.SuspendNotAllowedException;
import org.zkoss.zk.ui.WrongValueException;
import org.zkoss.zk.ui.event.Event;
import org.zkoss.zk.ui.event.EventListener;
import org.zkoss.zk.ui.event.Events;
import org.zkoss.zk.ui.util.GenericForwardComposer;
import org.zkoss.zul.Button;
import org.zkoss.zul.Datebox;
import org.zkoss.zul.Decimalbox;
import org.zkoss.zul.Listcell;
import org.zkoss.zul.Listitem;
import org.zkoss.zul.ListitemRenderer;
import org.zkoss.zul.Messagebox;
import org.zkoss.zul.XYModel;
import org.zkoss.zul.api.Window;
/**
 * Controller for the window that edits the {@link Stretch} list of a
 * {@link StretchesFunction} assigned to a {@link ResourceAllocation}. It binds
 * the stretches list grid, handles confirm/cancel, and feeds the
 * dedication/accumulated-hours charts via an {@link IGraphicGenerator}.
 */
public class StretchesFunctionController extends GenericForwardComposer {

    /** Strategy that produces the chart data shown beside the stretches list. */
    public interface IGraphicGenerator {

        public boolean areChartsEnabled(IStretchesFunctionModel model);

        XYModel getDedicationChart(
                IStretchesFunctionModel stretchesFunctionModel);

        XYModel getAccumulatedHoursChartData(
                IStretchesFunctionModel stretchesFunctionModel);
    }

    private Window window;

    // NOTE(review): never assigned in this class; presumably injected by the ZK
    // composer wiring — confirm against the .zul definition.
    private IStretchesFunctionModel stretchesFunctionModel;

    private StretchesRenderer stretchesRenderer = new StretchesRenderer();

    private String title;

    private final IGraphicGenerator graphicGenerator;

    public StretchesFunctionController(IGraphicGenerator graphicGenerator) {
        Validate.notNull(graphicGenerator);
        this.graphicGenerator = graphicGenerator;
    }

    @Override
    public void doAfterCompose(Component comp) throws Exception {
        super.doAfterCompose(comp);
        window = (Window) comp;
    }

    public AssignmentFunction getAssignmentFunction() {
        return stretchesFunctionModel.getStretchesFunction();
    }

    /**
     * Initializes the model from the allocation's current assignment function
     * and refreshes the list and charts.
     */
    public void setResourceAllocation(ResourceAllocation<?> resourceAllocation,
            Type type) {
        AssignmentFunction assignmentFunction = resourceAllocation
                .getAssignmentFunction();
        Task task = resourceAllocation.getTask();
        stretchesFunctionModel.init((StretchesFunction) assignmentFunction,
                task, type);
        reloadStretchesListAndCharts();
    }

    public void showWindow() {
        try {
            window.doModal();
        } catch (SuspendNotAllowedException e) {
            throw new RuntimeException(e);
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
    }

    /** Persists the edits; shows an error box if validation fails. */
    public void confirm() throws InterruptedException {
        try {
            stretchesFunctionModel.confirm();
            window.setVisible(false);
        } catch (ValidationException e) {
            Messagebox.show(e.getMessage(), _("Error"), Messagebox.OK,
                    Messagebox.ERROR);
        }
    }

    /** Discards the edits after asking the user for confirmation. */
    public void cancel() throws InterruptedException {
        int status = Messagebox.show(
                _("You will lose the changes. Are you sure?"),
                _("Confirm cancel"), Messagebox.YES | Messagebox.NO,
                Messagebox.QUESTION);
        if (Messagebox.YES == status) {
            stretchesFunctionModel.cancel();
            window.setVisible(false);
        }
    }

    public List<Stretch> getStretches() {
        return stretchesFunctionModel.getStretches();
    }

    public StretchesRenderer getStretchesRenderer() {
        return stretchesRenderer;
    }

    /**
     * Decides whether a just-rendered input box should receive focus.
     * Implementations return {@code true} when the pending focus request has
     * been consumed (so {@link FocusState} can reset itself).
     */
    private interface IFocusApplycability {

        public abstract boolean focusIfApplycableOnLength(Stretch strech,
                Decimalbox lenghtPercentage);

        public abstract boolean focusIfApplycableOnAmountWork(Stretch strech,
                Decimalbox amountWork);
    }

    /**
     * Holds the currently pending focus request and resets it to
     * {@link NoFocus} once a render pass consumes it.
     */
    private static class FocusState implements IFocusApplycability {

        private static final NoFocus NO_FOCUS = new NoFocus();

        private IFocusApplycability currentFocus;

        private FocusState(IFocusApplycability currentFocus) {
            this.currentFocus = currentFocus;
        }

        public static FocusState noFocus() {
            return new FocusState(NO_FOCUS);
        }

        @Override
        public boolean focusIfApplycableOnAmountWork(Stretch strech,
                Decimalbox amountWork) {
            boolean result = currentFocus.focusIfApplycableOnAmountWork(strech,
                    amountWork);
            if (result) {
                currentFocus = NO_FOCUS;
            }
            return result;
        }

        @Override
        public boolean focusIfApplycableOnLength(Stretch strech,
                Decimalbox lenghtPercentage) {
            boolean result = currentFocus.focusIfApplycableOnLength(strech,
                    lenghtPercentage);
            if (result) {
                currentFocus = NO_FOCUS;
            }
            return result;
        }

        /** Records a focus request to be applied on the next render pass. */
        public void focusOn(Stretch stretch, Field field) {
            this.currentFocus = new FocusOnStrech(stretch, field);
        }
    }

    /** Null object: no focus is pending, nothing is ever consumed. */
    private static class NoFocus implements IFocusApplycability {

        @Override
        public boolean focusIfApplycableOnAmountWork(Stretch strech,
                Decimalbox amountWork) {
            return false;
        }

        @Override
        public boolean focusIfApplycableOnLength(Stretch strech,
                Decimalbox lenghtPercentage) {
            return false;
        }
    }

    /** Which input box of a stretch row a focus request targets. */
    public enum Field {
        AMOUNT_WORK, LENGTH
    }

    /** A pending request to focus one field of one specific stretch. */
    private static class FocusOnStrech implements IFocusApplycability {

        private final Stretch stretch;

        private final Field field;

        public FocusOnStrech(Stretch stretch, Field field) {
            this.stretch = stretch;
            this.field = field;
        }

        @Override
        public boolean focusIfApplycableOnAmountWork(Stretch stretch,
                Decimalbox amountWork) {
            // Fixed: return values were inverted relative to
            // focusIfApplycableOnLength. The old code returned true for a
            // non-matching row, which made FocusState discard the pending
            // amount-work focus as soon as any other row was rendered.
            if (field == Field.AMOUNT_WORK && this.stretch.equals(stretch)) {
                amountWork.focus();
                return true;
            }
            return false;
        }

        @Override
        public boolean focusIfApplycableOnLength(Stretch stretch,
                Decimalbox lenghtPercentage) {
            if (field == Field.LENGTH && this.stretch.equals(stretch)) {
                lenghtPercentage.focus();
                return true;
            }
            return false;
        }
    }

    private FocusState focusState = FocusState.noFocus();

    /**
     * Renders a {@link Stretch}.
     *
     * @author Manuel Rego Casasnovas <mrego@igalia.com>
     */
    private class StretchesRenderer implements ListitemRenderer {

        @Override
        public void render(Listitem item, Object data) throws Exception {
            Stretch stretch = (Stretch) data;
            item.setValue(stretch);
            appendDate(item, stretch);
            appendLengthPercentage(item, stretch);
            appendAmountWorkPercentage(item, stretch);
            appendOperations(item, stretch);
        }

        /** Wraps the component in a cell and appends it to the row. */
        private void appendChild(Listitem item, Component component) {
            Listcell listcell = new Listcell();
            listcell.appendChild(component);
            item.appendChild(listcell);
        }

        /** Appends the editable stretch-date box. */
        private void appendDate(Listitem item, final Stretch stretch) {
            final Datebox tempDatebox = new Datebox();
            Datebox datebox = Util.bind(tempDatebox, new Util.Getter<Date>() {
                @Override
                public Date get() {
                    return stretch.getDate().toDateTimeAtStartOfDay().toDate();
                }
            }, new Util.Setter<Date>() {
                @Override
                public void set(Date value) {
                    try {
                        stretchesFunctionModel.setStretchDate(stretch, value);
                        reloadStretchesListAndCharts();
                    } catch (IllegalArgumentException e) {
                        throw new WrongValueException(tempDatebox, e
                                .getMessage());
                    }
                }
            });
            appendChild(item, datebox);
        }

        /**
         * Appends the length-percentage box. Values are shown as 0-100 but
         * stored as a 0-1 fraction.
         */
        private void appendLengthPercentage(Listitem item, final Stretch stretch) {
            final Decimalbox tempDecimalbox = new Decimalbox();
            Decimalbox decimalbox = Util.bind(tempDecimalbox,
                    new Util.Getter<BigDecimal>() {
                @Override
                public BigDecimal get() {
                    return stretch.getLengthPercentage().multiply(
                            new BigDecimal(100));
                }
            }, new Util.Setter<BigDecimal>() {
                @Override
                public void set(BigDecimal value) {
                    value = value.setScale(2).divide(
                            new BigDecimal(100), RoundingMode.DOWN);
                    try {
                        stretchesFunctionModel
                                .setStretchLengthPercentage(stretch,
                                        value);
                        // Keep the caret on this field after the reload re-renders the list.
                        focusState.focusOn(stretch, Field.LENGTH);
                        reloadStretchesListAndCharts();
                    } catch (IllegalArgumentException e) {
                        throw new WrongValueException(tempDecimalbox, e
                                .getMessage());
                    }
                }
            });
            appendChild(item, decimalbox);
            focusState.focusIfApplycableOnLength(stretch, decimalbox);
        }

        /**
         * Appends the amount-of-work percentage box. Values are shown as
         * 0-100 but stored as a 0-1 fraction; null input is treated as 0.
         */
        private void appendAmountWorkPercentage(Listitem item,
                final Stretch stretch) {
            final Decimalbox decimalBox = new Decimalbox();
            Util.bind(decimalBox,
                    new Util.Getter<BigDecimal>() {
                @Override
                public BigDecimal get() {
                    return stretch.getAmountWorkPercentage().multiply(
                            new BigDecimal(100));
                }
            }, new Util.Setter<BigDecimal>() {
                @Override
                public void set(BigDecimal value) {
                    if(value==null){
                        value = BigDecimal.ZERO;
                    }
                    value = value.setScale(2).divide(
                            new BigDecimal(100), RoundingMode.DOWN);
                    try {
                        stretch.setAmountWorkPercentage(value);
                        // Keep the caret on this field after the reload re-renders the list.
                        focusState.focusOn(stretch, Field.AMOUNT_WORK);
                        reloadStretchesListAndCharts();
                    } catch (IllegalArgumentException e) {
                        throw new WrongValueException(
                                decimalBox,
                                _("Amount work percentage should be between 0 and 100"));
                    }
                }
            });
            appendChild(item, decimalBox);
            focusState.focusIfApplycableOnAmountWork(stretch, decimalBox);
        }

        /** Appends the delete button for the stretch row. */
        private void appendOperations(Listitem item, final Stretch stretch) {
            Button button = new Button("", "/common/img/ico_borrar1.png");
            button.setHoverImage("/common/img/ico_borrar.png");
            button.setSclass("icono");
            button.setTooltiptext(_("Delete"));
            button.addEventListener(Events.ON_CLICK, new EventListener() {
                @Override
                public void onEvent(Event event) throws Exception {
                    stretchesFunctionModel.removeStretch(stretch);
                    reloadStretchesListAndCharts();
                }
            });
            appendChild(item, button);
        }
    }

    public void addStretch() {
        stretchesFunctionModel.addStretch();
        reloadStretchesListAndCharts();
    }

    /** Re-binds the stretches grid and both charts to the model. */
    private void reloadStretchesListAndCharts() {
        Util.reloadBindings(window.getFellow("stretchesList"));
        Util.reloadBindings(window.getFellow("charts"));
    }

    public XYModel getDedicationChartData() {
        return graphicGenerator.getDedicationChart(stretchesFunctionModel);
    }

    public XYModel getAccumulatedHoursChartData() {
        return graphicGenerator
                .getAccumulatedHoursChartData(stretchesFunctionModel);
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public boolean isChartsEnabled() {
        return graphicGenerator.areChartsEnabled(stretchesFunctionModel);
    }

    public boolean isChartsDisabled() {
        return !isChartsEnabled();
    }
}
|
package cn.momia.api.course;
import cn.momia.api.course.dto.BookedCourseDto;
import cn.momia.api.course.dto.CourseCommentDto;
import cn.momia.api.course.dto.CourseDetailDto;
import cn.momia.api.course.dto.CourseDto;
import cn.momia.api.course.dto.DatedCourseSkusDto;
import cn.momia.api.course.dto.InstitutionDto;
import cn.momia.api.course.dto.TeacherDto;
import cn.momia.common.api.ServiceApi;
import cn.momia.common.api.dto.PagedList;
import cn.momia.common.api.http.MomiaHttpParamBuilder;
import cn.momia.common.api.http.MomiaHttpRequestBuilder;
import cn.momia.common.api.util.CastUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.http.client.methods.HttpUriRequest;
import java.util.List;
/**
 * Client-side facade over the course service HTTP API. Each method builds the
 * corresponding request, executes it, and converts the raw JSON response into
 * typed DTOs or primitive results.
 */
public class CourseServiceApi extends ServiceApi {

    /** Builds the common "start"/"count" paging parameters used by list endpoints. */
    private MomiaHttpParamBuilder pagingParams(int start, int count) {
        return new MomiaHttpParamBuilder()
                .add("start", start)
                .add("count", count);
    }

    /** Fetches a single course, reporting the caller's position string. */
    public CourseDto get(long courseId, String pos) {
        HttpUriRequest req = MomiaHttpRequestBuilder.GET(
                url("course", courseId),
                new MomiaHttpParamBuilder().add("pos", pos).build());
        return CastUtil.toObject((JSON) executeRequest(req), CourseDto.class);
    }

    /** Lists courses of a subject, paged. */
    public PagedList<CourseDto> query(long subjectId, int start, int count) {
        MomiaHttpParamBuilder params = new MomiaHttpParamBuilder()
                .add("suid", subjectId)
                .add("start", start)
                .add("count", count);
        HttpUriRequest req = MomiaHttpRequestBuilder.GET(url("course/query"), params.build());
        return CastUtil.toPagedList((JSON) executeRequest(req), CourseDto.class);
    }

    /** Lists courses of a subject filtered by age range and sort type, paged. */
    public PagedList<CourseDto> query(long subjectId, int minAge, int maxAge, int sortTypeId, int start, int count) {
        MomiaHttpParamBuilder params = new MomiaHttpParamBuilder()
                .add("suid", subjectId)
                .add("min", minAge)
                .add("max", maxAge)
                .add("sort", sortTypeId)
                .add("start", start)
                .add("count", count);
        HttpUriRequest req = MomiaHttpRequestBuilder.GET(url("course/query"), params.build());
        return CastUtil.toPagedList((JSON) executeRequest(req), CourseDto.class);
    }

    /** Fetches the full detail of a course. */
    public CourseDetailDto detail(long courseId) {
        HttpUriRequest req = MomiaHttpRequestBuilder.GET(url("course", courseId, "detail"));
        return CastUtil.toObject((JSON) executeRequest(req), CourseDetailDto.class);
    }

    /** Lists the booking entries of a course, paged. */
    public PagedList<String> book(long courseId, int start, int count) {
        HttpUriRequest req = MomiaHttpRequestBuilder.GET(
                url("course", courseId, "book"), pagingParams(start, count).build());
        return CastUtil.toPagedList((JSON) executeRequest(req), String.class);
    }

    /** Lists the teachers of a course, paged. */
    public PagedList<TeacherDto> queryTeachers(long courseId, int start, int count) {
        HttpUriRequest req = MomiaHttpRequestBuilder.GET(
                url("course", courseId, "teacher"), pagingParams(start, count).build());
        return CastUtil.toPagedList((JSON) executeRequest(req), TeacherDto.class);
    }

    /** Fetches the institution running a course. */
    public InstitutionDto institution(long courseId) {
        HttpUriRequest req = MomiaHttpRequestBuilder.GET(url("course", courseId, "institution"));
        return CastUtil.toObject((JSON) executeRequest(req), InstitutionDto.class);
    }

    /** Lists a course's SKUs for the current week, grouped by date. */
    public List<DatedCourseSkusDto> listWeekSkus(long courseId) {
        HttpUriRequest req = MomiaHttpRequestBuilder.GET(url("course", courseId, "sku/week"));
        return CastUtil.toList((JSON) executeRequest(req), DatedCourseSkusDto.class);
    }

    /** Lists a course's SKUs for the given month, grouped by date. */
    public List<DatedCourseSkusDto> listMonthSkus(long courseId, int month) {
        HttpUriRequest req = MomiaHttpRequestBuilder.GET(
                url("course", courseId, "sku/month"),
                new MomiaHttpParamBuilder().add("month", month).build());
        return CastUtil.toList((JSON) executeRequest(req), DatedCourseSkusDto.class);
    }

    /** Lists a user's booked-but-unfinished courses, paged. */
    public PagedList<BookedCourseDto> queryNotFinishedByUser(long userId, int start, int count) {
        MomiaHttpParamBuilder params = new MomiaHttpParamBuilder()
                .add("uid", userId)
                .add("start", start)
                .add("count", count);
        HttpUriRequest req = MomiaHttpRequestBuilder.GET(url("course/notfinished"), params.build());
        return CastUtil.toPagedList((JSON) executeRequest(req), BookedCourseDto.class);
    }

    /** Lists a user's finished courses, paged. */
    public PagedList<BookedCourseDto> queryFinishedByUser(long userId, int start, int count) {
        MomiaHttpParamBuilder params = new MomiaHttpParamBuilder()
                .add("uid", userId)
                .add("start", start)
                .add("count", count);
        HttpUriRequest req = MomiaHttpRequestBuilder.GET(url("course/finished"), params.build());
        return CastUtil.toPagedList((JSON) executeRequest(req), BookedCourseDto.class);
    }

    /** Books a SKU against a package for the user identified by the token. */
    public boolean booking(String utoken, long packageId, long skuId) {
        MomiaHttpParamBuilder params = new MomiaHttpParamBuilder()
                .add("utoken", utoken)
                .add("pid", packageId)
                .add("sid", skuId);
        HttpUriRequest req = MomiaHttpRequestBuilder.POST(url("course/booking"), params.build());
        return (Boolean) executeRequest(req);
    }

    /** Cancels a booking for the user identified by the token. */
    public boolean cancel(String utoken, long bookingId) {
        MomiaHttpParamBuilder params = new MomiaHttpParamBuilder()
                .add("utoken", utoken)
                .add("bid", bookingId);
        HttpUriRequest req = MomiaHttpRequestBuilder.POST(url("course/cancel"), params.build());
        return (Boolean) executeRequest(req);
    }

    /** Submits a course comment as a raw JSON body. */
    public boolean comment(JSONObject commentJson) {
        HttpUriRequest req = MomiaHttpRequestBuilder.POST(url("course/comment"), commentJson.toString());
        return (Boolean) executeRequest(req);
    }

    /** Lists the comments of a course, paged. */
    public PagedList<CourseCommentDto> queryCommentsByCourse(long courseId, int start, int count) {
        HttpUriRequest req = MomiaHttpRequestBuilder.GET(
                url("course", courseId, "comment"), pagingParams(start, count).build());
        return CastUtil.toPagedList((JSON) executeRequest(req), CourseCommentDto.class);
    }

    /** Lists the comments of a subject, paged. */
    public PagedList<CourseCommentDto> queryCommentsBySubject(long subjectId, int start, int count) {
        HttpUriRequest req = MomiaHttpRequestBuilder.GET(
                url("subject", subjectId, "comment"), pagingParams(start, count).build());
        return CastUtil.toPagedList((JSON) executeRequest(req), CourseCommentDto.class);
    }

    /** Tells whether the user has favored the course. */
    public boolean isFavored(long userId, long courseId) {
        HttpUriRequest req = MomiaHttpRequestBuilder.GET(
                url("course", courseId, "favored"),
                new MomiaHttpParamBuilder().add("uid", userId).build());
        return (Boolean) executeRequest(req);
    }

    /** Marks the course as a favorite of the user. */
    public boolean favor(long userId, long courseId) {
        HttpUriRequest req = MomiaHttpRequestBuilder.POST(
                url("course", courseId, "favor"),
                new MomiaHttpParamBuilder().add("uid", userId).build());
        return (Boolean) executeRequest(req);
    }

    /** Removes the course from the user's favorites. */
    public boolean unfavor(long userId, long courseId) {
        HttpUriRequest req = MomiaHttpRequestBuilder.POST(
                url("course", courseId, "unfavor"),
                new MomiaHttpParamBuilder().add("uid", userId).build());
        return (Boolean) executeRequest(req);
    }
}
|
package net.ontrack.extension.jira.dao;
import com.google.common.collect.Iterables;
import net.ontrack.core.model.Ack;
import net.ontrack.dao.AbstractJdbcDao;
import net.ontrack.extension.jira.service.JIRAConfigurationNameAlreadyExistsException;
import net.ontrack.extension.jira.service.model.JIRAConfiguration;
import net.ontrack.extension.jira.service.model.JIRAConfigurationForm;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DuplicateKeyException;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Component;
import javax.sql.DataSource;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import java.util.Set;
/**
 * JDBC implementation of {@link JIRAConfigurationDao}. Stores excluded
 * projects and issues packed into a single comma-separated "exclusions"
 * column; {@link ExclusionsParser} splits them back apart on read.
 */
@Component
public class JIRAConfigurationJdbcDao extends AbstractJdbcDao implements JIRAConfigurationDao {

    /** Maps a configuration row, unpacking the "exclusions" column. */
    private final RowMapper<JIRAConfiguration> configurationMapper = new RowMapper<JIRAConfiguration>() {
        @Override
        public JIRAConfiguration mapRow(ResultSet rs, int rowNum) throws SQLException {
            ExclusionsParser parser = new ExclusionsParser(rs.getString("exclusions")).invoke();
            return new JIRAConfiguration(
                    rs.getInt("id"),
                    rs.getString("name"),
                    rs.getString("url"),
                    rs.getString("user"),
                    parser.getExcludedProjects(),
                    parser.getExcludedIssues()
            );
        }
    };

    @Autowired
    public JIRAConfigurationJdbcDao(DataSource dataSource) {
        super(dataSource);
    }

    @Override
    public List<JIRAConfiguration> findAll() {
        return getJdbcTemplate().query(JIRASQL.JIRA_CONFIGURATION_ALL, configurationMapper);
    }

    @Override
    public JIRAConfiguration create(JIRAConfigurationForm form) {
        try {
            int id = dbCreate(
                    JIRASQL.JIRA_CONFIGURATION_CREATE,
                    params("name", form.getName())
                            .addValue("url", form.getUrl())
                            .addValue("user", form.getUser())
                            .addValue("password", form.getPassword())
                            .addValue("exclusions", packExclusions(form.getExcludedProjects(), form.getExcludedIssues()))
            );
            return getById(id);
        } catch (DuplicateKeyException ex) {
            // The configuration name is unique; translate the DB error into a domain one.
            throw new JIRAConfigurationNameAlreadyExistsException(form.getName());
        }
    }

    /** Serializes excluded projects and issues into the packed column format. */
    private String packExclusions(Set<String> excludedProjects, Set<String> excludedIssues) {
        Iterable<String> all = Iterables.concat(excludedProjects, excludedIssues);
        return StringUtils.join(all, ",");
    }

    @Override
    public JIRAConfiguration update(int id, JIRAConfigurationForm form) {
        try {
            // A blank password in the form means "keep the stored one".
            String password = form.getPassword();
            if (StringUtils.isBlank(password)) {
                password = getPassword(id);
            }
            getNamedParameterJdbcTemplate().update(
                    JIRASQL.JIRA_CONFIGURATION_UPDATE,
                    params("id", id)
                            .addValue("name", form.getName())
                            .addValue("url", form.getUrl())
                            .addValue("user", form.getUser())
                            .addValue("password", password)
                            .addValue("exclusions", packExclusions(form.getExcludedProjects(), form.getExcludedIssues()))
            );
            return getById(id);
        } catch (DuplicateKeyException ex) {
            throw new JIRAConfigurationNameAlreadyExistsException(form.getName());
        }
    }

    @Override
    public JIRAConfiguration getById(int id) {
        return getNamedParameterJdbcTemplate().queryForObject(
                JIRASQL.JIRA_CONFIGURATION_BY_ID,
                params("id", id),
                configurationMapper
        );
    }

    @Override
    public JIRAConfiguration getByName(String name) {
        return getNamedParameterJdbcTemplate().queryForObject(
                JIRASQL.JIRA_CONFIGURATION_BY_NAME,
                params("name", name),
                configurationMapper
        );
    }

    @Override
    public Ack delete(int id) {
        // Ack.one(...) converts the affected-row count into an acknowledgement.
        int updated = getNamedParameterJdbcTemplate().update(
                JIRASQL.JIRA_CONFIGURATION_DELETE,
                params("id", id)
        );
        return Ack.one(updated);
    }

    @Override
    public String getPassword(int id) {
        return getNamedParameterJdbcTemplate().queryForObject(
                JIRASQL.JIRA_CONFIGURATION_PASSWORD,
                params("id", id),
                String.class
        );
    }
}
|
package edu.harvard.cscie71.hw1;
import static org.junit.Assert.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* @author eborysko
*
*/
/**
 * Unit tests for {@link HW1#sum(int, int)} and the boxed overload
 * {@link HW1#sum(Integer, Integer)}.
 *
 * @author eborysko
 */
public class TestHW1 {

    HW1 tester;

    /**
     * @throws java.lang.Exception
     */
    @Before
    public void setUp() throws Exception {
        tester = new HW1();
    }

    /**
     * @throws java.lang.Exception
     */
    @After
    public void tearDown() throws Exception {
        tester = null;
    }

    /**
     * Test method for {@link edu.harvard.cscie71.hw1.HW1#sum(int, int)}.
     */
    @Test
    public void testSumIntInt() {
        assertEquals("0 + 0 = 0", 0, tester.sum(0, 0));
        assertEquals("1 + 0 = 1", 1, tester.sum(1, 0));
        assertEquals("2 + 1 = 3", 3, tester.sum(2, 1));
        assertEquals("4 + 3 = 7", 7, tester.sum(4, 3));
        assertEquals("4 + -7 = -3", -3, tester.sum(4, -7));
    }

    /**
     * Test method for {@link edu.harvard.cscie71.hw1.HW1#sum(java.lang.Integer, java.lang.Integer)}.
     * Null arguments are not covered here; the original commented-out cases
     * suggest null should be treated as 0 — add such tests if HW1 supports it.
     */
    @Test
    public void testSumIntegerInteger() {
        // Integer.valueOf keeps dispatch on the boxed overload without using
        // the deprecated Integer(int) constructor.
        assertEquals("0 + 0 = 0", 0, tester.sum(Integer.valueOf(0), Integer.valueOf(0)));
        assertEquals("0 + 1 = 1", 1, tester.sum(Integer.valueOf(0), Integer.valueOf(1)));
        assertEquals("1 + 2 = 3", 3, tester.sum(Integer.valueOf(1), Integer.valueOf(2)));
        assertEquals("3 + 4 = 7", 7, tester.sum(Integer.valueOf(3), Integer.valueOf(4)));
        assertEquals("4 + -7 = -3", -3, tester.sum(Integer.valueOf(4), Integer.valueOf(-7)));
        // Fixed: the expected value was -3 although the message and operands
        // clearly state -3 + -7 = -10.
        assertEquals("-3 + -7 = -10", -10, tester.sum(Integer.valueOf(-3), Integer.valueOf(-7)));
    }
}
|
package com.intellij.internal.statistic.collectors.fus.ui;
import com.intellij.internal.statistic.beans.MetricEvent;
import com.intellij.internal.statistic.beans.MetricEventFactoryKt;
import com.intellij.internal.statistic.eventLog.FeatureUsageData;
import com.intellij.internal.statistic.service.fus.collectors.ApplicationUsagesCollector;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.colors.impl.AbstractColorsScheme;
import com.intellij.openapi.options.SchemeManager;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.ColorUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.HashSet;
import java.util.Set;
/**
 * Usage collector reporting which editor color scheme is active. The scheme
 * name is normalized to a member of {@link #KNOWN_NAMES} (or
 * {@link #SCHEME_NAME_OTHER}) so arbitrary user scheme names are never sent.
 */
public class EditorColorSchemesUsagesCollector extends ApplicationUsagesCollector {
  private final static int CURR_VERSION = 2;

  public static final String SCHEME_NAME_OTHER = "Other";

  // Whitelist of reportable scheme names, matched case-insensitively as substrings.
  public final static String[] KNOWN_NAMES = {
    "Default",
    "Darcula",
    "Obsidian",
    "Visual Studio",
    "Solarized",
    "Wombat",
    "Monkai",
    "XCode",
    "Sublime",
    "Oblivion",
    "Zenburn",
    "Cobalt",
    "Netbeans",
    "Eclipse",
    "Aptana",
    "Flash Builder",
    "IdeaLight",
    "High сontrast",
    "ReSharper",
    "Rider"
  };

  @Override
  public int getVersion() {
    return CURR_VERSION;
  }

  @NotNull
  @Override
  public Set<MetricEvent> getMetrics() {
    Set<MetricEvent> metrics = new HashSet<>();
    EditorColorsScheme scheme = EditorColorsManager.getInstance().getGlobalScheme();
    if (!(scheme instanceof AbstractColorsScheme)) {
      return metrics;
    }
    String name = scheme.getName();
    if (name.startsWith(SchemeManager.EDITABLE_COPY_PREFIX)) {
      // Report the base scheme, not the user's editable copy of it.
      EditorColorsScheme base = ((AbstractColorsScheme)scheme).getOriginal();
      if (base != null) {
        name = base.getName();
      }
    }
    metrics.add(MetricEventFactoryKt.newMetric(getKnownSchemeName(name),
                                               ColorUtil.isDark(scheme.getDefaultBackground())));
    return metrics;
  }

  /** Maps an arbitrary scheme name onto the reportable whitelist, or "Other". */
  @NotNull
  private static String getKnownSchemeName(@NonNls @NotNull String schemeName) {
    String upperName = StringUtil.toUpperCase(schemeName);
    for (@NonNls String candidate : KNOWN_NAMES) {
      if (upperName.contains(StringUtil.toUpperCase(candidate))) {
        return candidate;
      }
    }
    return SCHEME_NAME_OTHER;
  }

  @NotNull
  @Override
  public String getGroupId() {
    return "ui.editor.color.schemes";
  }

  @Nullable
  @Override
  public FeatureUsageData getData() {
    return new FeatureUsageData().addOS();
  }
}
|
package org.geomajas.widget.layer.client.widget;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import org.geomajas.annotation.Api;
import org.geomajas.configuration.client.ClientToolInfo;
import org.geomajas.configuration.client.ClientVectorLayerInfo;
import org.geomajas.gwt.client.Geomajas;
import org.geomajas.gwt.client.action.ToolbarBaseAction;
import org.geomajas.gwt.client.action.layertree.LayerTreeAction;
import org.geomajas.gwt.client.action.layertree.LayerTreeModalAction;
import org.geomajas.gwt.client.action.layertree.LayerTreeRegistry;
import org.geomajas.gwt.client.map.event.LayerChangedHandler;
import org.geomajas.gwt.client.map.event.LayerFilteredEvent;
import org.geomajas.gwt.client.map.event.LayerFilteredHandler;
import org.geomajas.gwt.client.map.event.LayerLabeledEvent;
import org.geomajas.gwt.client.map.event.LayerShownEvent;
import org.geomajas.gwt.client.map.event.LayerStyleChangeEvent;
import org.geomajas.gwt.client.map.event.LayerStyleChangedHandler;
import org.geomajas.gwt.client.map.layer.Layer;
import org.geomajas.gwt.client.map.layer.RasterLayer;
import org.geomajas.gwt.client.map.layer.VectorLayer;
import org.geomajas.gwt.client.util.UrlBuilder;
import org.geomajas.gwt.client.widget.MapWidget;
import org.geomajas.sld.FeatureTypeStyleInfo;
import org.geomajas.sld.RuleInfo;
import org.geomajas.sld.UserStyleInfo;
import org.geomajas.widget.layer.client.LayerMessages;
import org.geomajas.widget.layer.client.util.LayerIconUtil;
import org.geomajas.widget.layer.configuration.client.ClientAbstractNodeInfo;
import org.geomajas.widget.layer.configuration.client.ClientBranchNodeInfo;
import org.geomajas.widget.layer.configuration.client.ClientLayerNodeInfo;
import org.geomajas.widget.layer.configuration.client.ClientLayerTreeInfo;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.shared.HandlerRegistration;
import com.smartgwt.client.types.Alignment;
import com.smartgwt.client.types.SelectionType;
import com.smartgwt.client.widgets.Canvas;
import com.smartgwt.client.widgets.IButton;
import com.smartgwt.client.widgets.ImgButton;
import com.smartgwt.client.widgets.events.ClickEvent;
import com.smartgwt.client.widgets.events.ClickHandler;
import com.smartgwt.client.widgets.grid.ListGridRecord;
import com.smartgwt.client.widgets.layout.HLayout;
import com.smartgwt.client.widgets.layout.LayoutSpacer;
import com.smartgwt.client.widgets.tree.TreeGrid;
import com.smartgwt.client.widgets.tree.TreeNode;
import com.smartgwt.client.widgets.tree.events.LeafClickEvent;
/**
* A layertree widget with combined legend per layer.
*
* @author Kristof Heirwegh
* @since 1.0.0
*/
@Api
public class CombinedLayertree extends LayerTreeBase {
// Dispatcher sub-path and file extension used to build legend-graphic icon URLs.
private static final String LEGEND_ICONS_PATH = "legendgraphic";
private static final String LEGEND_ICONS_TYPE = ".png";
// Icon for the "show layer info" action.
private static final String SHOW_LAYERINFO_ICON = "[ISOMORPHIC]/geomajas/silk/cog.png";
private static final LayerMessages MESSAGES = GWT.create(LayerMessages.class);
// TreeNode attribute recording whether a branch starts expanded.
private static final String EXPANDED_ATTR = "isExpanded";
private final MapWidget mapWidget;
// Handler registrations kept so they can be removed on teardown.
private final List<HandlerRegistration> registrations = new ArrayList<HandlerRegistration>();
protected LayerTreeTreeNode rollOverLayerTreeNode;
// Legend item nodes per vector layer, used to refresh icons when styles change.
private HashMap<VectorLayer, List<LayerTreeLegendItemNode>> legendIcons =
new HashMap<VectorLayer, List<LayerTreeLegendItemNode>>();
/**
 * Constructs the combined layer tree for the given map and enables the
 * roll-over canvas on the tree grid.
 *
 * @param map map whose layers are shown
 */
public CombinedLayertree(final MapWidget map) {
    super(map);
    this.mapWidget = map;
    treeGrid.setShowRollOverCanvas(true);
}
/**
 * Returns the icon size used by the tree grid.
 * NOTE(review): this reads {@code getImageSize()} while the setter calls
 * {@code setIconSize()} — confirm both SmartGWT properties stay in sync.
 */
public int getIconSize() {
return treeGrid.getImageSize();
}
/**
 * Sets the icon size on the tree grid.
 *
 * @param iconSize icon size in pixels
 */
public void setIconSize(int iconSize) {
treeGrid.setIconSize(iconSize);
}
/**
 * Processes a treeNode (add it to the TreeGrid).
 *
 * @param treeNode
 *            The treeNode to process
 * @param nodeRoot
 *            The root node to which the treeNode has to be added
 * @param refresh
 *            True if the tree is refreshed (causing it to keep its expanded state)
 */
protected void processNode(final ClientAbstractNodeInfo treeNode, final TreeNode nodeRoot, final boolean refresh) {
    // instanceof is null-safe, so the explicit null checks of the original were
    // redundant, as was the repeated ClientLayerNodeInfo test in the leaf branch.
    if (treeNode instanceof ClientBranchNodeInfo) {
        // Branch: add a folder node and recurse into its children.
        String treeNodeLabel = ((ClientBranchNodeInfo) treeNode).getLabel();
        final TreeNode node = new TreeNode(treeNodeLabel);
        node.setAttribute(EXPANDED_ATTR, ((ClientBranchNodeInfo) treeNode).isExpanded());
        tree.add(node, nodeRoot);
        for (ClientAbstractNodeInfo childNode : treeNode.getTreeNodes()) {
            processNode(childNode, node, refresh);
        }
    } else if (treeNode instanceof ClientLayerNodeInfo) {
        Layer<?> layer = mapModel.getLayer(((ClientLayerNodeInfo) treeNode).getLayerId());
        // Ignore layers that are not available in the map
        if (layer != null) {
            LayerTreeLegendNode ltln = new LayerTreeLegendNode(this.tree, layer);
            tree.add(ltln, nodeRoot);
            ltln.init();
        }
    }
}
/**
 * When a legendItem is selected, select the layer instead.
 *
 * @param event
 *            event
 */
public void onLeafClick(LeafClickEvent event) {
    final LayerTreeTreeNode selectedNode;
    if (event.getLeaf() instanceof LayerTreeLegendItemNode) {
        // A legend row was clicked: move the grid selection up to its layer node.
        selectedNode = ((LayerTreeLegendItemNode) event.getLeaf()).parent;
        treeGrid.deselectRecord(event.getLeaf());
        treeGrid.selectRecord(selectedNode);
    } else {
        selectedNode = (LayerTreeTreeNode) event.getLeaf();
    }
    // -- update model
    mapModel.selectLayer(selectedNode.getLayer());
}
/**
 * Node with legend for LayerNode.
 */
public class LayerTreeLegendNode extends LayerTreeTreeNode {

    public LayerTreeLegendNode(RefreshableTree tree, Layer<?> layer) {
        super(tree, layer);
    }

    /**
     * Adds one legend child node per style rule for vector layers, or a single
     * legend image node for raster layers.
     */
    public void init() {
        if (layer instanceof VectorLayer) {
            VectorLayer vectorLayer = (VectorLayer) layer;
            ArrayList<LayerTreeLegendItemNode> itemNodes = new ArrayList<LayerTreeLegendItemNode>();
            legendIcons.put(vectorLayer, itemNodes);
            ClientVectorLayerInfo layerInfo = vectorLayer.getLayerInfo();
            // For vector layers, loop over the rules of the first feature type style.
            UserStyleInfo userStyle = layerInfo.getNamedStyleInfo().getUserStyle();
            FeatureTypeStyleInfo styleInfo = userStyle.getFeatureTypeStyleList().get(0);
            for (int ruleIndex = 0; ruleIndex < styleInfo.getRuleList().size(); ruleIndex++) {
                RuleInfo rule = styleInfo.getRuleList().get(ruleIndex);
                // Prefer the rule title, then the rule name, then the style name.
                String title = (rule.getTitle() != null ? rule.getTitle() : rule.getName());
                if (title == null) {
                    title = layerInfo.getNamedStyleInfo().getName();
                }
                LayerTreeLegendItemNode itemNode =
                        new LayerTreeLegendItemNode(this, vectorLayer, ruleIndex, title);
                itemNodes.add(itemNode);
                tree.add(itemNode, this);
            }
        } else if (layer instanceof RasterLayer) {
            RasterLayer rasterLayer = (RasterLayer) layer;
            tree.add(new LayerTreeLegendItemNode(this, rasterLayer,
                    LayerIconUtil.getSmallLayerIconUrl(rasterLayer)), this);
        }
    }
}
/**
 * Node which displays a legend icon + description.
 */
public class LayerTreeLegendItemNode extends LayerTreeTreeNode {

    private LayerTreeLegendNode parent;

    // Index of the SLD rule this item represents (vector layers only).
    private int ruleIndex;

    // rasterlayer
    public LayerTreeLegendItemNode(LayerTreeLegendNode parent, RasterLayer layer, String rasterIconUrl) {
        super(parent.tree, parent.layer);
        this.parent = parent;
        setTitle(layer.getLabel());
        setName(parent.getAttribute("id") + "_legend");
        if (rasterIconUrl != null) {
            setIcon(rasterIconUrl);
        } else {
            // No explicit icon given: fall back to the legend icon service URL.
            UrlBuilder url = new UrlBuilder(Geomajas.getDispatcherUrl());
            url.addPath(LEGEND_ICONS_PATH);
            url.addPath(layer.getServerLayerId() + LEGEND_ICONS_TYPE);
            setIcon(url.toString());
        }
    }

    // vectorlayer
    public LayerTreeLegendItemNode(LayerTreeLegendNode parent, VectorLayer layer, int ruleIndex, String title) {
        super(parent.tree, parent.layer);
        this.parent = parent;
        // BUGFIX: the rule index was never stored, so every legend item built
        // its name/icon from the default index 0. Store it before updateStyle().
        this.ruleIndex = ruleIndex;
        setTitle(title);
        updateStyle(layer);
    }

    /**
     * Recomputes this item's name and legend icon URL from the layer's current
     * named style and the stored rule index. Called again whenever the layer
     * style changes.
     *
     * @param layer the vector layer whose style should be reflected
     */
    public void updateStyle(VectorLayer layer) {
        String name = layer.getLayerInfo().getNamedStyleInfo().getName();
        setName(name + "_" + ruleIndex);
        // BUGFIX: build a fresh URL on every call. The previous shared
        // UrlBuilder field kept accumulating path segments each time
        // updateStyle() ran (e.g. from the layer-style-changed handler),
        // producing a broken icon URL after the first update.
        UrlBuilder url = new UrlBuilder(Geomajas.getDispatcherUrl());
        url.addPath(LEGEND_ICONS_PATH);
        url.addPath(layer.getServerLayerId());
        url.addPath(name);
        url.addPath(ruleIndex + ".png");
        setIcon(url.toString());
    }

    @Override
    public void updateIcon() {
        // Deliberate no-op: legend icons are managed by updateStyle(), so the
        // default visibility-based icon handling must not overwrite them.
    }

    public LayerTreeLegendNode getParent() {
        return parent;
    }

    public void setParent(LayerTreeLegendNode parent) {
        this.parent = parent;
    }
}
@Override
protected void syncNodeState(boolean layersOnly) {
    // Open or close every folder so the tree reflects the current state.
    for (TreeNode node : tree.getAllNodes(tree.getRoot())) {
        if (node instanceof LayerTreeLegendNode) {
            // Layer nodes follow the layer's visibility.
            boolean showing = ((LayerTreeLegendNode) node).layer.isShowing();
            if (showing) {
                tree.openFolder(node);
            } else {
                tree.closeFolder(node);
            }
        } else if (!layersOnly && !(node instanceof LayerTreeLegendItemNode)) {
            // Plain folders follow their persisted expanded attribute.
            if (node.getAttributeAsBoolean(EXPANDED_ATTR)) {
                tree.openFolder(node);
            } else {
                tree.closeFolder(node);
            }
        }
    }
}
@Override
protected TreeGrid createTreeGrid() {
    // Use the rollover variant that shows only the "layer info" button;
    // createTreeGridFullRollover() is the full-toolbar alternative.
    return createTreeGridInfoWindowRollover();
}
/**
 * Handles a click on a node icon: layer nodes delegate to the default
 * show/hide behaviour; folder nodes are not implemented yet.
 */
@Override
protected void onIconClick(TreeNode node) {
    if (node instanceof LayerTreeLegendNode) {
        super.onIconClick(node);
    } else {
        // BUGFIX: this log previously ran unconditionally because the else
        // branch had been commented out; it now fires only for non-layer nodes.
        // TODO -- show/hide all layers in folder
        GWT.log("TODO");
    }
}
/**
 * Creates a tree grid whose row rollover shows a single "layer actions"
 * button that opens the {@code LayerActions} window for the hovered layer.
 */
protected TreeGrid createTreeGridInfoWindowRollover() {
    return new TreeGrid() {

        // Rollover toolbar, built lazily on first use and reused for all rows.
        private HLayout rollOverTools;

        // 1px-wide placeholder returned for rows that are not layer nodes.
        private HLayout emptyRollOver;

        @Override
        protected Canvas getRollOverCanvas(Integer rowNum, Integer colNum) {
            if (rollOverTools == null) {
                // First invocation: build both canvases once.
                rollOverTools = new HLayout();
                rollOverTools.setSnapTo("TR");
                rollOverTools.setWidth(25);
                rollOverTools.setHeight(LAYERTREEBUTTON_SIZE);
                emptyRollOver = new HLayout();
                emptyRollOver.setWidth(1);
                emptyRollOver.setHeight(LAYERTREEBUTTON_SIZE);
                // Button that opens the layer actions window for the hovered layer.
                ImgButton showInfo = new ImgButton();
                showInfo.setShowDown(false);
                showInfo.setShowRollOver(false);
                showInfo.setLayoutAlign(Alignment.CENTER);
                showInfo.setSrc(SHOW_LAYERINFO_ICON);
                showInfo.setPrompt(MESSAGES.layerTreeWithLegendLayerActionsToolTip());
                showInfo.setHeight(16);
                showInfo.setWidth(16);
                showInfo.addClickHandler(new ClickHandler() {

                    public void onClick(ClickEvent event) {
                        // Acts on the node captured below when the rollover was shown.
                        LayerActions la = new LayerActions(rollOverLayerTreeNode.getLayer());
                        la.draw();
                    }
                });
                rollOverTools.addMember(showInfo);
            }
            // Resolve the hovered record to a layer node; legend items delegate
            // to their parent layer node. Non-layer rows get the empty canvas.
            ListGridRecord lgr = this.getRecord(rowNum);
            if (lgr instanceof LayerTreeLegendItemNode) {
                rollOverLayerTreeNode = ((LayerTreeLegendItemNode) lgr).parent;
            } else if (lgr instanceof LayerTreeLegendNode) {
                rollOverLayerTreeNode = (LayerTreeTreeNode) lgr;
            } else {
                rollOverLayerTreeNode = null;
                rollOverTools.setVisible(false);
                return emptyRollOver;
            }
            rollOverTools.setVisible(true);
            return rollOverTools;
        }
    };
}
/**
 * Creates a tree grid whose row rollover shows the full toolbar of layer
 * tree actions configured in the {@code ClientLayerTreeInfo} widget info.
 */
protected TreeGrid createTreeGridFullRollover() {
    return new TreeGrid() {

        // Rollover toolbar, built lazily on first use and reused for all rows.
        private HLayout rollOverTools;

        // 1px-wide placeholder returned for rows that are not layer nodes.
        private HLayout emptyRollOver;

        // Cached toolbar members, refreshed per row via updateButtonIconsAndStates().
        private Canvas[] toolButtons = new Canvas[0];

        @Override
        protected Canvas getRollOverCanvas(Integer rowNum, Integer colNum) {
            if (rollOverTools == null) {
                rollOverTools = new HLayout();
                rollOverTools.setSnapTo("TR");
                rollOverTools.setWidth(50);
                rollOverTools.setHeight(LAYERTREEBUTTON_SIZE);
                emptyRollOver = new HLayout();
                emptyRollOver.setWidth(1);
                emptyRollOver.setHeight(LAYERTREEBUTTON_SIZE);
                // Build one button (plus spacer) per configured layer tree tool.
                ClientLayerTreeInfo layerTreeInfo = (ClientLayerTreeInfo) mapModel.getMapInfo().getWidgetInfo(
                        ClientLayerTreeInfo.IDENTIFIER);
                if (layerTreeInfo != null) {
                    for (ClientToolInfo tool : layerTreeInfo.getTools()) {
                        String id = tool.getId();
                        IButton button = null;
                        ToolbarBaseAction action = LayerTreeRegistry.getToolbarAction(id, mapWidget);
                        if (action instanceof LayerTreeAction) {
                            button = new LayerTreeButton(CombinedLayertree.this, (LayerTreeAction) action);
                        } else if (action instanceof LayerTreeModalAction) {
                            button =
                                    new LayerTreeModalButton(CombinedLayertree.this, (LayerTreeModalAction) action);
                        }
                        if (button != null) {
                            rollOverTools.addMember(button);
                            LayoutSpacer spacer = new LayoutSpacer();
                            spacer.setWidth(2);
                            rollOverTools.addMember(spacer);
                        }
                    }
                }
                toolButtons = rollOverTools.getMembers();
            }
            // Resolve the hovered record to a layer node; legend items delegate
            // to their parent layer node. Non-layer rows get the empty canvas.
            ListGridRecord lgr = this.getRecord(rowNum);
            if (lgr instanceof LayerTreeLegendItemNode) {
                rollOverLayerTreeNode = ((LayerTreeLegendItemNode) lgr).parent;
            } else if (lgr instanceof LayerTreeLegendNode) {
                rollOverLayerTreeNode = (LayerTreeTreeNode) lgr;
            } else {
                rollOverLayerTreeNode = null;
                rollOverTools.setVisible(false);
                return emptyRollOver;
            }
            rollOverTools.setVisible(true);
            updateButtonIconsAndStates();
            return rollOverTools;
        }

        /**
         * Updates the icons and the state of the buttons in the toolbar based
         * upon the currently hovered layer ({@code rollOverLayerTreeNode}).
         */
        private void updateButtonIconsAndStates() {
            for (Canvas toolButton : toolButtons) {
                if (toolButton instanceof LayerTreeModalButton) {
                    ((LayerTreeModalButton) toolButton).update();
                } else if (toolButton instanceof LayerTreeButton) {
                    ((LayerTreeButton) toolButton).update();
                }
            }
        }
    };
}
/**
 * General definition of an action button for the layer tree.
 *
 * @author Frank Wynants
 * @author Pieter De Graef
 */
private class LayerTreeButton extends IButton {

    private final CombinedLayertree tree;

    private final LayerTreeAction action;

    public LayerTreeButton(final CombinedLayertree tree, final LayerTreeAction action) {
        super();
        this.tree = tree;
        this.action = action;
        setWidth(LAYERTREEBUTTON_SIZE);
        setHeight(LAYERTREEBUTTON_SIZE);
        setIconSize(LAYERTREEBUTTON_SIZE - 8);
        setIcon(action.getIcon());
        setTooltip(action.getTooltip());
        setActionType(SelectionType.BUTTON);
        setShowDisabledIcon(false);
        addClickHandler(new ClickHandler() {

            public void onClick(ClickEvent event) {
                // Run the action against the layer currently under the cursor.
                try {
                    action.onClick(tree.rollOverLayerTreeNode.getLayer());
                    update();
                } catch (Throwable t) {
                    GWT.log("LayerTreeButton onClick error", t);
                }
            }
        });
    }

    /**
     * Enables or disables this button for the layer currently under the
     * cursor, refreshing its icon and tooltip accordingly.
     */
    public void update() {
        LayerTreeTreeNode hovered = tree.rollOverLayerTreeNode;
        boolean usable = hovered != null && action.isEnabled(hovered.getLayer());
        if (!usable) {
            setDisabled(true);
            GWT.log("LayerTreeButton" + action.getDisabledIcon());
            setIcon(action.getDisabledIcon());
            setTooltip("");
            return;
        }
        setDisabled(false);
        setIcon(action.getIcon());
        setTooltip(action.getTooltip());
    }
}
/**
 * General definition of a modal (toggle) button for the layer tree.
 *
 * @author Frank Wynants
 * @author Pieter De Graef
 */
private class LayerTreeModalButton extends IButton {

    private final CombinedLayertree tree;

    private final LayerTreeModalAction modalAction;

    /**
     * Constructor.
     *
     * @param tree
     *            The layer tree; its rollOverLayerTreeNode supplies the layer to act on
     * @param modalAction
     *            The action coupled to this button
     */
    public LayerTreeModalButton(final CombinedLayertree tree, final LayerTreeModalAction modalAction) {
        super();
        this.tree = tree;
        this.modalAction = modalAction;
        setWidth(LAYERTREEBUTTON_SIZE);
        setHeight(LAYERTREEBUTTON_SIZE);
        setIconSize(LAYERTREEBUTTON_SIZE - 8);
        setIcon(modalAction.getDeselectedIcon());
        // CHECKBOX keeps the button pressed/released to mirror the modal state.
        setActionType(SelectionType.CHECKBOX);
        setTooltip(modalAction.getDeselectedTooltip());
        setShowDisabledIcon(false);
        this.addClickHandler(new ClickHandler() {

            public void onClick(ClickEvent event) {
                LayerTreeTreeNode selectedLayerNode = tree.rollOverLayerTreeNode;
                // NOTE(review): assumes isSelected() already reflects the
                // post-click state — confirm against the SmartGWT API.
                if (LayerTreeModalButton.this.isSelected()) {
                    modalAction.onSelect(selectedLayerNode.getLayer());
                } else {
                    modalAction.onDeselect(selectedLayerNode.getLayer());
                }
                selectedLayerNode.updateIcon();
                update();
            }
        });
    }

    /**
     * Syncs enabled state, icon, tooltip and selection with the currently
     * hovered layer. Statement order matters here: the selected/deselected
     * branch below can override the disabled icon set in the first branch.
     */
    public void update() {
        LayerTreeTreeNode selected = tree.rollOverLayerTreeNode;
        if (selected != null && modalAction.isEnabled(selected.getLayer())) {
            setDisabled(false);
        } else {
            setSelected(false);
            setDisabled(true);
            // NOTE(review): looks like leftover debug output — consider removing.
            GWT.log("LayerTreeModalButton" + modalAction.getDisabledIcon());
            setIcon(modalAction.getDisabledIcon());
            setTooltip("");
        }
        if (selected != null && modalAction.isSelected(selected.getLayer())) {
            setIcon(modalAction.getSelectedIcon());
            setTooltip(modalAction.getSelectedTooltip());
            select();
        } else if (selected != null) {
            setIcon(modalAction.getDeselectedIcon());
            setTooltip(modalAction.getDeselectedTooltip());
            deselect();
        }
    }
}
// -- part of legend
/**
 * Applies the configured icon size and registers per-layer handlers so the
 * legend nodes stay in sync with the map (label, visibility, style and
 * filter changes). All registrations are collected for removal in onUnload().
 */
@Override
protected void initialize() {
    super.initialize();
    // Apply the icon size configured on the layer tree widget info, if any.
    ClientLayerTreeInfo ltwli = (ClientLayerTreeInfo) mapWidget.getMapModel().getMapInfo()
            .getWidgetInfo(ClientLayerTreeInfo.IDENTIFIER);
    setIconSize(ltwli == null ? DEFAULT_ICONSIZE : ltwli.getIconSize());
    for (Layer<?> layer : mapModel.getLayers()) {
        registrations.add(layer.addLayerChangedHandler(new LayerChangedHandler() {

            public void onLabelChange(LayerLabeledEvent event) {
                GWT.log("Legend: onLabelChange() - " + event.getLayer().getLabel());
                // Find the node & update the icon.
                // NOTE(review): nodes are matched by comparing node name to the
                // layer label — confirm node names are indeed set to labels.
                for (TreeNode node : tree.getAllNodes()) {
                    if (node.getName().equals(event.getLayer().getLabel()) && node instanceof LayerTreeTreeNode) {
                        ((LayerTreeTreeNode) node).updateIcon();
                    }
                }
            }

            public void onVisibleChange(LayerShownEvent event) {
                GWT.log("Legend: onVisibleChange() - " + event.getLayer().getLabel());
                // Find the node & update the icon.
                for (TreeNode node : tree.getAllNodes()) {
                    if (node.getName().equals(event.getLayer().getLabel()) && node instanceof LayerTreeTreeNode) {
                        ((LayerTreeTreeNode) node).updateIcon();
                    }
                }
            }
        }));
        registrations.add(layer.addLayerStyleChangedHandler(new LayerStyleChangedHandler() {

            public void onLayerStyleChange(LayerStyleChangeEvent event) {
                GWT.log("Legend: onLayerStyleChange()");
                // Refresh the legend icon of every rule node of the vector layer.
                Layer<?> layer = event.getLayer();
                if (layer instanceof VectorLayer) {
                    for (LayerTreeLegendItemNode node : legendIcons.get(layer)) {
                        node.updateStyle((VectorLayer) layer);
                    }
                }
            }
        }));
        if (layer instanceof VectorLayer) {
            VectorLayer vl = (VectorLayer) layer;
            registrations.add(vl.addLayerFilteredHandler(new LayerFilteredHandler() {

                public void onFilterChange(LayerFilteredEvent event) {
                    GWT.log("Legend: onLayerFilterChange() - " + event.getLayer().getLabel());
                    // Find the node & update the icon.
                    for (TreeNode node : tree.getAllNodes()) {
                        if (node.getName().equals(event.getLayer().getLabel())
                                && node instanceof LayerTreeTreeNode) {
                            ((LayerTreeTreeNode) node).updateIcon();
                        }
                    }
                }
            }));
        }
    }
}
/** Removes all layer handler registrations on unload. */
@Override // added: this overrides the widget lifecycle hook — confirm superclass signature
protected void onUnload() {
    if (registrations != null) {
        for (HandlerRegistration registration : registrations) {
            registration.removeHandler();
        }
    }
    super.onUnload();
}
}
|
package com.redhat.ceylon.eclipse.code.quickfix;
import static com.redhat.ceylon.eclipse.code.outline.CeylonLabelProvider.CHANGE;
import java.util.Collection;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.OperationCanceledException;
import org.eclipse.jface.text.contentassist.ICompletionProposal;
import org.eclipse.ltk.core.refactoring.Change;
import com.redhat.ceylon.compiler.typechecker.model.Declaration;
import com.redhat.ceylon.compiler.typechecker.model.Value;
import com.redhat.ceylon.compiler.typechecker.tree.Node;
import com.redhat.ceylon.compiler.typechecker.tree.Tree;
import com.redhat.ceylon.eclipse.code.editor.CeylonEditor;
import com.redhat.ceylon.eclipse.code.refactor.RenameRefactoring;
public class ConvertGetterToMethodProposal extends ChangeCorrectionProposal {

    /**
     * Offers a "convert getter to method" proposal when the given node
     * identifies a getter: a getter definition, an attribute with a lazy
     * specifier, or a reference to a {@link Value}.
     */
    public static void addConvertGetterToMethodProposal(Collection<ICompletionProposal> proposals,
            CeylonEditor editor, Node node) {
        Value getter = findGetter(node);
        if (getter != null) {
            addConvertGetterToMethodProposal(proposals, editor, getter);
        }
    }

    /** Extracts the getter model from the AST node, or null when it is not a getter. */
    private static Value findGetter(Node node) {
        if (node instanceof Tree.AttributeGetterDefinition) {
            return ((Tree.AttributeGetterDefinition) node).getDeclarationModel();
        }
        if (node instanceof Tree.AttributeDeclaration &&
                ((Tree.AttributeDeclaration) node).getSpecifierOrInitializerExpression()
                        instanceof Tree.LazySpecifierExpression) {
            return ((Tree.AttributeDeclaration) node).getDeclarationModel();
        }
        if (node instanceof Tree.MemberOrTypeExpression) {
            Declaration decl = ((Tree.MemberOrTypeExpression) node).getDeclaration();
            if (decl instanceof Value) {
                return (Value) decl;
            }
        }
        return null;
    }

    /**
     * Builds the rename-based change ("name" -> "name()") and registers the
     * proposal, unless the refactoring is unavailable or already proposed.
     */
    private static void addConvertGetterToMethodProposal(Collection<ICompletionProposal> proposals,
            CeylonEditor editor, Value getter) {
        try {
            RenameRefactoring refactoring = new RenameRefactoring(editor) {
                @Override
                public String getName() {
                    return "Convert getter to method";
                }
            };
            refactoring.setNewName(getter.getName() + "()");
            // Bail out when the refactoring does not target this exact getter
            // or fails its preconditions.
            if (refactoring.getDeclaration() == null
                    || !refactoring.getDeclaration().equals(getter)
                    || !refactoring.isEnabled()
                    || !refactoring.checkAllConditions(new NullProgressMonitor()).isOK()) {
                return;
            }
            Change change = refactoring.createChange(new NullProgressMonitor());
            ConvertGetterToMethodProposal proposal = new ConvertGetterToMethodProposal(change, getter);
            if (!proposals.contains(proposal)) {
                proposals.add(proposal);
            }
        } catch (OperationCanceledException e) {
            // User cancelled: silently drop the proposal.
        } catch (CoreException e) {
            throw new RuntimeException(e);
        }
    }

    private ConvertGetterToMethodProposal(Change change, Value getter) {
        super("Convert getter '" + getter.getName() + "' to method", change, 10, CHANGE);
    }
}
|
package FileBuilder;
import Autogeneration.*;
import Util.UI5Icons;
import Util.Writer;
import com.intellij.icons.AllIcons;
import com.intellij.ide.actions.CreateFileFromTemplateAction;
import com.intellij.ide.actions.CreateFileFromTemplateDialog;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiFile;
import org.jetbrains.annotations.NonNls;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import java.util.regex.Pattern;
/**
 * "New UI5 Component" action: creates a file from a UI5 template, renames it
 * to the "*.view.&lt;ext&gt;" convention and autogenerates the matching view
 * and controller sources.
 */
public class UI5CreateFileAction extends CreateFileFromTemplateAction implements DumbAware {

    /** Template property handed to the platform for the default HTML template; intentionally empty. */
    @NonNls
    private static final String DEFAULT_HTML_TEMPLATE_PROPERTY = "";

    public UI5CreateFileAction() {
        super("UI5 Component", "Creates new UI5 Component", UI5Icons.getIcon());
    }

    @Override
    protected void postProcess(final PsiFile createdElement, final String templateName, Map<String, String> customProperties) {
        //TODO implement it as a background task
        // BUGFIX: compare string content, not references; '==' only worked by
        // accident when both strings were interned to the same instance.
        if ("ui5.properties".equals(templateName)) {
            return;
        }
        ProgressManager.getInstance().runProcessWithProgressSynchronously(new Runnable() {
            @Override
            public void run() {
                ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
                indicator.setText("Creating UI5 View");
                // BUGFIX: ProgressIndicator.setFraction() expects [0.0, 1.0].
                indicator.setFraction(0.1);
                // The template name encodes the extension, e.g. "ui5.view.xml" -> "xml".
                String[] templatePattern = templateName.split(Pattern.quote("."));
                String ext = templatePattern[templatePattern.length - 1];
                String createdName = createdElement.getName();
                String baseName = createdName.replace("." + ext, "");
                String newCreatedName = baseName + ".view." + ext;
                createdElement.setName(newCreatedName);
                // BUGFIX: use the platform separator instead of a hard-coded
                // backslash so paths also work outside Windows.
                String parentPath = createdElement.getVirtualFile().getParent().getPath();
                File viewFile = new File(parentPath + File.separator + newCreatedName);
                File controllerFile = new File(parentPath + File.separator + baseName + ".controller.js");
                indicator.setFraction(0.3);
                // Pick the view flavour matching the requested extension.
                UI5View ui5View = createView(ext);
                if (ui5View == null) {
                    // BUGFIX: unknown template extension previously fell through
                    // to an NPE during code generation; abort early instead.
                    return;
                }
                // Get the module path for autogeneration.
                Module module = ModuleUtilCore.findModuleForPsiElement(createdElement);
                createdElement.getOriginalFile().delete();
                if (module != null) {
                    String codePath = buildCodePath(module.getProject().getBasePath(),
                            new File(createdElement.getVirtualFile().getParent().getPath()).getPath());
                    String viewCode = ui5View.autogenerateCode(UI5Library.Desktop, codePath + "." + baseName);
                    String controllerCode = Controller.getAutogenerateCode(codePath, baseName);
                    try {
                        Writer.writeToFile(controllerFile, controllerCode);
                        Writer.writeToFile(viewFile, viewCode);
                    } catch (IOException e) {
                        Messages.showErrorDialog(createdElement.getProject(), e.getMessage(), "Create File from Template");
                        e.printStackTrace();
                    }
                }
            }
        }, "Adding UI5 View", false, createdElement.getProject());
        // super.postProcess(createdElement, templateName, customProperties);
    }

    /** Maps a template extension to the corresponding view generator, or null if unsupported. */
    private static UI5View createView(String ext) {
        if (ext.equals("js")) {
            return new JSView();
        } else if (ext.equals("xml")) {
            return new XMLView();
        } else if (ext.equals("json")) {
            return new JSONView();
        } else if (ext.equals("html")) {
            return new HTMLView();
        }
        return null;
    }

    /**
     * Derives the dot-separated code path of the created file relative to the
     * project root, e.g. "webapp.view" for "&lt;project&gt;/webapp/view".
     */
    private static String buildCodePath(String projectPath, String filePath) {
        String[] projectDircs = projectPath.split(Pattern.quote(File.separator));
        String[] fileDircs = filePath.split(Pattern.quote(File.separator));
        // Blank out the directories shared with the project root.
        // BUGFIX: bound the loop by both arrays to avoid an
        // ArrayIndexOutOfBoundsException when the file path is shorter.
        for (int i = 0; i < projectDircs.length && i < fileDircs.length; i++) {
            if (projectDircs[i].equals(fileDircs[i])) {
                fileDircs[i] = "";
            }
        }
        StringBuilder codePath = new StringBuilder();
        for (int i = 0; i < fileDircs.length; i++) {
            if (!fileDircs[i].isEmpty()) {
                if (codePath.length() > 0) {
                    codePath.append('.');
                }
                codePath.append(fileDircs[i]);
            }
        }
        return codePath.toString();
    }

    @Override
    protected String getDefaultTemplateProperty() {
        return DEFAULT_HTML_TEMPLATE_PROPERTY;
    }

    @Override
    protected void buildDialog(Project project, PsiDirectory directory, CreateFileFromTemplateDialog.Builder builder) {
        builder.setTitle("UI5 Component")
                .addKind("Javascript View", AllIcons.FileTypes.JavaScript, "ui5.view.js")
                .addKind("HTML View", AllIcons.FileTypes.Html, "ui5.view.html")
                .addKind("JSON View", AllIcons.FileTypes.Json, "ui5.view.json")
                .addKind("XML View", AllIcons.FileTypes.Xml, "ui5.view.xml")
                .addKind("i18n", AllIcons.FileTypes.Properties, "ui5.properties");
    }

    @Override
    protected String getActionName(PsiDirectory directory, String newName, String templateName) {
        return "UI5 file";
    }
}
|
package org.yakindu.sct.generator.core.filesystem;
import java.io.File;
import org.eclipse.emf.common.util.URI;
import org.eclipse.xtext.generator.JavaIoFileSystemAccess;
import org.eclipse.xtext.parser.IEncodingProvider;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
/**
 * File system access that roots all output below an injected absolute base
 * directory and a per-generation project folder.
 *
 * @author Johannes Dicks - Initial contribution and API
 */
@Singleton
public class SCTJavaIoFileSystemAccess extends JavaIoFileSystemAccess implements ISCTFileSystemAccess {

    /** Injection key for the absolute base directory all output is rooted under. */
    public static final String BASE_DIR = "filesystemAccess.absolute.baseDir";

    @Inject
    @Named(BASE_DIR)
    protected String absoluteBaseDir;

    @Inject
    private IEncodingProvider encodingProvider;

    // Name of the project folder created below the base directory.
    private String projectName;

    public void setContext(String projectName) {
        this.projectName = projectName;
    }

    /**
     * Prefixes the configured path with "&lt;baseDir&gt;/&lt;projectName&gt;/"
     * so generated artifacts land inside the target project folder.
     */
    @Override
    public void setOutputPath(String outputName, String path) {
        String absolutePath = String.join(File.separator, absoluteBaseDir, projectName, path);
        super.setOutputPath(outputName, absolutePath);
    }

    @Override
    protected String getEncoding(URI fileURI) {
        // Delegate to the injected provider instead of the JavaIoFileSystemAccess default.
        return encodingProvider.getEncoding(fileURI);
    }
}
|
package com.opengamma.financial.convention.initializer;
import org.joda.beans.JodaBeanUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.opengamma.financial.convention.OvernightIndexConvention;
import com.opengamma.financial.convention.VanillaIborLegConvention;
import com.opengamma.financial.security.index.IborIndex;
import com.opengamma.financial.security.index.OvernightIndex;
import com.opengamma.master.convention.ConventionDocument;
import com.opengamma.master.convention.ConventionMaster;
import com.opengamma.master.convention.ConventionSearchRequest;
import com.opengamma.master.convention.ConventionSearchResult;
import com.opengamma.master.convention.ManageableConvention;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.SecurityMasterUtils;
import com.opengamma.util.ArgumentChecker;
/**
 * A tool that allows a convention master to be initialized.
 * <p>
 * Conventions are typically stored in a master database, however they may be
 * initialized from code as they rarely change.
 * <p>
 * Convention lookup relies on appropriate securities being present, see
 * {@code com.opengamma.financial.security.index.IborIndex}. Old style
 * behaviour (without security based lookup) is preserved by calling the
 * deprecated init() and associated functions.
 */
public abstract class ConventionMasterInitializer {

    /** Logger. */
    private static final Logger s_logger = LoggerFactory.getLogger(ConventionMasterInitializer.class);

    /**
     * Initializes the specified master.
     *
     * @param master the master to initialize, not null
     * @deprecated use the init() that also takes a SecurityMaster
     */
    @Deprecated
    public abstract void init(ConventionMaster master);

    /**
     * Initializes the specified master.
     * <p>
     * Default implementation, should be overridden by child if the security
     * master should be populated.
     *
     * @param master the master to initialize, not null
     * @param securityMaster the security master, not null
     */
    public void init(ConventionMaster master, SecurityMaster securityMaster) {
        init(master);
    }

    /**
     * Adds a convention to the specified master, updating an existing document
     * of the same name when its content differs (ignoring the unique id).
     *
     * @param master the master to initialize, not null
     * @param convention the convention to add, null ignored
     */
    protected void addConvention(ConventionMaster master, ManageableConvention convention) {
        if (convention != null) {
            ConventionSearchRequest request = new ConventionSearchRequest();
            request.setName(convention.getName());
            ConventionSearchResult result = master.search(request);
            switch (result.getDocuments().size()) {
                case 0:
                    master.add(new ConventionDocument(convention));
                    break;
                case 1:
                    // Idiom fix: '!' instead of '== false'.
                    if (!JodaBeanUtils.equalIgnoring(convention, result.getFirstConvention(),
                            ManageableConvention.meta().uniqueId())) {
                        ConventionDocument doc = result.getFirstDocument();
                        doc.setConvention(convention);
                        master.update(doc);
                    }
                    break;
                default:
                    // These are supposed to be unique by name in the database.
                    // Idiom fix: parameterized logging instead of string concatenation.
                    s_logger.warn("Multiple conventions with the same name in database: {}", convention.getName());
                    for (ManageableConvention similar : result.getConventions()) {
                        if (JodaBeanUtils.equalIgnoring(convention, similar, ManageableConvention.meta().uniqueId())) {
                            return;  // already in database
                        }
                    }
                    master.add(new ConventionDocument(convention));
                    break;
            }
        }
    }

    /**
     * Adds a security used for convention lookup; logs and returns when no
     * security master is available.
     *
     * @param securityMaster the security master, may be null
     * @param security the security to add, not null
     */
    protected void addSecurity(SecurityMaster securityMaster, ManageableSecurity security) {
        if (securityMaster == null) {
            s_logger.warn("Tried to add a security to aid convention lookup but no security master set: {}",
                    security.getName());
            return;
        }
        SecurityMasterUtils.addOrUpdateSecurity(securityMaster, security);
    }

    /** Adds the ibor index security matching the given vanilla ibor leg convention. */
    protected void addIborSecurity(final SecurityMaster securityMaster, final VanillaIborLegConvention convention) {
        ArgumentChecker.notEmpty(convention.getExternalIdBundle(), "externalIdBundle");
        addSecurity(securityMaster, new IborIndex(convention.getName(), convention.getName(),
                convention.getResetTenor(), convention.getIborIndexConvention(), convention.getExternalIdBundle()));
    }

    /** Adds the overnight index security matching the given overnight index convention. */
    protected void addOvernightSecurity(final SecurityMaster securityMaster, final OvernightIndexConvention convention) {
        ArgumentChecker.notEmpty(convention.getExternalIdBundle(), "externalIdBundle");
        addSecurity(securityMaster,
                new OvernightIndex(convention.getName(), convention.getName(),
                        convention.getExternalIdBundle().iterator().next(),
                        convention.getExternalIdBundle()));
    }

    @Override
    public String toString() {
        return getClass().getSimpleName();
    }
}
|
package gov.nih.nci.cabig.caaers.rules.business.service;
import gov.nih.nci.cabig.caaers.domain.AdverseEvent;
import gov.nih.nci.cabig.caaers.domain.ExpeditedAdverseEventReport;
import gov.nih.nci.cabig.caaers.domain.Organization;
import gov.nih.nci.cabig.caaers.domain.Study;
import gov.nih.nci.cabig.caaers.domain.StudyOrganization;
import gov.nih.nci.cabig.caaers.domain.report.Report;
import gov.nih.nci.cabig.caaers.domain.report.ReportDefinition;
import gov.nih.nci.cabig.caaers.rules.RuleException;
import gov.nih.nci.cabig.caaers.rules.brxml.RuleSet;
import gov.nih.nci.cabig.caaers.rules.common.CategoryConfiguration;
import gov.nih.nci.cabig.caaers.rules.common.RuleType;
import gov.nih.nci.cabig.caaers.rules.common.RuleUtil;
import gov.nih.nci.cabig.caaers.rules.domain.AdverseEventEvaluationResult;
import gov.nih.nci.cabig.caaers.rules.objectgraph.FactResolver;
import gov.nih.nci.cabig.caaers.rules.runtime.BusinessRulesExecutionService;
import gov.nih.nci.cabig.caaers.rules.runtime.BusinessRulesExecutionServiceImpl;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
public class AdverseEventEvaluationServiceImpl implements AdverseEventEvaluationService {
// TODO: replace with Spring injection.
private BusinessRulesExecutionService businessRulesExecutionService = new BusinessRulesExecutionServiceImpl();
private RulesEngineService rulesEngineService= new RulesEngineServiceImpl();
//private ReportDefinitionDao reportDefinitionDao;
//private ReportServiceImpl reportService;
// Evaluation outcome: no rule fired / the assessment could not be made.
public static final String CAN_NOT_DETERMINED = "CAN_NOT_DETERMINED";
// Evaluation outcome: at least one scheduling rule fired.
public static final String SERIOUS_ADVERSE_EVENT = "SERIOUS_ADVERSE_EVENT";
/**
 * Assesses an adverse event and returns one of:
 * <ol>
 * <li>{@code SERIOUS_ADVERSE_EVENT} - at least one scheduling rule fired</li>
 * <li>{@code CAN_NOT_DETERMINED} - no rule fired / cannot be determined</li>
 * </ol>
 * Calling this method repeatedly does not affect rule firing adversely,
 * as nothing gets fired subsequently.
 * <p>
 * Fires the rules at the sponsor-defined study level first (falling back to
 * sponsor-level rules), then repeats the evaluation for every organization
 * on the study.
 */
public String assesAdverseEvent(AdverseEvent ae, Study study) throws Exception{
    // Sponsor(-defined study) scheduling rules first.
    String message = evaluateSponsorTarget(ae,study, null , RuleType.REPORT_SCHEDULING_RULES.getName());
    if (!message.equals(CAN_NOT_DETERMINED)) {
        return SERIOUS_ADVERSE_EVENT;
    }
    // Then institution scheduling rules, per study organization.
    for(StudyOrganization so : study.getStudyOrganizations() )
    {
        message = evaluateInstitutionTarget(ae, study, so.getOrganization(), null , RuleType.REPORT_SCHEDULING_RULES.getName());
        if (!message.equals(CAN_NOT_DETERMINED)) {
            return SERIOUS_ADVERSE_EVENT;
        }
    }
    // NOTE(review): leftover debug output — consider removing.
    System.out.println("message is : " + message );
    return CAN_NOT_DETERMINED;
}
/**
 * Builds the SAE report schedule: for every adverse event on the report,
 * fires the scheduling rules and collects the resulting report definition
 * names per organization.
 *
 * @return map of organization name (the primary funding sponsor plus each
 *         study organization) to the report definition names to schedule
 */
public Map<String,List<String>> evaluateSAEReportSchedule(ExpeditedAdverseEventReport aeReport) throws Exception {
    Map<String,List<String>> map = new HashMap<String,List<String>>();
    List<AdverseEvent> aes = aeReport.getAdverseEvents();
    List<String> reportDefinitionsForSponsor = new ArrayList<String>();
    // Sponsor rules: one evaluation per adverse event; the rule message is a
    // "||"-separated list of report definition names.
    for(AdverseEvent ae : aes )
    {
        String message = evaluateSponsorTarget(ae,aeReport.getStudy(), null , RuleType.REPORT_SCHEDULING_RULES.getName());
        if (!message.equals(CAN_NOT_DETERMINED)) {
            String[] messages = RuleUtil.charSeparatedStringToStringArray(message,"\\|\\|");
            for (int i=0;i<messages.length;i++) {
                // NOTE(review): leftover debug output — consider removing.
                System.out.println("adding .... " + messages[i]);
                reportDefinitionsForSponsor.add(messages[i]);
            }
        }
    }
    //System.out.println("KEY IN IS :" + aeReport.getStudy().getPrimaryFundingSponsorOrganization().getName());
    map.put(aeReport.getStudy().getPrimaryFundingSponsorOrganization().getName(), reportDefinitionsForSponsor);
    Study study = aeReport.getStudy();
    //TO-DO get orgs like FDA, CALGB and add to this list
    // Institution rules, per study organization.
    for(StudyOrganization so : study.getStudyOrganizations() )
    {
        List<String> reportDefinitionsForInstitution = new ArrayList<String>();
        for(AdverseEvent ae : aes ) {
            String message = evaluateInstitutionTarget(ae, study, so.getOrganization(), null , RuleType.REPORT_SCHEDULING_RULES.getName());
            if (!message.equals(CAN_NOT_DETERMINED)) {
                String[] messages = RuleUtil.charSeparatedStringToStringArray(message,"\\|\\|");
                for (int i=0;i<messages.length;i++) {
                    reportDefinitionsForInstitution.add(messages[i]);
                }
                //break;
            }
        }
        //System.out.println("KEY-2 IN IS :" + so.getOrganization().getName());
        // Merge with any list already stored under this organization name
        // (e.g. when the sponsor organization is also a study organization).
        List<String> existingList = map.get(so.getOrganization().getName());
        if (existingList != null ) {
            reportDefinitionsForInstitution.addAll(existingList);
        }
        map.put(so.getOrganization().getName(), reportDefinitionsForInstitution);
    }
    return map;
}
/**
 * Collects the distinct mandatory report sections for every adverse event /
 * report combination — first from sponsor rules, then from institution rules
 * for each study organization. Section names are returned in first-seen order.
 */
public List<String> mandatorySections(ExpeditedAdverseEventReport aeReport) throws Exception{
    List<AdverseEvent> adverseEvents = aeReport.getAdverseEvents();
    List<String> sections = new ArrayList<String>();
    // Sponsor-defined mandatory-section rules.
    for (AdverseEvent adverseEvent : adverseEvents) {
        for (Report report : aeReport.getReports()) {
            String message = evaluateSponsorTarget(adverseEvent, aeReport.getStudy(),
                    report.getReportDefinition(), RuleType.MANDATORY_SECTIONS_RULES.getName());
            collectSections(message, sections);
        }
    }
    // Institution-defined mandatory-section rules, per study organization.
    for (StudyOrganization studyOrganization : aeReport.getStudy().getStudyOrganizations()) {
        for (AdverseEvent adverseEvent : adverseEvents) {
            for (Report report : aeReport.getReports()) {
                String message = evaluateInstitutionTarget(adverseEvent, aeReport.getStudy(),
                        studyOrganization.getOrganization(), report.getReportDefinition(),
                        RuleType.MANDATORY_SECTIONS_RULES.getName());
                collectSections(message, sections);
            }
        }
    }
    return sections;
}

/** Splits a "||"-separated rule message and adds each not-yet-seen section name. */
private static void collectSections(String message, List<String> sections) {
    if (!message.equals(CAN_NOT_DETERMINED)) {
        for (String section : RuleUtil.charSeparatedStringToStringArray(message,"\\|\\|")) {
            if (!sections.contains(section)) {
                sections.add(section);
            }
        }
    }
}
/**
 * Fires the sponsor-defined study-level rules; when no such rules exist,
 * falls back to the sponsor-level rules.
 *
 * @return the "||"-separated rule message, or {@code CAN_NOT_DETERMINED} when
 *         no rules exist or no rule produced a message
 */
private String evaluateSponsorTarget(AdverseEvent ae, Study study, ReportDefinition reportDefinition, String ruleTypeName) throws Exception{
    // Fire the sponsor-defined study-level rules first.
    String studyLevelResult = sponsorDefinedStudyLevelRules(ae, study, reportDefinition, ruleTypeName);
    // Study-level rules exist but produced no message: nothing to report.
    if (studyLevelResult == null) {
        return CAN_NOT_DETERMINED;
    }
    String finalResult;
    if (studyLevelResult.equals("no_rules_found")) {
        // No study-level rules: fall back to the sponsor-level rules.
        finalResult = sponsorLevelRules(ae, study, reportDefinition, ruleTypeName);
    } else {
        finalResult = studyLevelResult;
    }
    // BUGFIX: the original used "no_rules_found".endsWith(final_result), which
    // also matched any suffix of "no_rules_found" (e.g. "found"); an exact,
    // null-safe comparison is what was intended.
    if (finalResult == null || "no_rules_found".equals(finalResult)) {
        finalResult = CAN_NOT_DETERMINED;
    }
    return finalResult;
}
/**
 * Fires the institution-defined study-level rules for the given organization;
 * when no such rules exist, falls back to the institution-level rules.
 *
 * @return the "||"-separated rule message, or {@code CAN_NOT_DETERMINED} when
 *         no rules exist or no rule produced a message
 */
private String evaluateInstitutionTarget(AdverseEvent ae, Study study , Organization organization, ReportDefinition reportDefinition, String ruleTypeName) throws Exception {
    // Fire the institution-defined study-level rules first.
    String studyLevelResult = institutionDefinedStudyLevelRules(ae, study, organization, reportDefinition, ruleTypeName);
    // Study-level rules exist but produced no message: nothing to report.
    if (studyLevelResult == null) {
        return CAN_NOT_DETERMINED;
    }
    String finalResult;
    if (studyLevelResult.equals("no_rules_found")) {
        // No study-level rules: fall back to the institution-level rules.
        finalResult = institutionLevelRules(ae, study, organization, reportDefinition, ruleTypeName);
    } else {
        finalResult = studyLevelResult;
    }
    // BUGFIX: the original used "no_rules_found".endsWith(final_result), which
    // also matched any suffix of "no_rules_found" (e.g. "found"); an exact,
    // null-safe comparison is what was intended.
    if (finalResult == null || "no_rules_found".equals(finalResult)) {
        finalResult = CAN_NOT_DETERMINED;
    }
    return finalResult;
}
/**
 * Fires the rules at the institution-defined study level.
 * If no rules are specified, fires the institution-level rules instead.
 */
// RULE METHODS
/**
 * Fires the sponsor-level rules for the given adverse event.
 *
 * @return the evaluation message, or the literal "no_rules_found" if no rule
 *         set is configured for this sponsor
 * @throws Exception if firing the rules fails
 */
private String sponsorLevelRules(AdverseEvent ae, Study study, ReportDefinition reportDefinition, String ruleTypeName) throws Exception {
    String sponsorName = study.getPrimaryFundingSponsorOrganization().getName();
    String bindURI = getBindURI(sponsorName, "", "SPONSOR", ruleTypeName);
    RuleSet ruleSetForSponsor = rulesEngineService.getRuleSetForSponsor(ruleTypeName, sponsorName);
    if (ruleSetForSponsor == null) {
        // No rules configured for adverse event scheduling for this sponsor.
        return "no_rules_found";
    }
    // Let evaluation failures propagate with their original stack trace; the
    // previous catch block only re-wrapped the exception without adding context.
    AdverseEventEvaluationResult evaluationForSponsor =
            this.getEvaluationObject(ae, study, study.getPrimaryFundingSponsorOrganization(), reportDefinition, bindURI);
    return evaluationForSponsor.getMessage();
}
/**
 * Fires the rules configured for this specific study at the sponsor level.
 *
 * @return the evaluation message, or the literal "no_rules_found" if no rule
 *         set is configured for this sponsor-defined study
 * @throws Exception if firing the rules fails
 */
private String sponsorDefinedStudyLevelRules(AdverseEvent ae, Study study, ReportDefinition reportDefinition, String ruleTypeName) throws Exception {
    String sponsorName = study.getPrimaryFundingSponsorOrganization().getName();
    String bindURI = getBindURI(sponsorName, study.getShortTitle(), "SPONSOR_DEFINED_STUDY", ruleTypeName);
    RuleSet ruleSetForSponsorDefinedStudy =
            rulesEngineService.getRuleSetForSponsorDefinedStudy(ruleTypeName, study.getShortTitle(), sponsorName);
    if (ruleSetForSponsorDefinedStudy == null) {
        // No rules configured for adverse event assessment for this sponsor-defined study.
        return "no_rules_found";
    }
    // Let evaluation failures propagate with their original stack trace; the
    // previous catch block only re-wrapped the exception without adding context.
    AdverseEventEvaluationResult evaluationForSponsorDefinedStudy =
            this.getEvaluationObject(ae, study, study.getPrimaryFundingSponsorOrganization(), reportDefinition, bindURI);
    return evaluationForSponsorDefinedStudy.getMessage();
}
/**
 * Fires the rules configured for this specific study at the institution level.
 *
 * @return the evaluation message, or the literal "no_rules_found" if no rule
 *         set is configured for this institution-defined study
 * @throws Exception if firing the rules fails
 */
private String institutionDefinedStudyLevelRules(AdverseEvent ae, Study study, Organization organization, ReportDefinition reportDefinition, String ruleTypeName) throws Exception {
    String studyShortTitle = study.getShortTitle();
    String organizationName = organization.getName();
    String bindURI = getBindURI(organizationName, studyShortTitle, "INSTITUTION_DEFINED_STUDY", ruleTypeName);
    RuleSet ruleSetForInstitutionDefinedStudy =
            rulesEngineService.getRuleSetForInstitutionDefinedStudy(ruleTypeName, studyShortTitle, organizationName);
    if (ruleSetForInstitutionDefinedStudy == null) {
        // No rules configured for adverse event assessment for this institution-defined study.
        return "no_rules_found";
    }
    // Let evaluation failures propagate with their original stack trace; the
    // previous catch block only re-wrapped the exception without adding context.
    AdverseEventEvaluationResult evaluationForInstitutionDefinedStudy =
            this.getEvaluationObject(ae, study, organization, reportDefinition, bindURI);
    return evaluationForInstitutionDefinedStudy.getMessage();
}
/**
 * Fires the institution-level rules for the given adverse event.
 *
 * @return the evaluation message, or the literal "no_rules_found" if no rule
 *         set is configured for this institution
 * @throws Exception if firing the rules fails
 */
private String institutionLevelRules(AdverseEvent ae, Study study, Organization organization, ReportDefinition reportDefinition, String ruleTypeName) throws Exception {
    // Removed leftover System.out.println debug statements (org name / url / message).
    String organizationName = organization.getName();
    String bindURI = getBindURI(organizationName, "", "INSTITUTION", ruleTypeName);
    RuleSet ruleSetForInstiution = rulesEngineService.getRuleSetForInstitution(ruleTypeName, organizationName);
    if (ruleSetForInstiution == null) {
        // No rules configured for adverse event scheduling for this institution.
        return "no_rules_found";
    }
    // Let evaluation failures propagate with their original stack trace; the
    // previous catch block only re-wrapped the exception without adding context.
    AdverseEventEvaluationResult evaluationForInstitution =
            this.getEvaluationObject(ae, study, organization, reportDefinition, bindURI);
    return evaluationForInstitution.getMessage();
}
/**
 * Builds the rule-package binding URI for the given evaluation target.
 *
 * @param sponsorOrInstitutionName the sponsor or institution name (spaces removed)
 * @param studyName the study short title; only used for the *_DEFINED_STUDY types
 * @param type one of SPONSOR, INSTITUTION, SPONSOR_DEFINED_STUDY, INSTITUTION_DEFINED_STUDY
 * @param ruleSetName the rule set name (spaces removed)
 * @return the dotted package URI, or null if {@code type} is unrecognized
 */
private String getBindURI(String sponsorOrInstitutionName, String studyName, String type, String ruleSetName) {
    String bindURI = null;
    // The four target types are mutually exclusive, so an else-if chain suffices.
    if ("SPONSOR".equalsIgnoreCase(type)) {
        bindURI = CategoryConfiguration.SPONSOR_BASE.getPackagePrefix()
                + "." + RuleUtil.getStringWithoutSpaces(sponsorOrInstitutionName)
                + "." + RuleUtil.getStringWithoutSpaces(ruleSetName);
    } else if ("INSTITUTION".equalsIgnoreCase(type)) {
        bindURI = CategoryConfiguration.INSTITUTION_BASE.getPackagePrefix()
                + "." + RuleUtil.getStringWithoutSpaces(sponsorOrInstitutionName)
                + "." + RuleUtil.getStringWithoutSpaces(ruleSetName);
    } else if ("SPONSOR_DEFINED_STUDY".equalsIgnoreCase(type)) {
        bindURI = CategoryConfiguration.SPONSOR_DEFINED_STUDY_BASE.getPackagePrefix()
                + "." + RuleUtil.getStringWithoutSpaces(studyName)
                + "." + RuleUtil.getStringWithoutSpaces(sponsorOrInstitutionName)
                + "." + RuleUtil.getStringWithoutSpaces(ruleSetName);
    } else if ("INSTITUTION_DEFINED_STUDY".equalsIgnoreCase(type)) {
        bindURI = CategoryConfiguration.INSTITUTION_DEFINED_STUDY_BASE.getPackagePrefix()
                + "." + RuleUtil.getStringWithoutSpaces(studyName)
                + "." + RuleUtil.getStringWithoutSpaces(sponsorOrInstitutionName)
                + "." + RuleUtil.getStringWithoutSpaces(ruleSetName);
    }
    return bindURI;
}
/**
 * Fires the rules bound at {@code bindURI} against the given facts and returns
 * the first {@link AdverseEventEvaluationResult} produced by the rules engine.
 * Null facts are simply omitted from the working memory.
 *
 * @return the first evaluation result emitted by the rules, or an empty
 *         {@code AdverseEventEvaluationResult} if the rules produced none
 * @throws RuleException if no rules are deployed for {@code bindURI}
 */
private AdverseEventEvaluationResult getEvaluationObject(AdverseEvent ae, Study study, Organization organization, ReportDefinition reportDefinition, String bindURI) throws Exception {
    List<Object> inputObjects = new ArrayList<Object>();
    inputObjects.add(ae);
    inputObjects.add(new FactResolver());
    if (study != null) {
        inputObjects.add(study);
    }
    if (organization != null) {
        inputObjects.add(organization);
    }
    if (reportDefinition != null) {
        inputObjects.add(reportDefinition);
    }

    List<Object> outputObjects;
    try {
        outputObjects = businessRulesExecutionService.fireRules(bindURI, inputObjects);
    } catch (Exception ex) {
        // A failure here means no rules are deployed for this package; surface it
        // as a RuleException. (The old comment claimed the exception was ignored,
        // which contradicted the code actually throwing.)
        throw new RuleException("There are no rule configured for this sponsor", ex);
    }

    // Return the first evaluation result emitted by the rules, if any.
    for (Object obj : outputObjects) {
        if (obj instanceof AdverseEventEvaluationResult) {
            return (AdverseEventEvaluationResult) obj;
        }
    }
    return new AdverseEventEvaluationResult();
}
}
|
package com.commercetools.pspadapter.payone.notification.common;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.verify;
import static util.UpdatePaymentTestHelper.*;
import com.commercetools.pspadapter.payone.domain.payone.model.common.Notification;
import com.commercetools.pspadapter.payone.domain.payone.model.common.NotificationAction;
import com.commercetools.pspadapter.payone.domain.payone.model.common.TransactionStatus;
import io.sphere.sdk.client.BlockingSphereClient;
import io.sphere.sdk.commands.UpdateAction;
import io.sphere.sdk.payments.Payment;
import io.sphere.sdk.payments.Transaction;
import io.sphere.sdk.payments.TransactionDraftBuilder;
import io.sphere.sdk.payments.TransactionState;
import io.sphere.sdk.payments.TransactionType;
import io.sphere.sdk.payments.commands.PaymentUpdateCommand;
import io.sphere.sdk.payments.commands.updateactions.AddInterfaceInteraction;
import io.sphere.sdk.payments.commands.updateactions.AddTransaction;
import io.sphere.sdk.payments.commands.updateactions.SetStatusInterfaceCode;
import io.sphere.sdk.payments.commands.updateactions.SetStatusInterfaceText;
import io.sphere.sdk.utils.MoneyImpl;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import util.PaymentTestHelper;
import javax.money.MonetaryAmount;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.List;
/**
 * Tests for the capture notification processor, verifying the payment update
 * actions generated for PENDING and COMPLETED capture notifications.
 *
 * @author Jan Wolter
 */
@RunWith(MockitoJUnitRunner.class)
public class CaptureNotificationProcessorTest {
    // Transaction time of the test notification. NOTE: this value is in epoch
    // SECONDS (it is passed to LocalDateTime.ofEpochSecond below); the field was
    // previously misleadingly named "millis".
    private static final Integer txTimeEpochSeconds = 1450365542;

    private static final ZonedDateTime timestamp =
            ZonedDateTime.of(LocalDateTime.ofEpochSecond(txTimeEpochSeconds, 0, ZoneOffset.UTC), ZoneId.of("UTC"));

    @Mock
    private BlockingSphereClient client;

    @InjectMocks
    private CaptureNotificationProcessor testee;

    @Captor
    private ArgumentCaptor<PaymentUpdateCommand> paymentRequestCaptor;

    private final PaymentTestHelper testHelper = new PaymentTestHelper();

    private Notification notification;

    @Before
    public void setUp() throws Exception {
        notification = new Notification();
        notification.setPrice("20.00");
        notification.setCurrency("EUR");
        notification.setTxtime(txTimeEpochSeconds.toString());
        notification.setSequencenumber("23");
        notification.setClearingtype("cc");
        notification.setTxaction(NotificationAction.CAPTURE);
        notification.setTransactionStatus(TransactionStatus.COMPLETED);
    }

    @Test
    public void processingPendingNotificationAboutUnknownTransactionAddsChargeTransactionWithStatePending() throws Exception {
        notification.setTransactionStatus(TransactionStatus.PENDING);
        assertNotificationAboutUnknownTransactionAddsPendingChargeTransaction();
    }

    @Test
    public void processingCompletedNotificationAboutUnknownTransactionAddsChargeTransactionWithStatePending() throws Exception {
        // setUp() already configured the notification status as COMPLETED.
        assertNotificationAboutUnknownTransactionAddsPendingChargeTransaction();
    }

    @Test
    public void processingPendingNotificationForPendingChargeTransactionDoesNotChangeState() throws Exception {
        notification.setTransactionStatus(TransactionStatus.PENDING);
        assertNotificationForPendingChargeTransactionLeavesStateUnchanged();
    }

    @Test
    public void processingCompletedNotificationForPendingChargeTransactionDoesNotChangeState() throws Exception {
        // setUp() already configured the notification status as COMPLETED.
        assertNotificationForPendingChargeTransactionLeavesStateUnchanged();
    }

    /**
     * Processes {@link #notification} against a payment with no transactions and
     * asserts that a new CHARGE transaction in state PENDING is added alongside
     * the standard interface interaction / status update actions.
     * (Shared body of the two "unknown transaction" tests, which previously
     * duplicated this code verbatim.)
     */
    @SuppressWarnings("unchecked")
    private void assertNotificationAboutUnknownTransactionAddsPendingChargeTransaction() throws Exception {
        // arrange
        final Payment payment = testHelper.dummyPaymentOneAuthPending20EuroCC();
        payment.getTransactions().clear();

        // act
        testee.processTransactionStatusNotification(notification, payment);

        // assert
        verify(client).executeBlocking(paymentRequestCaptor.capture());
        final List<? extends UpdateAction<Payment>> updateActions = paymentRequestCaptor.getValue().getUpdateActions();

        final MonetaryAmount amount = MoneyImpl.of(notification.getPrice(), notification.getCurrency());
        final AddTransaction transaction = AddTransaction.of(TransactionDraftBuilder
                .of(TransactionType.CHARGE, amount, timestamp)
                .state(TransactionState.PENDING)
                .interactionId(notification.getSequencenumber())
                .build());

        final AddInterfaceInteraction interfaceInteraction = getAddInterfaceInteraction(notification, timestamp);
        final SetStatusInterfaceCode statusInterfaceCode = getSetStatusInterfaceCode(notification);
        final SetStatusInterfaceText statusInterfaceText = getSetStatusInterfaceText(notification);

        assertThat(updateActions).as("# of payment update actions").hasSize(4);
        assertThat(updateActions).as("added transaction")
                .filteredOn(u -> u.getAction().equals("addTransaction"))
                .usingElementComparatorOnFields(
                        "transaction.type", "transaction.amount", "transaction.state", "transaction.timestamp")
                .containsOnlyOnce(transaction);
        assertStandardUpdateActions(updateActions, interfaceInteraction, statusInterfaceCode, statusInterfaceText);
    }

    /**
     * Processes {@link #notification} for the payment's existing pending CHARGE
     * transaction and asserts that only the three standard update actions are
     * issued, i.e. the transaction state itself is left unchanged.
     * (Shared body of the two "does not change state" tests.)
     */
    @SuppressWarnings("unchecked")
    private void assertNotificationForPendingChargeTransactionLeavesStateUnchanged() throws Exception {
        // arrange
        final Payment payment = testHelper.dummyPaymentOneChargePending20Euro();
        notification.setSequencenumber(payment.getTransactions().get(0).getInteractionId());

        // act
        testee.processTransactionStatusNotification(notification, payment);

        // assert
        verify(client).executeBlocking(paymentRequestCaptor.capture());
        final List<? extends UpdateAction<Payment>> updateActions = paymentRequestCaptor.getValue().getUpdateActions();

        final AddInterfaceInteraction interfaceInteraction = getAddInterfaceInteraction(notification, timestamp);
        final SetStatusInterfaceCode statusInterfaceCode = getSetStatusInterfaceCode(notification);
        final SetStatusInterfaceText statusInterfaceText = getSetStatusInterfaceText(notification);

        assertThat(updateActions).as("# of payment update actions").hasSize(3);
        assertStandardUpdateActions(updateActions, interfaceInteraction, statusInterfaceCode, statusInterfaceText);
    }
}
|
package com.splicemachine.derby.impl.sql.execute.operations;
import com.splicemachine.derby.test.framework.*;
import org.junit.*;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
/**
 * Unit Test for making sure MultiProbeTableScanOperation is logically correct. Once we have metrics information,
 * the test should be expanded to show that we only filter the records required.
 *
 * NOTE: the class name's "Operaton" spelling is kept as-is, since renaming a
 * public test class would also require renaming the source file.
 */
public class MultiProbeTableScanOperatonIT {
    public static final String CLASS_NAME = MultiProbeTableScanOperatonIT.class.getSimpleName();
    protected static SpliceWatcher spliceClassWatcher = new SpliceWatcher(CLASS_NAME);
    protected static SpliceSchemaWatcher schemaWatcher = new SpliceSchemaWatcher(CLASS_NAME);
    // Test tables: user/segment membership, document ids, collection ids, and a decimal column.
    protected static SpliceTableWatcher t1Watcher = new SpliceTableWatcher("user_groups",schemaWatcher.schemaName,"(user_id BIGINT NOT NULL,segment_id INT NOT NULL,unixtime BIGINT, primary key(segment_id, user_id))");
    protected static SpliceTableWatcher t2Watcher = new SpliceTableWatcher("docs",schemaWatcher.schemaName,"(id varchar(128) not null)");
    protected static SpliceTableWatcher t3Watcher = new SpliceTableWatcher("colls",schemaWatcher.schemaName,"(id varchar(128) not null,collid smallint not null)");
    protected static SpliceTableWatcher t4Watcher = new SpliceTableWatcher("b",schemaWatcher.schemaName,"(d decimal(10))");
    private static int size = 10;

    @ClassRule
    public static TestRule chain = RuleChain.outerRule(spliceClassWatcher)
            .around(schemaWatcher)
            .around(t1Watcher)
            .around(t2Watcher)
            .around(t3Watcher)
            .around(t4Watcher)
            .around(new SpliceDataWatcher() {
                @Override
                protected void starting(Description description) {
                    try {
                        // One user per segment 0..size-1.
                        PreparedStatement ps = spliceClassWatcher.prepareStatement("insert into " + t1Watcher.toString() + " values (?,?,?)");
                        for (int i = 0; i < size; i++) {
                            ps.setInt(1, i);
                            ps.setInt(2, i);
                            ps.setLong(3, 1L); // was "1l"; lowercase 'l' is easily misread as '1'
                            ps.execute();
                        }
                        // A second user in segments 4 and 6 only.
                        for (int i = 0; i < size; i++) {
                            if ((i == 4) || (i == 6)) {
                                ps.setInt(1, size + i);
                                ps.setInt(2, i);
                                ps.setLong(3, 1L);
                                ps.execute();
                            }
                        }
                        ps = spliceClassWatcher.prepareStatement("insert into " + t2Watcher.toString() + " values (?)");
                        ps.setString(1, "24");
                        ps.addBatch();
                        ps.setString(1, "25");
                        ps.addBatch();
                        ps.setString(1, "36");
                        ps.addBatch();
                        ps.setString(1, "27");
                        ps.addBatch();
                        ps.setString(1, "124");
                        ps.addBatch();
                        ps.setString(1, "567");
                        ps.addBatch();
                        ps.executeBatch();
                        ps = spliceClassWatcher.prepareStatement("insert into " + t3Watcher.toString() + " values (?,?)");
                        ps.setString(1, "123");
                        ps.setShort(2, (short) 2);
                        ps.addBatch();
                        ps.setString(1, "124");
                        ps.setShort(2, (short) -5);
                        ps.addBatch();
                        ps.setString(1, "24");
                        ps.setShort(2, (short) 1);
                        ps.addBatch();
                        ps.setString(1, "26");
                        ps.setShort(2, (short) -2);
                        ps.addBatch();
                        ps.setString(1, "36");
                        ps.setShort(2, (short) 1);
                        ps.addBatch();
                        ps.setString(1, "37");
                        ps.setShort(2, (short) 8);
                        ps.addBatch();
                        ps.executeBatch();
                        // Decimal values 0..10 for the index probe test.
                        ps = spliceClassWatcher.prepareStatement("insert into " + t4Watcher.toString() + " values (?)");
                        for (int i = 0; i <= 10; ++i) {
                            ps.setInt(1, i);
                            ps.addBatch();
                        }
                        ps.executeBatch();
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    } finally {
                        spliceClassWatcher.closeAll();
                    }
                }
            });

    @Rule public SpliceWatcher methodWatcher = new SpliceWatcher(CLASS_NAME);

    @Test
    public void testMultiProbeTableScanScroll() throws Exception {
        ResultSet rs = methodWatcher.executeQuery("select user_id from "+t1Watcher+" where segment_id in (1,5,8,12)");
        int i = 0;
        while (rs.next()) {
            i++;
        }
        Assert.assertEquals("Incorrect count returned!",3,i);
    }

    @Test
    //DB-2575
    public void testMultiProbeTableScanWithEqualPredicate() throws Exception {
        ResultSet rs = methodWatcher.executeQuery("select user_id from "+t1Watcher+" where segment_id in (1,5,8,12) and unixtime = 1");
        int i = 0;
        while (rs.next()) {
            i++;
        }
        Assert.assertEquals("Incorrect count returned!",3,i);
    }

    @Test
    public void testMultiProbeTableScanSink() throws Exception {
        ResultSet rs = methodWatcher.executeQuery("select count(user_id) from (" +
                "select user_id, ("+
                "max(case when segment_id = 7 then true else false end) " +
                "or " +
                "max(case when segment_id = 4 then true else false end)" +
                ") as in_segment " +
                "from "+t1Watcher+ " " +
                "where segment_id in (7, 4) " +
                "group by user_id) foo where in_segment = true");
        int i = 0;
        while (rs.next()) {
            i++;
            Assert.assertEquals("Incorrect Distinct Customers",3,rs.getLong(1));
        }
        Assert.assertEquals("Incorrect records returned!",1,i);
    }

    @Test
    public void testMultiProbeInSubQueryWithIndex() throws Exception {
        /* Regression test for DB-1040 */
        SpliceIndexWatcher indexWatcher = new SpliceIndexWatcher(t3Watcher.tableName, t3Watcher.getSchema(),"new_index_3",t3Watcher.getSchema(),"(collid)");
        indexWatcher.starting(null);
        try{
            ResultSet rs = methodWatcher.executeQuery("select count(id) from docs where id > any (select id from colls where collid in (-2,1))");
            Assert.assertTrue("No results returned!",rs.next());
            int count = rs.getInt(1);
            Assert.assertEquals("Incorrect count returned!",4,count);
            Assert.assertFalse("Too many rows returned!",rs.next());
        }finally{
            indexWatcher.drop();
        }
    }

    @Test
    //DB-4854
    public void testMultiProbeIntegerValue() throws Exception {
        SpliceIndexWatcher indexWatcher = new SpliceIndexWatcher(t4Watcher.tableName, t4Watcher.getSchema(),"idxb",t4Watcher.getSchema(),"(d)");
        indexWatcher.starting(null);
        // BUG FIX: the index was previously never dropped, leaking it into any
        // subsequent test (cf. testMultiProbeInSubQueryWithIndex, which cleans up).
        try {
            ResultSet rs = methodWatcher.executeQuery("select count(*) from b where d in (9,10)");
            Assert.assertTrue(rs.next());
            Assert.assertTrue("wrong count", rs.getInt(1) == 2);
            rs = methodWatcher.executeQuery("select count(*) from b where d in (9)");
            Assert.assertTrue(rs.next());
            Assert.assertTrue("wrong count", rs.getInt(1) == 1);
        } finally {
            indexWatcher.drop();
        }
    }
}
|
package uk.ac.ox.zoo.seeg.abraid.mp.dataacquisition.diseaseextent;
import ch.lambdaj.group.Group;
import org.hamcrest.core.IsEqual;
import org.joda.time.DateTime;
import uk.ac.ox.zoo.seeg.abraid.mp.common.domain.*;
import java.util.*;
import static ch.lambdaj.Lambda.*;
public class DiseaseExtentGeneratorHelper {
// Review response weightings in the database were chosen for the model; they are
// divided by this factor before use here (see computeReviewsScore).
private static final int SCALING_FACTOR = 50;
// Input fields (supplied via the constructor, plus reviews via the setter)
private DiseaseGroup diseaseGroup;
private DiseaseExtentParameters parameters;
private List<AdminUnitDiseaseExtentClass> currentDiseaseExtent;
private List<? extends AdminUnitGlobalOrTropical> adminUnits;
private List<DiseaseOccurrenceForDiseaseExtent> occurrences;
private List<DiseaseExtentClass> diseaseExtentClasses;
private List<AdminUnitReview> reviews;
// Working fields, populated by the group*/compute* methods below
private Map<AdminUnitGlobalOrTropical, List<DiseaseOccurrenceForDiseaseExtent>> occurrencesByAdminUnit;
private Map<AdminUnitGlobalOrTropical, DiseaseExtentClass> classesByAdminUnit;
private Map<Integer, Integer> numberOfOccurrencesByCountry;
private Map<Integer, List<AdminUnitReview>> reviewsByAdminUnit;
/**
 * Creates the helper.
 * @param diseaseGroup The disease group whose extent is being generated.
 * @param parameters The thresholds and scores that control classification.
 * @param currentDiseaseExtent The existing disease extent rows (may be updated in place).
 * @param adminUnits The admin units (global or tropical) to classify.
 * @param occurrences The disease occurrences to classify against.
 * @param diseaseExtentClasses The available extent classes (looked up by name).
 */
public DiseaseExtentGeneratorHelper(DiseaseGroup diseaseGroup, DiseaseExtentParameters parameters,
                                    List<AdminUnitDiseaseExtentClass> currentDiseaseExtent,
                                    List<? extends AdminUnitGlobalOrTropical> adminUnits,
                                    List<DiseaseOccurrenceForDiseaseExtent> occurrences,
                                    List<DiseaseExtentClass> diseaseExtentClasses) {
    this.diseaseGroup = diseaseGroup;
    this.parameters = parameters;
    this.currentDiseaseExtent = currentDiseaseExtent;
    this.adminUnits = adminUnits;
    this.occurrences = occurrences;
    this.diseaseExtentClasses = diseaseExtentClasses;
}
/** @return The disease group whose extent is being generated. */
public DiseaseGroup getDiseaseGroup() {
    return diseaseGroup;
}
/** @return The existing disease extent rows supplied at construction. */
public List<AdminUnitDiseaseExtentClass> getCurrentDiseaseExtent() {
    return currentDiseaseExtent;
}
/** @return The disease occurrences supplied at construction. */
public List<DiseaseOccurrenceForDiseaseExtent> getOccurrences() {
    return occurrences;
}
/** @return Occurrences grouped by admin unit; null until groupOccurrencesByAdminUnit() runs. */
public Map<AdminUnitGlobalOrTropical, List<DiseaseOccurrenceForDiseaseExtent>> getOccurrencesByAdminUnit() {
    return occurrencesByAdminUnit;
}
/** @return Occurrence counts by country GAUL code; null until groupOccurrencesByCountry() runs. */
public Map<Integer, Integer> getNumberOfOccurrencesByCountry() {
    return numberOfOccurrencesByCountry;
}
/** @return Reviews grouped by admin unit GAUL code; null until groupReviewsByAdminUnit() runs. */
public Map<Integer, List<AdminUnitReview>> getReviewsByAdminUnit() {
    return reviewsByAdminUnit;
}
/** Supplies the expert reviews used when updating an existing extent. */
public void setReviews(List<AdminUnitReview> reviews) {
    this.reviews = reviews;
}
/**
 * Groups the disease occurrences by admin unit (global or tropical).
 * Admin units without occurrences are mapped to an empty list.
 */
public void groupOccurrencesByAdminUnit() {
    // Start every admin unit off with an empty occurrence list.
    occurrencesByAdminUnit = new HashMap<>();
    for (AdminUnitGlobalOrTropical unit : adminUnits) {
        occurrencesByAdminUnit.put(unit, new ArrayList<DiseaseOccurrenceForDiseaseExtent>());
    }
    // Slot each occurrence into the list belonging to its admin unit.
    for (DiseaseOccurrenceForDiseaseExtent occ : occurrences) {
        AdminUnitGlobalOrTropical unit = getAdminUnitByGaulCode(occ.getAdminUnitGaulCode());
        occurrencesByAdminUnit.get(unit).add(occ);
    }
}
/**
 * Groups the occurrences by country (strictly, it groups the number of occurrences by country GAUL code).
 * The country GAUL code is taken from the admin unit global/tropical entity; occurrences whose
 * admin unit has no country GAUL code are skipped.
 */
public void groupOccurrencesByCountry() {
    numberOfOccurrencesByCountry = new HashMap<>();
    for (DiseaseOccurrenceForDiseaseExtent occ : occurrences) {
        AdminUnitGlobalOrTropical unit = getAdminUnitByGaulCode(occ.getAdminUnitGaulCode());
        Integer countryGaulCode = unit.getCountryGaulCode();
        if (countryGaulCode == null) {
            continue;  // no country association for this admin unit
        }
        // Increment this country's occurrence count (nullSafeAdd treats a missing entry as 0).
        Integer currentCount = numberOfOccurrencesByCountry.get(countryGaulCode);
        numberOfOccurrencesByCountry.put(countryGaulCode, nullSafeAdd(currentCount, 1));
    }
}
/**
 * Groups the expert reviews by admin unit (strictly, by admin unit GAUL code).
 * Populates reviewsByAdminUnit from the reviews list set via setReviews().
 */
public void groupReviewsByAdminUnit() {
    // Group the reviews by admin unit GAUL code (lambdaj grouping)
    Group<AdminUnitReview> group = group(reviews,
            by(on(AdminUnitReview.class).getAdminUnitGlobalOrTropicalGaulCode()));
    // Convert the grouping to a map from GAUL code to reviews
    reviewsByAdminUnit = new HashMap<>();
    for (Group<AdminUnitReview> subgroup : group.subgroups()) {
        reviewsByAdminUnit.put((Integer) subgroup.key(), subgroup.findAll());
    }
}
/**
 * Computes the disease extent classes for an initial disease extent.
 * Uses only occurrence counts (no expert reviews exist yet for a new extent).
 */
public void computeInitialDiseaseExtentClasses() {
    computeDiseaseExtentClasses(new DiseaseExtentClassComputer() {
        @Override
        public DiseaseExtentClass compute(AdminUnitGlobalOrTropical adminUnit,
                                          List<DiseaseOccurrenceForDiseaseExtent> occurrencesForAdminUnit) {
            // Computes the initial disease extent class for one admin unit
            int occurrenceCount = occurrencesForAdminUnit.size();
            if (occurrenceCount == 0) {
                // No occurrences in this admin unit: fall back to the country-level count
                return computeDiseaseExtentClassForCountry(adminUnit.getCountryGaulCode());
            } else {
                return computeDiseaseExtentClassUsingOccurrenceCount(occurrenceCount, 1);
            }
        }
    });
}
/**
 * Computes the disease extent classes for updating an existing disease extent.
 * Uses both occurrences and expert reviews (reviews must be set via setReviews()
 * and grouped via groupReviewsByAdminUnit() beforehand).
 */
public void computeUpdatedDiseaseExtentClasses() {
    computeDiseaseExtentClasses(new DiseaseExtentClassComputer() {
        @Override
        public DiseaseExtentClass compute(AdminUnitGlobalOrTropical adminUnit,
                                          List<DiseaseOccurrenceForDiseaseExtent> occurrencesForAdminUnit) {
            // Computes the updated disease extent class for one admin unit
            List<AdminUnitReview> reviewsForAdminUnit = getReviewsByGaulCode(adminUnit.getGaulCode());
            if (occurrencesForAdminUnit.size() == 0 && reviewsForAdminUnit.size() == 0) {
                // No local evidence at all: fall back to the country-level count
                return computeDiseaseExtentClassForCountry(adminUnit.getCountryGaulCode());
            } else {
                return computeDiseaseExtentClassUsingOccurrencesAndReviews(occurrencesForAdminUnit,
                        reviewsForAdminUnit);
            }
        }
    });
}
/**
 * For each admin unit, convert its list of disease occurrences into a disease extent class.
 * @param computer A method for converting the disease occurrences for 1 admin unit into a disease extent class.
 */
public void computeDiseaseExtentClasses(DiseaseExtentClassComputer computer) {
    classesByAdminUnit = new HashMap<>();
    // Walk the previously-built admin unit -> occurrences map and classify each unit.
    for (Map.Entry<AdminUnitGlobalOrTropical, List<DiseaseOccurrenceForDiseaseExtent>> entry :
            occurrencesByAdminUnit.entrySet()) {
        DiseaseExtentClass extentClass = computer.compute(entry.getKey(), entry.getValue());
        classesByAdminUnit.put(entry.getKey(), extentClass);
    }
}
/**
 * Forms the disease extent for saving to the database.
 * Updates existing rows or creates new rows as appropriate.
 * @return A list of AdminUnitDiseaseExtentClass rows for saving.
 */
public List<AdminUnitDiseaseExtentClass> getDiseaseExtentToSave() {
    List<AdminUnitDiseaseExtentClass> rowsToSave = new ArrayList<>();
    for (Map.Entry<AdminUnitGlobalOrTropical, List<DiseaseOccurrenceForDiseaseExtent>> entry :
            occurrencesByAdminUnit.entrySet()) {
        AdminUnitGlobalOrTropical adminUnit = entry.getKey();
        // Reuse the existing row for this admin unit if one exists; otherwise create it.
        AdminUnitDiseaseExtentClass row = findAdminUnitDiseaseExtentClass(adminUnit);
        if (row == null) {
            row = createAdminUnitDiseaseExtentClass(adminUnit);
        }
        DiseaseExtentClass newClass = classesByAdminUnit.get(adminUnit);
        // Record whether the class changed BEFORE overwriting it on the row.
        row.setHasClassChanged(!newClass.equals(row.getDiseaseExtentClass()));
        row.setDiseaseExtentClass(newClass);
        row.setOccurrenceCount(entry.getValue().size());
        rowsToSave.add(row);
    }
    return rowsToSave;
}
/**
 * Computes a disease extent class, based on the number of occurrences and a scaling factor.
 * @param occurrenceCount The number of occurrences.
 * @param factor A scaling factor that is multiplied by the number of occurrences when doing the comparison.
 * @return The computed disease extent class.
 */
public DiseaseExtentClass computeDiseaseExtentClassUsingOccurrenceCount(int occurrenceCount, int factor) {
    // Compare the count against the scaled thresholds from the extent parameters,
    // strongest class first.
    if (occurrenceCount >= parameters.getMinimumOccurrencesForPresence() * factor) {
        return findDiseaseExtentClass(DiseaseExtentClass.PRESENCE);
    }
    if (occurrenceCount >= parameters.getMinimumOccurrencesForPossiblePresence() * factor) {
        return findDiseaseExtentClass(DiseaseExtentClass.POSSIBLE_PRESENCE);
    }
    return findDiseaseExtentClass(DiseaseExtentClass.UNCERTAIN);
}
/**
 * Computes a disease extent class, based on a list of occurrences and a list of reviews.
 * Score bands: &gt;1 presence, (0,1] possible presence, 0 uncertain,
 * [-1,0) possible absence, &lt;-1 absence.
 * @param occurrencesList The list of occurrences.
 * @param reviewsList The list of reviews.
 * @return The computed disease extent class.
 */
public DiseaseExtentClass computeDiseaseExtentClassUsingOccurrencesAndReviews(
        List<DiseaseOccurrenceForDiseaseExtent> occurrencesList, List<AdminUnitReview> reviewsList) {
    // Average score across all occurrences and reviews for this admin unit.
    final double score = computeScoreForOccurrencesAndReviews(occurrencesList, reviewsList);
    if (score > 1) {
        return findDiseaseExtentClass(DiseaseExtentClass.PRESENCE);
    }
    if (score > 0) {
        return findDiseaseExtentClass(DiseaseExtentClass.POSSIBLE_PRESENCE);
    }
    if (score == 0) {
        return findDiseaseExtentClass(DiseaseExtentClass.UNCERTAIN);
    }
    if (score >= -1) {
        return findDiseaseExtentClass(DiseaseExtentClass.POSSIBLE_ABSENCE);
    }
    return findDiseaseExtentClass(DiseaseExtentClass.ABSENCE);
}
/**
 * Computes the average evidence score across a list of occurrences and a list of reviews.
 * (The previous javadoc was a copy-paste of the method above and wrongly claimed this
 * returned a disease extent class.)
 * @param occurrencesList The list of occurrences.
 * @param reviewsList The list of reviews.
 * @return The average score, or 0 if there are no occurrences and no reviews.
 */
public double computeScoreForOccurrencesAndReviews(List<DiseaseOccurrenceForDiseaseExtent> occurrencesList,
List<AdminUnitReview> reviewsList) {
    // Compute the score for each occurrence and each review, and take the average
    // Be extra careful with int -> double conversions...
    double occurrencesScore = computeOccurrencesScore(occurrencesList);
    double reviewsScore = computeReviewsScore(reviewsList);
    double totalScore = occurrencesScore + reviewsScore;
    double totalCount = occurrencesList.size() + reviewsList.size();
    // Guard against division by zero when there is no evidence at all
    return (totalCount == 0) ? 0 : (totalScore / totalCount);
}
/**
 * Computes a disease extent class for a country.
 * The country-level classification uses the "occurrence count" method, but with
 * the thresholds scaled up by a factor of 2.
 * @param countryGaulCode The country's GAUL code (may be null).
 * @return The computed disease extent class (UNCERTAIN if the country is unknown
 *         or has no recorded occurrences).
 */
public DiseaseExtentClass computeDiseaseExtentClassForCountry(Integer countryGaulCode) {
    if (countryGaulCode == null) {
        return findDiseaseExtentClass(DiseaseExtentClass.UNCERTAIN);
    }
    Integer occurrenceCount = numberOfOccurrencesByCountry.get(countryGaulCode);
    if (occurrenceCount == null) {
        return findDiseaseExtentClass(DiseaseExtentClass.UNCERTAIN);
    }
    return computeDiseaseExtentClassUsingOccurrenceCount(occurrenceCount, 2);
}
private int computeOccurrencesScore(List<DiseaseOccurrenceForDiseaseExtent> occurrenceList) {
DateTime oldestDateForHigherScore =
DateTime.now().minusYears(parameters.getMaximumYearsAgoForHigherOccurrenceScore());
// Unlike computeReviewsScore(), the total is an integer so that we can maintain full accuracy over multiple
// additions
int total = 0;
for (DiseaseOccurrenceForDiseaseExtent occurrence : occurrenceList) {
// The score for each occurrence depends on the occurrence date. It scores the "higher score" unless it
// is older than the oldest date allowed for the higher score, in which case it scores the "lower score".
// These values are all defined by the disease extent parameters.
boolean useLowerScore = occurrence.getOccurrenceDate().isBefore(oldestDateForHigherScore);
total += useLowerScore ? parameters.getLowerOccurrenceScore() : parameters.getHigherOccurrenceScore();
}
return total;
}
    /**
     * Computes the total (not averaged) score for a list of expert reviews, where each
     * review contributes its scaled response weighting multiplied by the expert's weighting.
     * @param reviewsList The list of reviews.
     * @return The total reviews score.
     */
    private double computeReviewsScore(List<AdminUnitReview> reviewsList) {
        double total = 0;
        for (AdminUnitReview review : reviewsList) {
            // The response weighting is currently divided by 50 so that the weightings in the database (which
            // were chosen for use with the model) can be used for our purposes. Eventually this should be removed.
            // NOTE(review): this is integer division, so any remainder is truncated before the
            // multiplication below -- confirm response weightings are exact multiples of SCALING_FACTOR.
            int scaledResponseWeighting = review.getResponse().getWeighting() / SCALING_FACTOR;
            total += scaledResponseWeighting * review.getExpert().getWeighting();
        }
        return total;
    }
    /**
     * Finds the (global or tropical) admin unit with the specified GAUL code, using
     * lambdaj's selectUnique over the cached admin units list.
     * @param gaulCode The GAUL code to look up.
     * @return The single matching admin unit.
     */
    private AdminUnitGlobalOrTropical getAdminUnitByGaulCode(int gaulCode) {
        return selectUnique(adminUnits, having(
                on(AdminUnitGlobalOrTropical.class).getGaulCode(), IsEqual.equalTo(gaulCode)));
    }
private List<AdminUnitReview> getReviewsByGaulCode(int adminUnitGaulCode) {
List<AdminUnitReview> reviewsList = reviewsByAdminUnit.get(adminUnitGaulCode);
return (reviewsList == null) ? new ArrayList<AdminUnitReview>() : reviewsList;
}
    /**
     * Finds the disease extent class entity with the specified name.
     * @param diseaseExtentClass One of the DiseaseExtentClass name constants.
     * @return The single matching disease extent class.
     */
    private DiseaseExtentClass findDiseaseExtentClass(String diseaseExtentClass) {
        // Returns the disease extent class with the specified name
        return selectUnique(diseaseExtentClasses, having(
                on(DiseaseExtentClass.class).getName(), IsEqual.equalTo(diseaseExtentClass)));
    }
    /**
     * Searches the current disease extent for the row belonging to the specified admin unit,
     * matching on GAUL code.
     * @param adminUnit The admin unit to search for.
     * @return The matching row, or null if the admin unit is not in the current extent.
     */
    private AdminUnitDiseaseExtentClass findAdminUnitDiseaseExtentClass(AdminUnitGlobalOrTropical adminUnit) {
        // Searches the current disease extent for the specified admin unit. Returns it if found, or null if not found.
        int gaulCodeToFind = adminUnit.getGaulCode();
        return selectUnique(currentDiseaseExtent, having(
                on(AdminUnitDiseaseExtentClass.class).getAdminUnitGlobalOrTropical().getGaulCode(),
                IsEqual.equalTo(gaulCodeToFind)));
    }
private AdminUnitDiseaseExtentClass createAdminUnitDiseaseExtentClass(AdminUnitGlobalOrTropical adminUnit) {
AdminUnitDiseaseExtentClass row = new AdminUnitDiseaseExtentClass();
row.setDiseaseGroup(diseaseGroup);
row.setAdminUnitGlobalOrTropical(adminUnit);
return row;
}
private int nullSafeAdd(Integer a, Integer b) {
return ((a != null) ? a : 0) + ((b != null) ? b : 0);
}
    /**
     * Strategy for computing the disease extent class of a single admin unit, given the
     * occurrences that fall within that admin unit.
     */
    private interface DiseaseExtentClassComputer {
        DiseaseExtentClass compute(AdminUnitGlobalOrTropical adminUnit,
                                   List<DiseaseOccurrenceForDiseaseExtent> occurrencesForAdminUnit);
    }
}
|
package bar.foo.yanel.impl.resources;
import org.wyona.yanel.impl.resources.usecase.ExecutableUsecaseResource;
import org.wyona.yanel.impl.resources.usecase.UsecaseException;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
/**
* A simple Usecase example
*/
/**
 * A simple usecase example demonstrating the execute/cancel/precondition lifecycle
 * of an {@link ExecutableUsecaseResource}: it validates a form field, echoes a
 * submitted date, and reports the current server time.
 */
public class FromScratchResource extends ExecutableUsecaseResource {

    private static final Logger log = LogManager.getLogger(FromScratchResource.class);

    // Names of the HTML form fields submitted by the default view.
    protected static final String PARAM_EXAMPLE_FORM_FIELD = "example-form-field";
    protected static final String PARAM_EXAMPLE_FORM_FIELD_DATE = "example-form-field-date";

    /**
     * Executed when submitting the form provided in the default view (probably implemented
     * as a jelly template). Adds info messages which the done view displays.
     */
    public void execute() throws UsecaseException {
        // Get the date value submitted by the form as a string.
        String exampleFormFieldDate = getParameterAsString(PARAM_EXAMPLE_FORM_FIELD_DATE);
        addInfoMessage("Usecase successfully executed. You accessed the default-view of the usecase at: " + exampleFormFieldDate + ". ");
        addInfoMessage("And now is: " + getDate());
    }

    /**
     * Executed when canceling the form provided in the default view (probably implemented
     * as a jelly template).
     */
    public void cancel() throws UsecaseException {
        addInfoMessage("The usecase was canceled at: " + getDate());
    }

    /**
     * Preconditions which are checked before the usecase will be executed.
     * @return true if the form field contains "hello", false otherwise (with an error added).
     */
    public boolean checkPreconditions() throws UsecaseException {
        String exampleFormField = getParameterAsString(PARAM_EXAMPLE_FORM_FIELD);
        // Literal-first comparison: this also rejects a missing (null) parameter instead of
        // throwing a NullPointerException as the previous exampleFormField.equals("hello") did.
        if (!"hello".equals(exampleFormField)) {
            addError("You did not enter hello");
            return false;
        }
        return true;
    }

    /**
     * Provides a java object to be used by a jelly-template-implemented view.
     * @return the current date and time as a formatted string.
     */
    public String getExampleString() throws Exception {
        return getDate();
    }

    /**
     * Private helper for this example.
     * @return a String with the current date and time in the default time zone.
     */
    private String getDate() {
        java.util.Calendar cal = java.util.Calendar.getInstance(java.util.TimeZone.getDefault());
        String dateFormat = "yyyy-MM-dd HH:mm:ss";
        java.text.SimpleDateFormat sdf = new java.text.SimpleDateFormat(dateFormat);
        sdf.setTimeZone(java.util.TimeZone.getDefault());
        return sdf.format(cal.getTime());
    }
}
|
package ca.corefacility.bioinformatics.irida.ria.web.projects.settings.dto;
/**
 * Request body sent from the UI to change a single attribute on a project.
 * {@code field} names the attribute to be updated; {@code value} is the value
 * to assign to that attribute.
 */
public class UpdateProjectAttributeRequest {
    private String field;
    private String value;

    /** No-argument constructor for request deserialization. */
    public UpdateProjectAttributeRequest() {
    }

    public UpdateProjectAttributeRequest(String field, String value) {
        this.field = field;
        this.value = value;
    }

    public String getField() {
        return field;
    }

    public String getValue() {
        return value;
    }

    public void setField(String field) {
        this.field = field;
    }

    public void setValue(String value) {
        this.value = value;
    }
}
|
package org.carlspring.strongbox.controllers.layout.maven;
import org.carlspring.commons.encryption.EncryptionAlgorithmsEnum;
import org.carlspring.commons.io.MultipleDigestOutputStream;
import org.carlspring.maven.commons.util.ArtifactUtils;
import org.carlspring.strongbox.artifact.generator.MavenArtifactDeployer;
import org.carlspring.strongbox.client.ArtifactOperationException;
import org.carlspring.strongbox.client.ArtifactTransportException;
import org.carlspring.strongbox.config.IntegrationTest;
import org.carlspring.strongbox.domain.ArtifactEntry;
import org.carlspring.strongbox.domain.RemoteArtifactEntry;
import org.carlspring.strongbox.providers.layout.Maven2LayoutProvider;
import org.carlspring.strongbox.providers.search.MavenIndexerSearchProvider;
import org.carlspring.strongbox.resource.ConfigurationResourceResolver;
import org.carlspring.strongbox.rest.common.MavenRestAssuredBaseTest;
import org.carlspring.strongbox.services.ArtifactEntryService;
import org.carlspring.strongbox.storage.indexing.IndexTypeEnum;
import org.carlspring.strongbox.storage.repository.MavenRepositoryFactory;
import org.carlspring.strongbox.storage.repository.MutableRepository;
import org.carlspring.strongbox.storage.repository.RepositoryPolicyEnum;
import org.carlspring.strongbox.storage.search.SearchRequest;
import org.carlspring.strongbox.storage.search.SearchResult;
import org.carlspring.strongbox.storage.search.SearchResults;
import org.carlspring.strongbox.util.MessageDigestUtils;
import org.carlspring.strongbox.xml.configuration.repository.MutableMavenRepositoryConfiguration;
import javax.inject.Inject;
import javax.xml.bind.JAXBException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import java.io.*;
import java.lang.reflect.UndeclaredThrowableException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import io.restassured.http.Header;
import io.restassured.http.Headers;
import io.restassured.response.ExtractableResponse;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.metadata.Metadata;
import org.apache.maven.artifact.repository.metadata.SnapshotVersion;
import org.apache.maven.model.Plugin;
import org.apache.maven.project.artifact.PluginArtifact;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import org.junit.jupiter.api.*;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.mockito.Spy;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.security.test.context.support.WithUserDetails;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
import static io.restassured.module.mockmvc.RestAssuredMockMvc.given;
import static org.junit.jupiter.api.Assertions.*;
/**
* Test cases for {@link MavenArtifactController}.
*
* @author Alex Oreshkevich
* @author Martin Todorov
* @author Pablo Tirado
*/
@IntegrationTest
@ExtendWith(SpringExtension.class)
public class MavenArtifactControllerTest
extends MavenRestAssuredBaseTest
{
    // Directory into which artifacts downloaded by tests are written.
    private static final String TEST_RESOURCES = "target/test-resources";

    // Repository ids used by the tests in this class.
    private static final String REPOSITORY_RELEASES = "releases";
    private static final String REPOSITORY_RELEASES1 = "mact-releases-1";
    private static final String REPOSITORY_RELEASES2 = "mact-releases-2";
    private static final String REPOSITORY_SNAPSHOTS = "mact-snapshots";

    // Base directory for locally generated artifacts.
    private static File GENERATOR_BASEDIR = new File(ConfigurationResourceResolver.getVaultDirectory() + "/local");

    // Path of the last plugin.xml generated by createPluginXmlFile(); also used for cleanup.
    private static String pluginXmlFilePath;

    // Spied plugin/jar artifacts whose getFile() is stubbed by the metadata tests.
    @Spy
    private Artifact artifact1 = ArtifactUtils.getArtifactFromGAVTC("org.carlspring.strongbox.metadata" + ":" +
                                                                    "metadata-foo-maven-plugin" + ":" +
                                                                    "3.1");
    @Spy
    private Artifact artifact2 = ArtifactUtils.getArtifactFromGAVTC(
            "org.carlspring.strongbox.metadata" + ":" + "metadata-faa-maven-plugin" + ":" + "3.1");
    @Spy
    private Artifact artifact3 = ArtifactUtils.getArtifactFromGAVTC(
            "org.carlspring.strongbox.metadata" + ":" + "metadata-foo-maven-plugin" + ":" + "3.2");
    @Spy
    private Artifact artifact4 = ArtifactUtils.getArtifactFromGAVTC(
            "org.carlspring.strongbox.metadata" + ":" + "metadata-faa-maven-plugin" + ":" + "3.2");
    @Spy
    private Artifact artifact5 = ArtifactUtils.getArtifactFromGAVTC(
            "org.carlspring.strongbox.metadata" + ":" + "metadata-foo" + ":" + "3.1");
    @Spy
    private Artifact artifact6 = ArtifactUtils.getArtifactFromGAVTC(
            "org.carlspring.strongbox.metadata" + ":" + "metadata-foo" + ":" + "3.2");

    @Inject
    private MavenRepositoryFactory mavenRepositoryFactory;

    @Inject
    private ArtifactEntryService artifactEntryService;

    // Deployer rooted at the working directory; created in init().
    private MavenArtifactDeployer defaultMavenArtifactDeployer;
    /**
     * Clears out any residue in this class's repositories left over from previous runs,
     * before any test in the class executes.
     */
    @BeforeAll
    public static void cleanUp()
            throws Exception
    {
        cleanUp(getRepositoriesToClean(REPOSITORY_RELEASES1,
                                       REPOSITORY_RELEASES2,
                                       REPOSITORY_SNAPSHOTS));
    }
private static Set<MutableRepository> getRepositoriesToClean(String... repositoryId)
{
Set<MutableRepository> repositories = new LinkedHashSet<>();
Arrays.asList(repositoryId).forEach(
r -> repositories.add(createRepositoryMock(STORAGE0, r, Maven2LayoutProvider.ALIAS))
);
return repositories;
}
    /**
     * Deletes the temporary plugin test resources once all tests in the class have run.
     */
    @AfterAll
    public static void down()
    {
        deleteTestResources();
    }
private static void deleteTestResources()
{
if (pluginXmlFilePath == null)
{
return;
}
Path dirPath = Paths.get(pluginXmlFilePath).getParent().getParent().getParent();
try
{
Files.walk(dirPath)
.map(Path::toFile)
.sorted(Comparator.comparing(File::isDirectory))
.forEach(File::delete);
}
catch (IOException e)
{
e.printStackTrace();
}
}
private static void writeToZipFile(String path,
ZipOutputStream zipStream)
throws Exception
{
File aFile = new File(path);
FileInputStream fis = new FileInputStream(aFile);
ZipEntry zipEntry = new ZipEntry(path);
zipStream.putNextEntry(zipEntry);
byte[] bytes = new byte[1024];
int length;
while ((length = fis.read(bytes)) >= 0)
{
zipStream.write(bytes, 0, length);
}
zipStream.closeEntry();
fis.close();
}
private static void crateJarFile(String artifactId)
throws Exception
{
String parentPluginPath = String.valueOf(Paths.get(pluginXmlFilePath).getParent());
try (FileOutputStream fos = new FileOutputStream(parentPluginPath + "/" + artifactId + ".jar");
ZipOutputStream zipOS = new ZipOutputStream(fos))
{
writeToZipFile(pluginXmlFilePath + "/plugin.xml", zipOS);
System.out.println("");
}
catch (IOException e)
{
e.printStackTrace();
}
}
    /**
     * Writes a minimal Maven plugin descriptor (plugin.xml) for the given GAV under
     * {@code src/test/resources/temp/<artifactId>/META-INF/maven}, and records that
     * directory in {@link #pluginXmlFilePath} so it can be packaged and cleaned up later.
     */
    private static void createPluginXmlFile(String groupId,
                                            String artifactId,
                                            String version)
            throws Exception
    {
        // new File("").getCanonicalPath() resolves to the current working directory.
        File file = new File("");
        pluginXmlFilePath = file.getCanonicalPath() + "/src/test/resources/temp/" + artifactId + "/META-INF/maven";
        Files.createDirectories(Paths.get(pluginXmlFilePath));
        String xmlSource = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
                           "<plugin>\n" +
                           "  <name>Apache Maven Dependency Plugin</name>\n" +
                           "  <description>Provides utility goals to work with dependencies like copying, unpacking, analyzing, resolving and many more.</description>\n" +
                           "  <groupId>" + groupId + "</groupId>\n" +
                           "  <artifactId>" + artifactId + "</artifactId>\n" +
                           "  <version>" + version + "</version>\n" +
                           "  <goalPrefix>dependency</goalPrefix>\n" +
                           "</plugin>";
        // Round-trip through a DOM parse + transform rather than writing the string
        // directly; this also verifies the generated XML is well-formed.
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        DocumentBuilder builder = factory.newDocumentBuilder();
        Document doc = builder.parse(new InputSource(new StringReader(xmlSource)));
        // Write the parsed document to an xml file
        TransformerFactory transformerFactory = TransformerFactory.newInstance();
        Transformer transformer = transformerFactory.newTransformer();
        DOMSource source = new DOMSource(doc);
        StreamResult result = new StreamResult(new File(pluginXmlFilePath + "/plugin.xml"));
        transformer.transform(source, result);
    }
    /**
     * Per-test setup: creates the release/snapshot repositories used by this class and
     * pre-generates the artifacts that individual test methods expect to find.
     * The statement order matters: repositories must exist before artifacts are
     * generated into them, and metadata is generated after the first artifact batch.
     */
    @Override
    @BeforeEach
    public void init()
            throws Exception
    {
        super.init();
        MockitoAnnotations.initMocks(this);
        defaultMavenArtifactDeployer = buildArtifactDeployer(Paths.get(""));
        MutableMavenRepositoryConfiguration mavenRepositoryConfiguration = new MutableMavenRepositoryConfiguration();
        mavenRepositoryConfiguration.setIndexingEnabled(false);
        MutableRepository repository1 = mavenRepositoryFactory.createRepository(REPOSITORY_RELEASES1);
        repository1.setPolicy(RepositoryPolicyEnum.RELEASE.getPolicy());
        repository1.setRepositoryConfiguration(mavenRepositoryConfiguration);
        createRepository(STORAGE0, repository1);
        // Generate releases
        // Used by testPartialFetch():
        generateArtifact(getRepositoryBasedir(STORAGE0, REPOSITORY_RELEASES1).getAbsolutePath(),
                         "org.carlspring.strongbox.partial:partial-foo",
                         new String[]{ "3.1",
                                       // Used by testPartialFetch()
                                       "3.2"
                                       // Used by testPartialFetch()
                         }
        );
        // Used by testCopy*():
        generateArtifact(getRepositoryBasedir(STORAGE0, REPOSITORY_RELEASES1).getAbsolutePath(),
                         "org.carlspring.strongbox.copy:copy-foo",
                         new String[]{ "1.1",
                                       // Used by testCopyArtifactFile()
                                       "1.2"
                                       // Used by testCopyArtifactDirectory()
                         }
        );
        // Used by testDelete():
        generateArtifact(getRepositoryBasedir(STORAGE0, REPOSITORY_RELEASES1).getAbsolutePath(),
                         "com.artifacts.to.delete.releases:delete-foo",
                         new String[]{ "1.2.1",
                                       // Used by testDeleteArtifactFile
                                       "1.2.2"
                                       // Used by testDeleteArtifactDirectory
                         }
        );
        generateMavenMetadata(STORAGE0, REPOSITORY_RELEASES1);
        generateArtifact(getRepositoryBasedir(STORAGE0, REPOSITORY_RELEASES1).getAbsolutePath(),
                         "org.carlspring.strongbox.partial:partial-foo",
                         new String[]{ "3.1",
                                       // Used by testPartialFetch()
                                       "3.2"
                                       // Used by testPartialFetch()
                         }
        );
        generateArtifact(getRepositoryBasedir(STORAGE0, REPOSITORY_RELEASES1).getAbsolutePath(),
                         "org.carlspring.strongbox.browse:foo-bar",
                         new String[]{ "1.0",
                                       // Used by testDirectoryListing()
                                       "2.4"
                                       // Used by testDirectoryListing()
                         }
        );
        generateArtifact(getRepositoryBasedir(STORAGE0, REPOSITORY_RELEASES).getAbsolutePath(),
                         "org.carlspring.strongbox.test:dynamic-privileges",
                         new String[]{ "1.0"
                                       // Used by testDynamicPrivilegeAssignmentForRepository()
                         }
        );
        MutableRepository repository2 = mavenRepositoryFactory.createRepository(REPOSITORY_RELEASES2);
        repository2.setPolicy(RepositoryPolicyEnum.RELEASE.getPolicy());
        repository2.setRepositoryConfiguration(mavenRepositoryConfiguration);
        repository2.setAllowsRedeployment(true);
        createRepository(STORAGE0, repository2);
        MutableRepository repository3 = mavenRepositoryFactory.createRepository(REPOSITORY_SNAPSHOTS);
        repository3.setPolicy(RepositoryPolicyEnum.SNAPSHOT.getPolicy());
        createRepository(STORAGE0, repository3);
        //noinspection ResultOfMethodCallIgnored
        Files.createDirectories(Paths.get(TEST_RESOURCES));
    }
    /**
     * Per-test teardown: closes the repositories' indexers, removes the repositories,
     * and re-runs cleanUp(). Any checked failure is wrapped so the @AfterEach signature
     * stays exception-free; super.shutdown() runs only when teardown succeeded.
     */
    @Override
    @AfterEach
    public void shutdown()
    {
        try
        {
            closeIndexersForRepository(STORAGE0, REPOSITORY_RELEASES1);
            closeIndexersForRepository(STORAGE0, REPOSITORY_RELEASES2);
            closeIndexersForRepository(STORAGE0, REPOSITORY_SNAPSHOTS);
            removeRepositories();
            cleanUp();
        }
        catch (Exception e)
        {
            throw new UndeclaredThrowableException(e);
        }
        super.shutdown();
    }
private void removeRepositories()
throws IOException, JAXBException
{
removeRepositories(getRepositoriesToClean());
}
    /**
     * Resolves an artifact through the maven-central proxy repository and verifies its
     * checksums. Note: This test requires access to the Internet.
     *
     * @throws Exception
     */
    @Test
    public void testResolveViaProxyToMavenCentral()
            throws Exception
    {
        String artifactPath = "storages/storage-common-proxies/maven-central/" +
                              "org/carlspring/maven/derby-maven-plugin/1.9/derby-maven-plugin-1.9.jar";
        resolveArtifact(artifactPath, "1.9");
    }
    /**
     * Resolves an artifact through a group repository that proxies maven-central and
     * verifies its checksums. Note: This test requires access to the Internet.
     *
     * @throws Exception
     */
    @Test
    public void testResolveViaProxyToMavenCentralInGroup()
            throws Exception
    {
        String artifactPath = "storages/storage-common-proxies/group-common-proxies/" +
                              "org/carlspring/maven/derby-maven-plugin/1.10/derby-maven-plugin-1.10.jar";
        resolveArtifact(artifactPath, "1.10");
    }
private void resolveArtifact(String artifactPath,
String version)
throws NoSuchAlgorithmException, IOException
{
InputStream is = client.getResource(artifactPath);
if (is == null)
{
fail("Failed to resolve 'derby-maven-plugin:" + version + ":jar' from Maven Central!");
}
FileOutputStream fos = new FileOutputStream(new File(TEST_RESOURCES, "derby-maven-plugin-" + version + ".jar"));
MultipleDigestOutputStream mdos = new MultipleDigestOutputStream(fos);
int len;
final int size = 1024;
byte[] bytes = new byte[size];
while ((len = is.read(bytes, 0, size)) != -1)
{
mdos.write(bytes, 0, len);
}
mdos.flush();
mdos.close();
String md5Remote = MessageDigestUtils.readChecksumFile(client.getResource(artifactPath + ".md5"));
String sha1Remote = MessageDigestUtils.readChecksumFile(client.getResource(artifactPath + ".sha1"));
final String md5Local = mdos.getMessageDigestAsHexadecimalString(EncryptionAlgorithmsEnum.MD5.getAlgorithm());
final String sha1Local = mdos.getMessageDigestAsHexadecimalString(EncryptionAlgorithmsEnum.SHA1.getAlgorithm());
logger.debug("MD5 [Remote]: " + md5Remote);
logger.debug("MD5 [Local ]: " + md5Local);
logger.debug("SHA-1 [Remote]: " + sha1Remote);
logger.debug("SHA-1 [Local ]: " + sha1Local);
assertEquals(md5Local, md5Remote, "MD5 checksums did not match!");
assertEquals(sha1Local, sha1Remote, "SHA-1 checksums did not match!");
}
    /**
     * Verifies that a HEAD request for a hosted artifact returns the same header values
     * as a GET request for the same path.
     */
    @Test
    public void testHeadersFetch()
            throws Exception
    {
        Headers headersFromGET, headersFromHEAD;
        /* Hosted Repository */
        String url = getContextBaseUrl() + "/storages/" + STORAGE0 + "/" + REPOSITORY_RELEASES1;
        String pathToPom = "/org/carlspring/strongbox/browse/foo-bar/2.4/foo-bar-2.4.pom";
        String artifactPath = url + pathToPom;
        headersFromGET = client.getHeadersFromGET(artifactPath);
        headersFromHEAD = client.getHeadersfromHEAD(artifactPath);
        assertHeadersEquals(headersFromGET, headersFromHEAD);
    }
private void assertHeadersEquals(Headers h1,
Headers h2)
{
assertNotNull(h1);
assertNotNull(h2);
for (Header header : h1)
{
if (h2.hasHeaderWithName(header.getName()))
{
assertEquals(header.getValue(), h2.getValue(header.getName()));
}
}
}
    /**
     * Fetches an artifact in two parts -- a first read deliberately stopped after ~1 KB,
     * then a ranged request for the remainder starting at that offset -- and verifies
     * that the glued result matches the artifact's remote MD5/SHA-1 checksums.
     */
    @Test
    public void testPartialFetch()
            throws Exception
    {
        // test that given artifact exists
        String url = getContextBaseUrl() + "/storages/" + STORAGE0 + "/" + REPOSITORY_RELEASES1;
        String pathToJar = "/org/carlspring/strongbox/partial/partial-foo/3.1/partial-foo-3.1.jar";
        String artifactPath = url + pathToJar;
        assertPathExists(artifactPath);
        // read remote checksum
        String md5Remote = MessageDigestUtils.readChecksumFile(client.getResource(artifactPath + ".md5", true));
        String sha1Remote = MessageDigestUtils.readChecksumFile(client.getResource(artifactPath + ".sha1", true));
        logger.info("Remote md5 checksum " + md5Remote);
        logger.info("Remote sha1 checksum " + sha1Remote);
        // calculate local checksum for given algorithms
        InputStream is = client.getResource(artifactPath);
        logger.debug("Wrote " + is.available() + " bytes.");
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        MultipleDigestOutputStream mdos = new MultipleDigestOutputStream(baos);
        int size = 1024;
        byte[] bytes = new byte[size];
        int total = 0;
        int len;
        // First pass: read until at least 'size' bytes have been copied, then stop,
        // simulating an interrupted download.
        while ((len = is.read(bytes, 0, size)) != -1)
        {
            mdos.write(bytes, 0, len);
            total += len;
            if (total >= size)
            {
                break;
            }
        }
        mdos.flush();
        bytes = new byte[size];
        is.close();
        logger.debug("Read " + total + " bytes.");
        // Second pass: request the resource again with an offset, resuming where the
        // first pass stopped, and feed the remainder into the same digest stream.
        is = client.getResource(artifactPath, total);
        logger.debug("Skipped " + total + " bytes.");
        int partialRead = total;
        int len2 = 0;
        while ((len = is.read(bytes, 0, size)) != -1)
        {
            mdos.write(bytes, 0, len);
            len2 += len;
            total += len;
        }
        mdos.flush();
        logger.debug("Wrote " + total + " bytes.");
        logger.debug("Partial read, terminated after writing " + partialRead + " bytes.");
        logger.debug("Partial read, continued and wrote " + len2 + " bytes.");
        logger.debug("Partial reads: total written bytes: " + (partialRead + len2) + ".");
        final String md5Local = mdos.getMessageDigestAsHexadecimalString(EncryptionAlgorithmsEnum.MD5.getAlgorithm());
        final String sha1Local = mdos.getMessageDigestAsHexadecimalString(EncryptionAlgorithmsEnum.SHA1.getAlgorithm());
        logger.debug("MD5 [Remote]: " + md5Remote);
        logger.debug("MD5 [Local ]: " + md5Local);
        logger.debug("SHA-1 [Remote]: " + sha1Remote);
        logger.debug("SHA-1 [Local ]: " + sha1Local);
        // Persist the glued bytes for post-mortem inspection.
        File artifact = new File("target/partial-foo-3.1.jar");
        if (artifact.exists())
        {
            //noinspection ResultOfMethodCallIgnored
            artifact.delete();
            //noinspection ResultOfMethodCallIgnored
            artifact.createNewFile();
        }
        FileOutputStream output = new FileOutputStream(artifact);
        output.write(baos.toByteArray());
        output.close();
        assertEquals(md5Remote, md5Local, "Glued partial fetches did not match MD5 checksum!");
        assertEquals(sha1Remote, sha1Local, "Glued partial fetches did not match SHA-1 checksum!");
    }
    /**
     * Copies a single artifact file between two repositories in the same storage and
     * verifies it appears at the destination.
     */
    @Test
    public void testCopyArtifactFile()
            throws Exception
    {
        // Regenerate the source artifact so the test is self-contained.
        generateArtifact(getRepositoryBasedir(STORAGE0, REPOSITORY_RELEASES1).getAbsolutePath(),
                         "org.carlspring.strongbox.copy:copy-foo",
                         new String[]{ "1.1" }
        );
        final File destRepositoryBasedir = getRepositoryBasedir(STORAGE0, REPOSITORY_RELEASES2);
        String artifactPath = "org/carlspring/strongbox/copy/copy-foo/1.1/copy-foo-1.1.jar";
        File destArtifactFile = new File(destRepositoryBasedir + "/" + artifactPath).getAbsoluteFile();
        if (destArtifactFile.exists())
        {
            //noinspection ResultOfMethodCallIgnored
            destArtifactFile.delete();
        }
        client.copy(artifactPath,
                    STORAGE0,
                    REPOSITORY_RELEASES1,
                    STORAGE0,
                    REPOSITORY_RELEASES2);
        assertTrue(destArtifactFile.exists(),
                   "Failed to copy artifact to destination repository '" + destRepositoryBasedir + "'!");
    }
    /**
     * Copies a whole artifact version directory between two repositories in the same
     * storage and verifies it appears at the destination.
     */
    @Test
    public void testCopyArtifactDirectory()
            throws Exception
    {
        final File destRepositoryBasedir = getRepositoryBasedir(STORAGE0, REPOSITORY_RELEASES2);
        String artifactPath = "org/carlspring/strongbox/copy/copy-foo/1.2";
        // clean up directory from possible previous test executions
        File artifactFileRestoredFromTrash = new File(destRepositoryBasedir + "/" + artifactPath).getAbsoluteFile();
        if (artifactFileRestoredFromTrash.exists())
        {
            removeDir(artifactFileRestoredFromTrash.toPath());
        }
        assertFalse(artifactFileRestoredFromTrash.exists(),
                    "Unexpected artifact in repository '" + destRepositoryBasedir + "'!");
        client.copy(artifactPath,
                    STORAGE0,
                    REPOSITORY_RELEASES1,
                    STORAGE0,
                    REPOSITORY_RELEASES2);
        assertTrue(artifactFileRestoredFromTrash.exists(),
                   "Failed to copy artifact to destination repository '" + destRepositoryBasedir + "'!");
    }
    /**
     * Deletes a single artifact file via the REST client and verifies it is gone from
     * the repository base directory.
     */
    @Test
    public void testDeleteArtifactFile()
            throws Exception
    {
        String artifactPath = "com/artifacts/to/delete/releases/delete-foo/1.2.1/delete-foo-1.2.1.jar";
        File deletedArtifact = new File(getRepositoryBasedir(STORAGE0, REPOSITORY_RELEASES1).getAbsolutePath(),
                                        artifactPath).getAbsoluteFile();
        assertTrue(deletedArtifact.exists(),
                   "Failed to locate artifact file '" + deletedArtifact.getAbsolutePath() + "'!");
        client.delete(STORAGE0, REPOSITORY_RELEASES1, artifactPath);
        assertFalse(deletedArtifact.exists(),
                    "Failed to delete artifact file '" + deletedArtifact.getAbsolutePath() + "'!");
    }
    /**
     * Deletes a whole artifact version directory via the REST client and verifies it is
     * gone from the repository base directory.
     */
    @Test
    public void testDeleteArtifactDirectory()
            throws Exception
    {
        String artifactPath = "com/artifacts/to/delete/releases/delete-foo/1.2.2";
        File deletedArtifact = new File(getRepositoryBasedir(STORAGE0, REPOSITORY_RELEASES1).getAbsolutePath(),
                                        artifactPath).getAbsoluteFile();
        assertTrue(deletedArtifact.exists(),
                   "Failed to locate artifact file '" + deletedArtifact.getAbsolutePath() + "'!");
        client.delete(STORAGE0, REPOSITORY_RELEASES1, artifactPath);
        assertFalse(deletedArtifact.exists(),
                    "Failed to delete artifact file '" + deletedArtifact.getAbsolutePath() + "'!");
    }
    /**
     * Requesting a non-existing directory (trailing slash) through the proxy must
     * return 404.
     */
    @Test
    public void testNonExistingDirectoryDownload()
    {
        String path = "/storages/storage-common-proxies/maven-central/john/doe/";
        ExtractableResponse response = client.getResourceWithResponse(path, "");
        assertEquals(HttpStatus.NOT_FOUND.value(), response.statusCode(), "Wrong response");
    }
@Test
public void testNonExistingArtifactDownload()
{
String path = "/storages/storage-common-proxies/maven-central/john/doe";
ExtractableResponse response = client.getResourceWithResponse(path, "");
assertEquals(response.statusCode(), HttpStatus.NOT_FOUND.value(), "Wrong response");
}
@Test
public void testNonExistingArtifactInNonExistingDirectory()
{
String path = "/storages/storage-common-proxies/maven-central/john/doe/who.jar";
ExtractableResponse response = client.getResourceWithResponse(path, "");
assertEquals(response.statusCode(), HttpStatus.NOT_FOUND.value(), "Wrong response");
}
@Test
public void testNonExistingArtifactInExistingDirectory()
{
String path = "/storages/storage-common-proxies/maven-central/org/carlspring/maven/derby-maven-plugin/1.8/derby-maven-plugin-6.9.jar";
ExtractableResponse response = client.getResourceWithResponse(path, "");
assertEquals(response.statusCode(), HttpStatus.NOT_FOUND.value(), "Wrong response");
}
@Test
public void testDirectoryListing()
throws Exception
{
String artifactPath = "org/carlspring/strongbox/browse/foo-bar";
File artifact = new File(getRepositoryBasedir(STORAGE0, REPOSITORY_RELEASES1).getAbsolutePath(), artifactPath)
.getAbsoluteFile();
assertTrue(artifact.exists(), "Failed to locate artifact file '" + artifact.getAbsolutePath() + "'!");
String basePath = "/api/browse/" + STORAGE0 + "/" + REPOSITORY_RELEASES1;
ExtractableResponse repositoryRoot = client.getResourceWithResponse(basePath, "");
ExtractableResponse trashDirectoryListing = client.getResourceWithResponse(basePath, ".trash");
ExtractableResponse indexDirectoryListing = client.getResourceWithResponse(basePath, ".index");
ExtractableResponse directoryListing = client.getResourceWithResponse(basePath,
"org/carlspring/strongbox/browse/");
ExtractableResponse fileListing = client.getResourceWithResponse(basePath,
"org/carlspring/strongbox/browse/foo-bar/1.0/");
ExtractableResponse invalidPath = client.getResourceWithResponse(basePath,
"org/carlspring/strongbox/browse/1.0/");
String repositoryRootContent = repositoryRoot.asString();
String directoryListingContent = directoryListing.asString();
String fileListingContent = fileListing.asString();
assertFalse(repositoryRootContent.contains(".trash"),
".trash directory should not be visible in directory listing!");
assertEquals(trashDirectoryListing.response().getStatusCode(), HttpStatus.NOT_FOUND.value(),
".trash directory should not be browsable!");
logger.debug(directoryListingContent);
assertTrue(directoryListingContent.contains("org/carlspring/strongbox/browse"));
assertTrue(fileListingContent.contains("foo-bar-1.0.jar"));
assertTrue(fileListingContent.contains("foo-bar-1.0.pom"));
assertEquals(invalidPath.response().getStatusCode(), HttpStatus.NOT_FOUND.value());
Assumptions.assumeTrue(repositoryIndexManager.isPresent());
assertFalse(repositoryRootContent.contains(".index"),
".index directory should not be visible in directory listing!");
assertEquals(indexDirectoryListing.response().getStatusCode(), HttpStatus.OK.value(),
".index directory should be browsable!");
}
@Test
public void testMetadataAtVersionLevel()
throws NoSuchAlgorithmException,
ArtifactOperationException,
IOException,
XmlPullParserException,
ArtifactTransportException
{
String ga = "org.carlspring.strongbox.metadata:metadata-foo";
Artifact artifact1 = ArtifactUtils.getArtifactFromGAVTC(ga + ":3.1-SNAPSHOT");
String snapshotVersion1 = createSnapshotVersion("3.1", 1);
String snapshotVersion2 = createSnapshotVersion("3.1", 2);
String snapshotVersion3 = createSnapshotVersion("3.1", 3);
String snapshotVersion4 = createSnapshotVersion("3.1", 4);
Artifact artifact1WithTimestamp1 = ArtifactUtils.getArtifactFromGAVTC(ga + ":" + snapshotVersion1);
Artifact artifact1WithTimestamp2 = ArtifactUtils.getArtifactFromGAVTC(ga + ":" + snapshotVersion2);
Artifact artifact1WithTimestamp3 = ArtifactUtils.getArtifactFromGAVTC(ga + ":" + snapshotVersion3);
Artifact artifact1WithTimestamp4 = ArtifactUtils.getArtifactFromGAVTC(ga + ":" + snapshotVersion4);
MavenArtifactDeployer artifactDeployer = buildArtifactDeployer(GENERATOR_BASEDIR.toPath());
artifactDeployer.generateAndDeployArtifact(artifact1WithTimestamp1, STORAGE0, REPOSITORY_SNAPSHOTS);
artifactDeployer.generateAndDeployArtifact(artifact1WithTimestamp2, STORAGE0, REPOSITORY_SNAPSHOTS);
artifactDeployer.generateAndDeployArtifact(artifact1WithTimestamp3, STORAGE0, REPOSITORY_SNAPSHOTS);
artifactDeployer.generateAndDeployArtifact(artifact1WithTimestamp4, STORAGE0, REPOSITORY_SNAPSHOTS);
String path = ArtifactUtils.getVersionLevelMetadataPath(artifact1);
String url = "/storages/" + STORAGE0 + "/" + REPOSITORY_SNAPSHOTS + "/";
String metadataUrl = url + path;
logger.info("[retrieveMetadata] Load metadata by URL " + metadataUrl);
Metadata versionLevelMetadata = defaultMavenArtifactDeployer.retrieveMetadata(url + path);
assertNotNull(versionLevelMetadata);
assertEquals("org.carlspring.strongbox.metadata", versionLevelMetadata.getGroupId());
assertEquals("metadata-foo", versionLevelMetadata.getArtifactId());
checkSnapshotVersionExistsInMetadata(versionLevelMetadata, snapshotVersion1, null, "jar");
checkSnapshotVersionExistsInMetadata(versionLevelMetadata, snapshotVersion1, "javadoc", "jar");
checkSnapshotVersionExistsInMetadata(versionLevelMetadata, snapshotVersion1, null, "pom");
checkSnapshotVersionExistsInMetadata(versionLevelMetadata, snapshotVersion2, null, "jar");
checkSnapshotVersionExistsInMetadata(versionLevelMetadata, snapshotVersion2, "javadoc", "jar");
checkSnapshotVersionExistsInMetadata(versionLevelMetadata, snapshotVersion2, null, "pom");
checkSnapshotVersionExistsInMetadata(versionLevelMetadata, snapshotVersion3, null, "jar");
checkSnapshotVersionExistsInMetadata(versionLevelMetadata, snapshotVersion3, "javadoc", "jar");
checkSnapshotVersionExistsInMetadata(versionLevelMetadata, snapshotVersion3, null, "pom");
checkSnapshotVersionExistsInMetadata(versionLevelMetadata, snapshotVersion4, null, "jar");
checkSnapshotVersionExistsInMetadata(versionLevelMetadata, snapshotVersion4, "javadoc", "jar");
checkSnapshotVersionExistsInMetadata(versionLevelMetadata, snapshotVersion4, null, "pom");
assertNotNull(versionLevelMetadata.getVersioning().getLastUpdated());
}
@Test
public void testMetadataAtGroupAndArtifactIdLevel()
        throws Exception
{
    // Verifies that deploying plugin and plain artifacts generates correct
    // Maven metadata at both the group level (plugins list) and the
    // artifactId level (versions, latest/release, lastUpdated).

    // Given
    // Plugin Artifacts
    String groupId = "org.carlspring.strongbox.metadata";
    String artifactId1 = "metadata-foo-maven-plugin";
    String artifactId2 = "metadata-faa-maven-plugin";
    String artifactId3 = "metadata-foo";
    String version1 = "3.1";
    String version2 = "3.2";

    // Generate plugin.xml + jar for artifact1/version1 and stub the mocked
    // artifact to return the generated file. (NOTE: "crateJarFile" is the
    // helper's actual (misspelled) name — defined elsewhere in this class.)
    createPluginXmlFile(groupId, artifactId1, version1);
    crateJarFile(artifactId1 + "-" + version1);
    String filePath =
            Paths.get(pluginXmlFilePath).getParent().toString() + "/" + artifactId1 + "-" + version1 + ".jar";
    Mockito.doReturn(new File(filePath)).when(artifact1).getFile();

    createPluginXmlFile(groupId, artifactId2, version1);
    crateJarFile(artifactId2 + "-" + version1);
    filePath = Paths.get(pluginXmlFilePath).getParent().toString() + "/" + artifactId2 + "-" + version1 + ".jar";
    Mockito.doReturn(new File(filePath)).when(artifact2).getFile();

    //artifact3 = getArtifactFromGAVTC(groupId + ":" + artifactId1 + ":" + version2);
    createPluginXmlFile(groupId, artifactId1, version2);
    crateJarFile(artifactId1 + "-" + version2);
    filePath = Paths.get(pluginXmlFilePath).getParent().toString() + "/" + artifactId1 + "-" + version2 + ".jar";
    Mockito.doReturn(new File(filePath)).when(artifact3).getFile();

    //artifact4 = getArtifactFromGAVTC(groupId + ":" + artifactId2 + ":" + version2);
    createPluginXmlFile(groupId, artifactId2, version2);
    crateJarFile(artifactId2 + "-" + version2);
    filePath = Paths.get(pluginXmlFilePath).getParent().toString() + "/" + artifactId2 + "-" + version2 + ".jar";
    Mockito.doReturn(new File(filePath)).when(artifact4).getFile();

    // Artifacts (non-plugin)
    // Artifact artifact5 = getArtifactFromGAVTC(groupId + ":" + artifactId3 + ":" + version1);
    createPluginXmlFile(groupId, artifactId3, version1);
    crateJarFile(artifactId3 + "-" + version1);
    filePath = Paths.get(pluginXmlFilePath).getParent().toString() + "/" + artifactId3 + "-" + version1 + ".jar";
    Mockito.doReturn(new File(filePath)).when(artifact5).getFile();

    //artifact6 = getArtifactFromGAVTC(groupId + ":" + artifactId3 + ":" + version2);
    createPluginXmlFile(groupId, artifactId3, version2);
    crateJarFile(artifactId3 + "-" + version2);
    filePath = Paths.get(pluginXmlFilePath).getParent().toString() + "/" + artifactId3 + "-" + version2 + ".jar";
    Mockito.doReturn(new File(filePath)).when(artifact6).getFile();

    // Wrap the four plugin artifacts in PluginArtifact so the deployer
    // records them in the group-level metadata <plugins> section.
    Plugin p1 = new Plugin();
    p1.setGroupId(artifact1.getGroupId());
    p1.setArtifactId(artifact1.getArtifactId());
    p1.setVersion(artifact1.getVersion());
    Plugin p2 = new Plugin();
    p2.setGroupId(artifact2.getGroupId());
    p2.setArtifactId(artifact2.getArtifactId());
    p2.setVersion(artifact2.getVersion());
    Plugin p3 = new Plugin();
    p3.setGroupId(artifact3.getGroupId());
    p3.setArtifactId(artifact3.getArtifactId());
    p3.setVersion(artifact3.getVersion());
    Plugin p4 = new Plugin();
    p4.setGroupId(artifact4.getGroupId());
    p4.setArtifactId(artifact4.getArtifactId());
    p4.setVersion(artifact4.getVersion());

    PluginArtifact a = new PluginArtifact(p1, artifact1);
    PluginArtifact b = new PluginArtifact(p2, artifact2);
    PluginArtifact c = new PluginArtifact(p3, artifact3);
    PluginArtifact d = new PluginArtifact(p4, artifact4);

    MavenArtifactDeployer artifactDeployer = buildArtifactDeployer(GENERATOR_BASEDIR.toPath());

    // When
    artifactDeployer.generateAndDeployArtifact(a, STORAGE0, REPOSITORY_RELEASES2);
    artifactDeployer.generateAndDeployArtifact(b, STORAGE0, REPOSITORY_RELEASES2);
    artifactDeployer.generateAndDeployArtifact(c, STORAGE0, REPOSITORY_RELEASES2);
    artifactDeployer.generateAndDeployArtifact(d, STORAGE0, REPOSITORY_RELEASES2);
    artifactDeployer.generateAndDeployArtifact(artifact5, STORAGE0, REPOSITORY_RELEASES2);
    artifactDeployer.generateAndDeployArtifact(artifact6, STORAGE0, REPOSITORY_RELEASES2);

    // Then
    // Group level metadata: only the two plugin artifactIds should appear.
    Metadata groupLevelMetadata = defaultMavenArtifactDeployer.retrieveMetadata("storages/" + STORAGE0 + "/" +
                                                                               REPOSITORY_RELEASES2 + "/" +
                                                                               ArtifactUtils.getGroupLevelMetadataPath(
                                                                                       artifact1));
    assertNotNull(groupLevelMetadata);
    assertEquals(2, groupLevelMetadata.getPlugins().size());

    // Artifact Level metadata for the first plugin: two versions deployed,
    // latest/release should both point to version2.
    Metadata artifactLevelMetadata = defaultMavenArtifactDeployer.retrieveMetadata("storages/" + STORAGE0 + "/" +
                                                                                  REPOSITORY_RELEASES2 + "/" +
                                                                                  ArtifactUtils.getArtifactLevelMetadataPath(
                                                                                          artifact1));
    assertNotNull(artifactLevelMetadata);
    assertEquals(groupId, artifactLevelMetadata.getGroupId());
    assertEquals(artifactId1, artifactLevelMetadata.getArtifactId());
    assertEquals(version2, artifactLevelMetadata.getVersioning().getLatest());
    assertEquals(version2, artifactLevelMetadata.getVersioning().getRelease());
    assertEquals(2, artifactLevelMetadata.getVersioning().getVersions().size());
    assertNotNull(artifactLevelMetadata.getVersioning().getLastUpdated());

    // Same checks for the second plugin artifactId.
    artifactLevelMetadata = defaultMavenArtifactDeployer.retrieveMetadata(
            "storages/" + STORAGE0 + "/" + REPOSITORY_RELEASES2 + "/" +
            ArtifactUtils.getArtifactLevelMetadataPath(artifact2));
    assertNotNull(artifactLevelMetadata);
    assertEquals(groupId, artifactLevelMetadata.getGroupId());
    assertEquals(artifactId2, artifactLevelMetadata.getArtifactId());
    assertEquals(version2, artifactLevelMetadata.getVersioning().getLatest());
    assertEquals(version2, artifactLevelMetadata.getVersioning().getRelease());
    assertEquals(2, artifactLevelMetadata.getVersioning().getVersions().size());
    assertNotNull(artifactLevelMetadata.getVersioning().getLastUpdated());

    // And for the plain (non-plugin) artifactId.
    artifactLevelMetadata = defaultMavenArtifactDeployer.retrieveMetadata(
            "storages/" + STORAGE0 + "/" + REPOSITORY_RELEASES2 + "/" +
            ArtifactUtils.getArtifactLevelMetadataPath(artifact5));
    assertNotNull(artifactLevelMetadata);
    assertEquals(groupId, artifactLevelMetadata.getGroupId());
    assertEquals(artifactId3, artifactLevelMetadata.getArtifactId());
    assertEquals(version2, artifactLevelMetadata.getVersioning().getLatest());
    assertEquals(version2, artifactLevelMetadata.getVersioning().getRelease());
    assertEquals(2, artifactLevelMetadata.getVersioning().getVersions().size());
    assertNotNull(artifactLevelMetadata.getVersioning().getLastUpdated());
}
@Test
public void testUpdateMetadataOnDeleteReleaseVersionDirectory()
        throws Exception
{
    // Verifies that deleting a release version directory removes that
    // version from both the Maven Index and the artifact-level metadata.
    // Skipped when no repository index manager is configured.
    Assumptions.assumeTrue(repositoryIndexManager.isPresent());

    // Given
    String groupId = "org.carlspring.strongbox.delete-metadata";
    String artifactId = "metadata-foo";
    String version1 = "1.2.1";
    String version2 = "1.2.2";

    Artifact artifact1 = ArtifactUtils.getArtifactFromGAVTC(groupId + ":" + artifactId + ":" + version1);
    Artifact artifact2 = ArtifactUtils.getArtifactFromGAVTC(groupId + ":" + artifactId + ":" + version2);
    // version2 also gets a javadoc-classified jar, so 3 index hits are expected.
    Artifact artifact3 = ArtifactUtils.getArtifactFromGAVTC(
            groupId + ":" + artifactId + ":" + version2 + ":jar:javadoc");

    MavenArtifactDeployer artifactDeployer = buildArtifactDeployer(GENERATOR_BASEDIR.toPath());
    artifactDeployer.generateAndDeployArtifact(artifact1, STORAGE0, REPOSITORY_RELEASES2);
    artifactDeployer.generateAndDeployArtifact(artifact2, STORAGE0, REPOSITORY_RELEASES2);
    artifactDeployer.generateAndDeployArtifact(artifact3, STORAGE0, REPOSITORY_RELEASES2);

    // Run a search against the index and get a list of all the artifacts matching this exact GAV
    SearchRequest request = new SearchRequest(STORAGE0,
                                              REPOSITORY_RELEASES2,
                                              "+g:" + groupId + " " +
                                              "+a:" + artifactId + " " +
                                              "+v:" + "1.2.2",
                                              MavenIndexerSearchProvider.ALIAS);

    SearchResults results = artifactSearchService.search(request);

    if (!results.getResults().isEmpty())
    {
        logger.debug("Found " + results.getResults()
                                       .size() + " results in index of " +
                     STORAGE0 + ":" + REPOSITORY_RELEASES2 + IndexTypeEnum.LOCAL.getType() + ".");
    }

    for (SearchResult result : results.getResults())
    {
        String artifactPath = result.getArtifactCoordinates().toPath();
        logger.debug(result.getArtifactCoordinates() + "(" + artifactPath + ")");
    }

    // jar + javadoc jar + pom for 1.2.2 => 3 expected index entries.
    assertEquals(3,
                 results.getResults().size(),
                 "Incorrect number of results yielded from search against Maven Index!");

    // When: delete the whole 1.2.2 version directory.
    String path = "org/carlspring/strongbox/delete-metadata/metadata-foo/1.2.2";
    client.delete(STORAGE0, REPOSITORY_RELEASES2, path);

    // Then: the metadata must no longer list 1.2.2 ...
    Metadata metadata = defaultMavenArtifactDeployer.retrieveMetadata(
            "storages/" + STORAGE0 + "/" + REPOSITORY_RELEASES2 + "/" +
            ArtifactUtils.getArtifactLevelMetadataPath(artifact1));

    // ... and the index must no longer return any hits for the deleted GAV.
    // Re-run the search and check, if the results are now different
    results = artifactSearchService.search(request);

    assertTrue(results.getResults()
                      .isEmpty(), "Failed to delete artifacts from Maven Index!!");
    assertFalse(metadata.getVersioning()
                        .getVersions()
                        .contains("1.2.2"));
}
@Test
public void testUpdateMetadataOnDeleteSnapshotVersionDirectory()
        throws NoSuchAlgorithmException,
               XmlPullParserException,
               IOException,
               ArtifactOperationException,
               ArtifactTransportException
{
    // Verifies that deleting a snapshot version directory removes the base
    // snapshot version (3.1-SNAPSHOT) from the artifact-level metadata.

    // Given: four timestamped snapshot builds of the same 3.1-SNAPSHOT version.
    String ga = "org.carlspring.strongbox.metadata:metadata-foo";

    Artifact artifact1 = ArtifactUtils.getArtifactFromGAVTC(ga + ":3.1-SNAPSHOT");
    Artifact artifact1WithTimestamp1 = ArtifactUtils.getArtifactFromGAVTC(
            ga + ":" + createSnapshotVersion("3.1", 1));
    Artifact artifact1WithTimestamp2 = ArtifactUtils.getArtifactFromGAVTC(
            ga + ":" + createSnapshotVersion("3.1", 2));
    Artifact artifact1WithTimestamp3 = ArtifactUtils.getArtifactFromGAVTC(
            ga + ":" + createSnapshotVersion("3.1", 3));
    Artifact artifact1WithTimestamp4 = ArtifactUtils.getArtifactFromGAVTC(
            ga + ":" + createSnapshotVersion("3.1", 4));

    MavenArtifactDeployer artifactDeployer = buildArtifactDeployer(GENERATOR_BASEDIR.toPath());
    artifactDeployer.generateAndDeployArtifact(artifact1WithTimestamp1, STORAGE0, REPOSITORY_SNAPSHOTS);
    artifactDeployer.generateAndDeployArtifact(artifact1WithTimestamp2, STORAGE0, REPOSITORY_SNAPSHOTS);
    artifactDeployer.generateAndDeployArtifact(artifact1WithTimestamp3, STORAGE0, REPOSITORY_SNAPSHOTS);
    artifactDeployer.generateAndDeployArtifact(artifact1WithTimestamp4, STORAGE0, REPOSITORY_SNAPSHOTS);

    String path = "org/carlspring/strongbox/metadata/metadata-foo/3.1-SNAPSHOT";

    // When: delete the whole snapshot version directory.
    client.delete(STORAGE0, REPOSITORY_SNAPSHOTS, path);

    // Then: 3.1-SNAPSHOT must be gone from the versions list.
    Metadata metadata = defaultMavenArtifactDeployer.retrieveMetadata(
            "storages/" + STORAGE0 + "/" + REPOSITORY_SNAPSHOTS + "/" +
            ArtifactUtils.getArtifactLevelMetadataPath(artifact1));

    assertFalse(metadata.getVersioning()
                        .getVersions()
                        .contains("3.1-SNAPSHOT"));
}
/**
 * Checks whether the version-level metadata contains a snapshot-version entry
 * matching the given version, classifier and extension.
 * <p>
 * Fix: callers pass {@code null} as the classifier for main/pom artifacts, but the
 * original {@code snapshotVersion.getClassifier().equals(classifier)} comparison is
 * not null-safe (NPE risk if the model returns null, and {@code equals(null)} can
 * never match). Comparisons are now done with {@link java.util.Objects#equals}.
 *
 * @param versionLevelMetadata the version-level metadata to search
 * @param version              the (timestamped) snapshot version to look for
 * @param classifier           the expected classifier, or {@code null} when none is expected
 * @param extension            the expected extension (e.g. "jar", "pom")
 * @return true if a matching snapshot-version entry exists
 */
private boolean checkSnapshotVersionExistsInMetadata(Metadata versionLevelMetadata,
                                                     String version,
                                                     String classifier,
                                                     String extension)
{
    return versionLevelMetadata.getVersioning()
                               .getSnapshotVersions()
                               .stream()
                               .anyMatch(snapshotVersion -> java.util.Objects.equals(snapshotVersion.getVersion(), version) &&
                                                            java.util.Objects.equals(snapshotVersion.getClassifier(), classifier) &&
                                                            java.util.Objects.equals(snapshotVersion.getExtension(), extension)
                               );
}
@Test
@WithUserDetails("developer01")
public void testDynamicPrivilegeAssignmentForRepository()
{
String url = getContextBaseUrl() + "/storages/" + STORAGE0 + "/" + REPOSITORY_RELEASES;
String pathToJar = "/org/carlspring/strongbox/test/dynamic-privileges/1.0/dynamic-privileges-1.0.jar";
String artifactPath = url + pathToJar;
|
package mp400;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
/**
*
* @author akeegazooka
*/
public class PPMFile {
ArrayList<Integer[]> imageData;
String format;
MP2d dimensions;
Integer maxValue;
String fileName;
public PPMFile(String inFileName) throws IOException
{
setFileName(inFileName);
loadFile();
}
private void setFileName(String inFileName)
{
fileName = inFileName;
}
private void loadFile() throws IOException
{
BufferedReader inputStream = null;
if(! (fileName.equals("") ) )
{
try
{
inputStream = new BufferedReader(new FileReader(fileName)) ;
String s;
boolean metaFull = false;
String dataString = "";
String[] dataElements;
String[] lineResult;
do {
while( (s = inputStream.readLine()) !=null)
{
lineResult = s.split("
if(! lineResult[0].equals("") )
{
dataString = dataString.concat(" " + lineResult[0]);
dataElements = dataString.split(" ");
if(dataElements.length == 5)
{
metaFull = true;
for(String sElement : dataElements)
{
System.out.println(sElement);
}
}
}
}
}while (!metaFull);
}
catch (IOException x)
{
System.out.println(x.getMessage());
}
finally
{
if(inputStream != null)
{
inputStream.close();
}
}
}
}
}
|
package io.lumify.securegraph.model.ontology;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Lists;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import io.lumify.core.exception.LumifyException;
import io.lumify.core.model.ontology.*;
import io.lumify.core.model.properties.LumifyProperties;
import io.lumify.core.model.user.AuthorizationRepository;
import io.lumify.core.util.LumifyLogger;
import io.lumify.core.util.LumifyLoggerFactory;
import io.lumify.core.util.TimingCallable;
import org.json.JSONObject;
import org.securegraph.*;
import org.securegraph.property.StreamingPropertyValue;
import org.securegraph.util.ConvertingIterable;
import org.securegraph.util.FilterIterable;
import org.securegraph.util.IterableUtils;
import org.semanticweb.owlapi.io.OWLOntologyDocumentSource;
import org.semanticweb.owlapi.io.ReaderDocumentSource;
import org.semanticweb.owlapi.model.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import static com.google.common.base.Preconditions.checkNotNull;
import static io.lumify.core.model.properties.LumifyProperties.*;
import static org.securegraph.util.IterableUtils.*;
@Singleton
public class SecureGraphOntologyRepository extends OntologyRepositoryBase {
private static final LumifyLogger LOGGER = LumifyLoggerFactory.getLogger(SecureGraphOntologyRepository.class);
public static final String ID_PREFIX = "ontology_";
public static final String ID_PREFIX_PROPERTY = ID_PREFIX + "prop_";
public static final String ID_PREFIX_RELATIONSHIP = ID_PREFIX + "rel_";
public static final String ID_PREFIX_CONCEPT = ID_PREFIX + "concept_";
private static final int QUERY_LIMIT = 10000;
private Graph graph;
private Authorizations authorizations;
private Cache<String, List<Concept>> allConceptsWithPropertiesCache = CacheBuilder.newBuilder()
.expireAfterWrite(1, TimeUnit.HOURS)
.build();
private Cache<String, List<OntologyProperty>> allPropertiesCache = CacheBuilder.newBuilder()
.expireAfterWrite(1, TimeUnit.HOURS)
.build();
private Cache<String, List<Relationship>> relationshipLabelsCache = CacheBuilder.newBuilder()
.expireAfterWrite(1, TimeUnit.HOURS)
.build();
private Cache<String, JSONObject> jsonCache = CacheBuilder.newBuilder()
.expireAfterWrite(1, TimeUnit.HOURS)
.build();
@Inject
public SecureGraphOntologyRepository(final Graph graph,
final AuthorizationRepository authorizationRepository) {
this.graph = graph;
authorizationRepository.addAuthorizationToGraph(SecureGraphOntologyRepository.VISIBILITY_STRING);
Set<String> authorizationsSet = new HashSet<String>();
authorizationsSet.add(VISIBILITY_STRING);
this.authorizations = authorizationRepository.createAuthorizations(authorizationsSet);
if (!isOntologyDefined()) {
LOGGER.info("Base ontology not defined. Creating a new ontology.");
defineOntology(authorizations);
} else {
LOGGER.info("Base ontology already defined.");
}
}
@Override
public JSONObject getJson() {
JSONObject json = this.jsonCache.getIfPresent("json");
if (json != null) {
return json;
}
json = super.getJson();
this.jsonCache.put("json", json);
return json;
}
@Override
public void clearCache() {
LOGGER.info("clearing ontology cache");
graph.flush();
this.jsonCache.invalidateAll();
this.allConceptsWithPropertiesCache.invalidateAll();
this.allPropertiesCache.invalidateAll();
this.relationshipLabelsCache.invalidateAll();
}
@Override
protected void addEntityGlyphIconToEntityConcept(Concept entityConcept, byte[] rawImg) {
StreamingPropertyValue raw = new StreamingPropertyValue(new ByteArrayInputStream(rawImg), byte[].class);
raw.searchIndex(false);
entityConcept.setProperty(LumifyProperties.GLYPH_ICON.getPropertyName(), raw, authorizations);
graph.flush();
}
@Override
public void storeOntologyFile(InputStream in, IRI documentIRI) {
StreamingPropertyValue value = new StreamingPropertyValue(in, byte[].class);
value.searchIndex(false);
Map<String, Object> metadata = new HashMap<String, Object>();
Vertex rootConceptVertex = ((SecureGraphConcept) getRootConcept()).getVertex();
metadata.put("index", toList(rootConceptVertex.getProperties("ontologyFile")).size());
rootConceptVertex.addPropertyValue(documentIRI.toString(), "ontologyFile", value, metadata, VISIBILITY.getVisibility(), authorizations);
graph.flush();
}
@Override
public List<OWLOntology> loadOntologyFiles(OWLOntologyManager m, OWLOntologyLoaderConfiguration config, IRI excludedIRI) throws OWLOntologyCreationException, IOException {
List<OWLOntology> loadedOntologies = new ArrayList<OWLOntology>();
Iterable<Property> ontologyFiles = getOntologyFiles();
for (Property ontologyFile : ontologyFiles) {
IRI ontologyFileIRI = IRI.create(ontologyFile.getKey());
if (excludedIRI != null && excludedIRI.equals(ontologyFileIRI)) {
continue;
}
InputStream lumifyBaseOntologyIn = ((StreamingPropertyValue) ontologyFile.getValue()).getInputStream();
try {
Reader lumifyBaseOntologyReader = new InputStreamReader(lumifyBaseOntologyIn);
LOGGER.info("Loading existing ontology: %s", ontologyFile.getKey());
OWLOntologyDocumentSource lumifyBaseOntologySource = new ReaderDocumentSource(lumifyBaseOntologyReader, ontologyFileIRI);
try {
OWLOntology o = m.loadOntologyFromOntologyDocument(lumifyBaseOntologySource, config);
loadedOntologies.add(o);
} catch (UnloadableImportException ex) {
LOGGER.error("Could not load %s", ontologyFileIRI, ex);
}
} finally {
lumifyBaseOntologyIn.close();
}
}
return loadedOntologies;
}
private Iterable<Property> getOntologyFiles() {
List<Property> ontologyFiles = toList(((SecureGraphConcept) getRootConcept()).getVertex().getProperties("ontologyFile"));
Collections.sort(ontologyFiles, new Comparator<Property>() {
@Override
public int compare(Property ontologyFile1, Property ontologyFile2) {
Integer index1 = (Integer) ontologyFile1.getMetadata().get("index");
Integer index2 = (Integer) ontologyFile2.getMetadata().get("index");
return index1.compareTo(index2);
}
});
return ontologyFiles;
}
@Override
public Iterable<Relationship> getRelationshipLabels() {
try {
return relationshipLabelsCache.get("", new TimingCallable<List<Relationship>>("getRelationshipLabels") {
@Override
public List<Relationship> callWithTime() throws Exception {
Iterable<Vertex> vertices = graph.query(getAuthorizations())
.has(CONCEPT_TYPE.getPropertyName(), TYPE_RELATIONSHIP)
.limit(QUERY_LIMIT)
.vertices();
return toList(new ConvertingIterable<Vertex, Relationship>(vertices) {
@Override
protected Relationship convert(Vertex vertex) {
Vertex sourceVertex = single(vertex.getVertices(Direction.IN, LabelName.HAS_EDGE.toString(), getAuthorizations()));
String sourceConceptIRI = ONTOLOGY_TITLE.getPropertyValue(sourceVertex);
Vertex destVertex = single(vertex.getVertices(Direction.OUT, LabelName.HAS_EDGE.toString(), getAuthorizations()));
String destConceptIRI = ONTOLOGY_TITLE.getPropertyValue(destVertex);
final List<String> inverseOfIRIs = getRelationshipInverseOfIRIs(vertex);
return new SecureGraphRelationship(vertex, sourceConceptIRI, destConceptIRI, inverseOfIRIs);
}
});
}
});
} catch (ExecutionException e) {
throw new LumifyException("Could not get relationship labels");
}
}
private List<String> getRelationshipInverseOfIRIs(final Vertex vertex) {
return IterableUtils.toList(new ConvertingIterable<Vertex, String>(vertex.getVertices(Direction.OUT, LabelName.INVERSE_OF.toString(), getAuthorizations())) {
@Override
protected String convert(Vertex inverseOfVertex) {
return LumifyProperties.ONTOLOGY_TITLE.getPropertyValue(inverseOfVertex);
}
});
}
@Override
public String getDisplayNameForLabel(String relationshipIRI) {
String displayName = null;
if (relationshipIRI != null && !relationshipIRI.trim().isEmpty()) {
try {
Relationship relationship = getRelationshipByIRI(relationshipIRI);
if (relationship != null) {
displayName = relationship.getDisplayName();
}
} catch (IllegalArgumentException iae) {
throw new IllegalStateException(String.format("Found multiple vertices for relationship label \"%s\"", relationshipIRI),
iae);
}
}
return displayName;
}
@Override
public Iterable<OntologyProperty> getProperties() {
try {
return allPropertiesCache.get("", new TimingCallable<List<OntologyProperty>>("getProperties") {
@Override
public List<OntologyProperty> callWithTime() throws Exception {
return toList(new ConvertingIterable<Vertex, OntologyProperty>(graph.query(getAuthorizations())
.has(CONCEPT_TYPE.getPropertyName(), TYPE_PROPERTY)
.limit(QUERY_LIMIT)
.vertices()) {
@Override
protected OntologyProperty convert(Vertex vertex) {
return new SecureGraphOntologyProperty(vertex);
}
});
}
});
} catch (ExecutionException e) {
throw new LumifyException("Could not get properties", e);
}
}
@Override
public OntologyProperty getProperty(String propertyIRI) {
try {
Vertex propVertex = singleOrDefault(graph.query(getAuthorizations())
.has(CONCEPT_TYPE.getPropertyName(), TYPE_PROPERTY)
.has(ONTOLOGY_TITLE.getPropertyName(), propertyIRI)
.limit(QUERY_LIMIT)
.vertices(), null);
return propVertex != null ? new SecureGraphOntologyProperty(propVertex) : null;
} catch (IllegalArgumentException iae) {
throw new IllegalStateException(String.format("Too many \"%s\" properties", propertyIRI), iae);
}
}
@Override
public Relationship getRelationshipByIRI(String relationshipIRI) {
Vertex relationshipVertex = singleOrDefault(graph.query(getAuthorizations())
.has(CONCEPT_TYPE.getPropertyName(), TYPE_RELATIONSHIP)
.has(ONTOLOGY_TITLE.getPropertyName(), relationshipIRI)
.limit(QUERY_LIMIT)
.vertices(), null);
if (relationshipVertex == null) {
return null;
}
String from;
String to;
try {
from = single(relationshipVertex.getVertexIds(Direction.IN, new String [] {LabelName.HAS_EDGE.toString()}, getAuthorizations()));
} catch (IllegalStateException ex) {
throw new IllegalStateException(String.format("Wrong number of 'IN' vertices for \"%s\"", relationshipIRI), ex);
}
try {
to = single(relationshipVertex.getVertexIds(Direction.OUT,new String [] {LabelName.HAS_EDGE.toString()}, getAuthorizations()));
} catch (IllegalStateException ex) {
throw new IllegalStateException(String.format("Wrong number of 'OUT' vertices for \"%s\"", relationshipIRI), ex);
}
List<String> inverseOfIRIs = getRelationshipInverseOfIRIs(relationshipVertex);
return new SecureGraphRelationship(relationshipVertex, from, to, inverseOfIRIs);
}
@Override
public Iterable<Concept> getConcepts() {
return getConcepts(false);
}
@Override
public Iterable<Concept> getConceptsWithProperties() {
try {
return allConceptsWithPropertiesCache.get("", new TimingCallable<List<Concept>>("getConceptsWithProperties") {
@Override
public List<Concept> callWithTime() throws Exception {
return toList(getConcepts(true));
}
});
} catch (ExecutionException e) {
throw new LumifyException("could not get concepts with properties", e);
}
}
private Iterable<Concept> getConcepts(final boolean withProperties) {
return new ConvertingIterable<Vertex, Concept>(graph.query(getAuthorizations())
.has(CONCEPT_TYPE.getPropertyName(), TYPE_CONCEPT)
.limit(QUERY_LIMIT)
.vertices()) {
@Override
protected Concept convert(Vertex vertex) {
if (withProperties) {
List<OntologyProperty> conceptProperties = getPropertiesByVertexNoRecursion(vertex);
Vertex parentConceptVertex = getParentConceptVertex(vertex);
String parentConceptIRI = ONTOLOGY_TITLE.getPropertyValue(parentConceptVertex);
return new SecureGraphConcept(vertex, parentConceptIRI, conceptProperties);
} else {
return new SecureGraphConcept(vertex);
}
}
};
}
private Concept getRootConcept() {
return getConceptByIRI(SecureGraphOntologyRepository.ROOT_CONCEPT_IRI);
}
@Override
public Concept getEntityConcept() {
return getConceptByIRI(SecureGraphOntologyRepository.ENTITY_CONCEPT_IRI);
}
private List<Concept> getChildConcepts(Concept concept) {
Vertex conceptVertex = ((SecureGraphConcept) concept).getVertex();
return toConcepts(conceptVertex.getVertices(Direction.IN, LabelName.IS_A.toString(), getAuthorizations()));
}
@Override
public Concept getParentConcept(final Concept concept) {
Vertex parentConceptVertex = getParentConceptVertex(((SecureGraphConcept) concept).getVertex());
if (parentConceptVertex == null) {
return null;
}
return new SecureGraphConcept(parentConceptVertex);
}
private List<Concept> toConcepts(Iterable<Vertex> vertices) {
ArrayList<Concept> concepts = new ArrayList<Concept>();
for (Vertex vertex : vertices) {
concepts.add(new SecureGraphConcept(vertex));
}
return concepts;
}
@Override
public Concept getConceptByIRI(String conceptIRI) {
// use the query API instead of the getVertex API to ensure we use the search index
// to ensure the ontology has been indexed.
Vertex conceptVertex = singleOrDefault(graph.query(getAuthorizations())
.has(CONCEPT_TYPE.getPropertyName(), TYPE_CONCEPT)
.has(ONTOLOGY_TITLE.getPropertyName(), conceptIRI)
.limit(QUERY_LIMIT)
.vertices(), null);
return conceptVertex != null ? new SecureGraphConcept(conceptVertex) : null;
}
private List<OntologyProperty> getPropertiesByVertexNoRecursion(Vertex vertex) {
return toList(new ConvertingIterable<Vertex, OntologyProperty>(vertex.getVertices(Direction.OUT, LabelName.HAS_PROPERTY.toString(), getAuthorizations())) {
@Override
protected OntologyProperty convert(Vertex o) {
return new SecureGraphOntologyProperty(o);
}
});
}
@Override
public List<Concept> getConceptAndChildrenByIRI(String conceptIRI) {
ArrayList<Concept> concepts = new ArrayList<Concept>();
Concept concept = getConceptByIRI(conceptIRI);
if (concept == null) {
return null;
}
concepts.add(concept);
List<Concept> children = getChildConcepts(concept);
concepts.addAll(children);
return concepts;
}
@Override
public List<Concept> getAllLeafNodesByConcept(Concept concept) {
List<Concept> childConcepts = getChildConcepts(concept);
List<Concept> parent = Lists.newArrayList(concept);
if (childConcepts.size() > 0) {
List<Concept> childrenList = new ArrayList<Concept>();
for (Concept childConcept : childConcepts) {
List<Concept> child = getAllLeafNodesByConcept(childConcept);
childrenList.addAll(child);
}
parent.addAll(childrenList);
}
return parent;
}
@Override
public Concept getOrCreateConcept(Concept parent, String conceptIRI, String displayName, File inDir) {
Concept concept = getConceptByIRI(conceptIRI);
if (concept != null) {
return concept;
}
VertexBuilder builder = graph.prepareVertex(ID_PREFIX_CONCEPT + conceptIRI, VISIBILITY.getVisibility());
CONCEPT_TYPE.setProperty(builder, TYPE_CONCEPT, VISIBILITY.getVisibility());
ONTOLOGY_TITLE.setProperty(builder, conceptIRI, VISIBILITY.getVisibility());
DISPLAY_NAME.setProperty(builder, displayName, VISIBILITY.getVisibility());
if (conceptIRI.equals(OntologyRepository.ENTITY_CONCEPT_IRI)) {
LumifyProperties.TITLE_FORMULA.setProperty(builder, "prop('http://lumify.io#title') || ''", VISIBILITY.getVisibility());
LumifyProperties.SUBTITLE_FORMULA.setProperty(builder, "prop('http://lumify.io#source') || ''", VISIBILITY.getVisibility());
LumifyProperties.TIME_FORMULA.setProperty(builder, "prop('http://lumify.io#publishedDate') || ''", VISIBILITY.getVisibility());
}
Vertex vertex = builder.save(getAuthorizations());
concept = new SecureGraphConcept(vertex);
if (parent != null) {
findOrAddEdge(((SecureGraphConcept) concept).getVertex(), ((SecureGraphConcept) parent).getVertex(), LabelName.IS_A.toString());
}
graph.flush();
return concept;
}
protected void findOrAddEdge(Vertex fromVertex, final Vertex toVertex, String edgeLabel) {
List<Vertex> matchingEdges = toList(new FilterIterable<Vertex>(fromVertex.getVertices(Direction.OUT, edgeLabel, getAuthorizations())) {
@Override
protected boolean isIncluded(Vertex vertex) {
return vertex.getId().equals(toVertex.getId());
}
});
if (matchingEdges.size() > 0) {
return;
}
String edgeId = fromVertex.getId() + "-" + toVertex.getId();
fromVertex.getGraph().addEdge(edgeId, fromVertex, toVertex, edgeLabel, VISIBILITY.getVisibility(), getAuthorizations());
}
@Override
public OntologyProperty addPropertyTo(
Concept concept,
String propertyIRI,
String displayName,
PropertyType dataType,
JSONObject possibleValues,
Collection<TextIndexHint> textIndexHints,
boolean userVisible,
boolean searchable,
Boolean displayTime,
Double boost) {
checkNotNull(concept, "vertex was null");
OntologyProperty property = getOrCreatePropertyType(concept, propertyIRI, dataType, displayName, possibleValues, textIndexHints, userVisible, searchable, displayTime, boost);
checkNotNull(property, "Could not find property: " + propertyIRI);
findOrAddEdge(((SecureGraphConcept) concept).getVertex(), ((SecureGraphOntologyProperty) property).getVertex(), LabelName.HAS_PROPERTY.toString());
graph.flush();
return property;
}
@Override
protected void getOrCreateInverseOfRelationship(Relationship fromRelationship, Relationship inverseOfRelationship) {
checkNotNull(fromRelationship, "fromRelationship is required");
checkNotNull(fromRelationship, "inverseOfRelationship is required");
SecureGraphRelationship fromRelationshipSg = (SecureGraphRelationship) fromRelationship;
SecureGraphRelationship inverseOfRelationshipSg = (SecureGraphRelationship) inverseOfRelationship;
Vertex fromVertex = fromRelationshipSg.getVertex();
checkNotNull(fromVertex, "fromVertex is required");
Vertex inverseVertex = inverseOfRelationshipSg.getVertex();
checkNotNull(inverseVertex, "inverseVertex is required");
findOrAddEdge(fromVertex, inverseVertex, LabelName.INVERSE_OF.toString());
findOrAddEdge(inverseVertex, fromVertex, LabelName.INVERSE_OF.toString());
}
@Override
public Relationship getOrCreateRelationshipType(Concept from, Concept to, String relationshipIRI, String displayName) {
Relationship relationship = getRelationshipByIRI(relationshipIRI);
if (relationship != null) {
return relationship;
}
VertexBuilder builder = graph.prepareVertex(ID_PREFIX_RELATIONSHIP + relationshipIRI + "-" + from.getIRI() + "-" + to.getIRI(), VISIBILITY.getVisibility());
CONCEPT_TYPE.setProperty(builder, TYPE_RELATIONSHIP, VISIBILITY.getVisibility());
ONTOLOGY_TITLE.setProperty(builder, relationshipIRI, VISIBILITY.getVisibility());
DISPLAY_NAME.setProperty(builder, displayName, VISIBILITY.getVisibility());
Vertex relationshipVertex = builder.save(getAuthorizations());
findOrAddEdge(((SecureGraphConcept) from).getVertex(), relationshipVertex, LabelName.HAS_EDGE.toString());
findOrAddEdge(relationshipVertex, ((SecureGraphConcept) to).getVertex(), LabelName.HAS_EDGE.toString());
List<String> inverseOfIRIs = new ArrayList<String>(); // no inverse of because this relationship is new
graph.flush();
return new SecureGraphRelationship(relationshipVertex, from.getTitle(), to.getTitle(), inverseOfIRIs);
}
private OntologyProperty getOrCreatePropertyType(
final Concept concept,
final String propertyName,
final PropertyType dataType,
final String displayName,
JSONObject possibleValues,
Collection<TextIndexHint> textIndexHints,
boolean userVisible,
boolean searchable,
Boolean displayTime,
Double boost) {
OntologyProperty typeProperty = getProperty(propertyName);
if (typeProperty == null) {
DefinePropertyBuilder definePropertyBuilder = graph.defineProperty(propertyName);
definePropertyBuilder.dataType(PropertyType.getTypeClass(dataType));
if (dataType == PropertyType.STRING) {
definePropertyBuilder.textIndexHint(textIndexHints);
}
if (boost != null) {
if (graph.isFieldBoostSupported()) {
definePropertyBuilder.boost(boost);
} else {
LOGGER.warn("Field boosting is not support by the graph");
}
}
definePropertyBuilder.define();
VertexBuilder builder = graph.prepareVertex(ID_PREFIX_PROPERTY + concept.getIRI() + "_" + propertyName, VISIBILITY.getVisibility());
CONCEPT_TYPE.setProperty(builder, TYPE_PROPERTY, VISIBILITY.getVisibility());
ONTOLOGY_TITLE.setProperty(builder, propertyName, VISIBILITY.getVisibility());
DATA_TYPE.setProperty(builder, dataType.toString(), VISIBILITY.getVisibility());
USER_VISIBLE.setProperty(builder, userVisible, VISIBILITY.getVisibility());
SEARCHABLE.setProperty(builder, searchable, VISIBILITY.getVisibility());
if (displayTime != null) {
DISPLAY_TIME.setProperty(builder, displayTime, VISIBILITY.getVisibility());
}
if (boost != null) {
BOOST.setProperty(builder, boost, VISIBILITY.getVisibility());
}
if (displayName != null && !displayName.trim().isEmpty()) {
DISPLAY_NAME.setProperty(builder, displayName.trim(), VISIBILITY.getVisibility());
}
if (possibleValues != null) {
POSSIBLE_VALUES.setProperty(builder, possibleValues, VISIBILITY.getVisibility());
}
typeProperty = new SecureGraphOntologyProperty(builder.save(getAuthorizations()));
graph.flush();
}
return typeProperty;
}
/**
 * Resolves the single IS_A parent of the given concept vertex, or {@code null} when the
 * concept has no parent. More than one parent is treated as corrupt repository state.
 */
private Vertex getParentConceptVertex(Vertex conceptVertex) {
    try {
        // singleOrDefault is expected to throw IllegalArgumentException when more
        // than one IS_A edge matches — TODO confirm against its contract.
        return singleOrDefault(
                conceptVertex.getVertices(Direction.OUT, LabelName.IS_A.toString(), getAuthorizations()),
                null);
    } catch (IllegalArgumentException badParentCount) {
        String message = String.format("Unexpected number of parents for concept %s",
                TITLE.getPropertyValue(conceptVertex));
        throw new IllegalStateException(message, badParentCount);
    }
}
// Accessor for the authorizations applied to every graph read/write in this repository.
private Authorizations getAuthorizations() {
    return authorizations;
}
}
|
package org.jetel.connection.jdbc.specific.impl;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Collection;
import java.util.Properties;
import org.jetel.connection.jdbc.specific.conn.MSAccessPureJavaConnection;
import org.jetel.database.sql.DBConnection;
import org.jetel.database.sql.SqlConnection;
import org.jetel.exception.ConfigurationProblem;
import org.jetel.exception.ConfigurationStatus;
import org.jetel.exception.JetelException;
import org.jetel.graph.Node;
import org.jetel.metadata.DataFieldMetadata;
import org.jetel.metadata.DataFieldType;
import org.jetel.metadata.DataRecordMetadata;
/**
 * JDBC specific for accessing MS Access databases through a pure-Java driver (UCanAccess).
 * Handles driver quirks: forcing the {@code showschema} connection property, bracket-style
 * identifier quoting, and type-mapping restrictions of the driver.
 */
public class MSAccessPureJavaSpecific extends AbstractJdbcSpecific {

    /** Shared singleton instance. */
    private static final MSAccessPureJavaSpecific INSTANCE = new MSAccessPureJavaSpecific();

    /** Suffix appended to type-restriction error messages. */
    private static final String CONVERT_STRING = "Convert the field to another type or use another matching field type.";

    /** Driver property that enables schema browsing / metadata extraction. */
    private static final String SHOW_SCHEMA_PROPERTY = "showschema";

    protected MSAccessPureJavaSpecific() {
        super();
    }

    public static MSAccessPureJavaSpecific getInstance() {
        return INSTANCE;
    }

    /**
     * Connects via the parent implementation, first ensuring the {@code showschema}
     * property is present either in the URL or in the custom properties; the driver
     * needs it for browsing and metadata extraction.
     */
    @Override
    public Connection connect(Driver driver, String url, Properties info) throws SQLException {
        boolean schemaPropertyPresent = url.toLowerCase().replaceAll("\\s", "").contains(SHOW_SCHEMA_PROPERTY + "=");
        if (!schemaPropertyPresent) {
            // search the custom properties
            for (Object key : info.keySet()) {
                // Idiom fix: equalsIgnoreCase instead of toLowerCase().equals().
                if (key.toString().equalsIgnoreCase(SHOW_SCHEMA_PROPERTY)) {
                    schemaPropertyPresent = true;
                    break;
                }
            }
        }
        if (!schemaPropertyPresent) {
            info.put(SHOW_SCHEMA_PROPERTY, "true"); // needed for browsing, metadata extraction
        }
        return super.connect(driver, url, info);
    }

    @Override
    public ClassLoader getDriverClassLoaderParent() {
        // ucanaccess needs commons-logging that is on clover classpath
        return Thread.currentThread().getContextClassLoader();
    }

    @Override
    public SqlConnection createSQLConnection(DBConnection dbConnection, Connection connection, OperationType operationType) throws JetelException {
        return new MSAccessPureJavaConnection(dbConnection, connection, operationType);
    }

    @Override
    public String getDbFieldPattern() {
        // allows white spaces
        return "([\\s\\p{Alnum}\\._]+)|([\"\'][\\s\\p{Alnum}\\._ ]+[\"\'])";
    }

    /** Strings are quoted the same way as identifiers (square brackets). */
    @Override
    public String quoteString(String string) {
        return quoteIdentifier(string);
    }

    /** MS Access quotes identifiers with square brackets. */
    @Override
    public String quoteIdentifier(String identifier) {
        return ('[' + identifier + ']');
    }

    /**
     * Flags every {@code long} field as a configuration error, since the underlying
     * driver cannot handle that type.
     */
    @Override
    public ConfigurationStatus checkMetadata(ConfigurationStatus status, Collection<DataRecordMetadata> metadata, Node node) {
        for (DataRecordMetadata dataRecordMetadata : metadata) {
            for (DataFieldMetadata dataField : dataRecordMetadata.getFields()) {
                switch (dataField.getDataType()) {
                case LONG:
                    status.add(new ConfigurationProblem("Metadata on input port must not use field of type long " + "because of restrictions of used driver." + CONVERT_STRING, ConfigurationStatus.Severity.ERROR, node, ConfigurationStatus.Priority.NORMAL));
                    break;
                default:
                    break;
                }
            }
        }
        return status;
    }

    @Override
    public String sqlType2str(int sqlType) {
        switch (sqlType) {
        case Types.TIMESTAMP:
            // MS Access expresses SQL TIMESTAMP as DATETIME.
            return "DATETIME";
        }
        return super.sqlType2str(sqlType);
    }

    @Override
    public int jetelType2sql(DataFieldMetadata field) {
        switch (field.getDataType()) {
        case NUMBER:
            return Types.DOUBLE;
        default:
            return super.jetelType2sql(field);
        }
    }

    @Override
    public char sqlType2jetel(int sqlType) {
        switch (sqlType) {
        case Types.BIT:
            return DataFieldType.BOOLEAN.getShortName();
        default:
            return super.sqlType2jetel(sqlType);
        }
    }

    /**
     * Builds the table prefix from schema and owner. A null owner is treated as empty
     * and is then skipped entirely.
     */
    @Override
    public String getTablePrefix(String schema, String owner, boolean quoteIdentifiers) {
        String tablePrefix;
        String notNullOwner = (owner == null) ? "" : owner;
        if (quoteIdentifiers) {
            tablePrefix = quoteIdentifier(schema);
            // in case when owner is empty or null skip adding
            if (!notNullOwner.isEmpty()) {
                tablePrefix += quoteIdentifier(notNullOwner);
            }
        } else {
            tablePrefix = notNullOwner.isEmpty() ? schema : (schema + "." + notNullOwner);
        }
        return tablePrefix;
    }
}
|
package io.cattle.platform.engine.idempotent;
import io.cattle.platform.archaius.util.ArchaiusUtil;
import io.cattle.platform.util.exception.ExceptionUtils;
import java.util.HashSet;
import java.util.Set;
import org.apache.cloudstack.managed.threadlocal.ManagedThreadLocal;
import org.apache.commons.lang3.ObjectUtils;
import com.netflix.config.DynamicBooleanProperty;
/**
 * Idempotency harness: when enabled, an operation is executed a second time and the two
 * results must be equal. A tracked change (see {@link #change}) can abort and retry the
 * current execution. All bookkeeping lives in a per-thread set of "traces".
 */
public class Idempotent {
    // Sentinel entries stored in the thread-local trace set alongside real exception traces.
    private static final String DISABLE = "_disable";
    private static final String IN_EXCEPTION = "_inexception";
    // Upper bound on retries (per pass) triggered by IdempotentRetryException.
    private static final int LOOP_MAX = 1000;
    // "idempotent.reexecute": when true, operations are run twice and results compared.
    private static final DynamicBooleanProperty RUN_MULTIPLE_TIMES = ArchaiusUtil.getBoolean("idempotent.reexecute");
    // "idempotent.abort.on.change": when true, change() aborts/retries the execution.
    private static final DynamicBooleanProperty ABORT_ON_CHANGE = ArchaiusUtil.getBoolean("idempotent.abort.on.change");
    // Per-thread trace set; installed only by the outermost execute() on the thread.
    private static final ThreadLocal<Set<String>> IDEMPOTENT = new ManagedThreadLocal<Set<String>>();
    /**
     * Runs the execution, optionally twice to verify idempotency, retrying whenever an
     * IdempotentRetryException bound to this frame's trace set is raised.
     *
     * @return result of the first pass
     * @throws OperationNotIdemponent if the verification pass returns a different result
     */
    public static <T> T execute(IdempotentExecution<T> execution) {
        Set<String> traces = null;
        try {
            if (ABORT_ON_CHANGE.get()) {
                traces = new HashSet<String>();
                // Only install our set if no outer execute() already owns the thread-local.
                if (IDEMPOTENT.get() == null) {
                    IDEMPOTENT.set(traces);
                }
            }
            T result = null;
            // i == 0: first pass (result recorded); i == 1: verification pass.
            outer: for (int i = 0; i < 2; i++) {
                for (int j = 0; j < LOOP_MAX; j++) {
                    try {
                        T resultAgain = execution.execute();
                        if (i == 0) {
                            result = resultAgain;
                        }
                        // Skip verification when disabled, nested, or re-execution is off.
                        if (isDisabled(traces) || isNested(traces) || !RUN_MULTIPLE_TIMES.get()) {
                            break outer;
                        }
                        if (!ObjectUtils.equals(result, resultAgain)) {
                            throw new OperationNotIdemponent("Result [" + result + "] does not match second result [" + resultAgain + "]");
                        }
                        break;
                    } catch (IdempotentRetryException e) {
                        // Only retry exceptions raised against our own trace set.
                        if (IDEMPOTENT.get() != traces)
                            throw e;
                        IDEMPOTENT.get().remove(IN_EXCEPTION);
                        if (j == LOOP_MAX - 1) {
                            throw new IllegalStateException("Executed [" + execution + "] " + LOOP_MAX + " times and never completed traces [" + traces + "]");
                        }
                    }
                }
            }
            return result;
        } finally {
            // Clear the thread-local only if this frame installed it.
            if (traces != null && !isNested(traces)) {
                IDEMPOTENT.remove();
            }
        }
    }
    // True when a different (outer) execute() owns the thread-local trace set.
    protected static boolean isNested(Set<String> traces) {
        return IDEMPOTENT.get() != traces;
    }
    /**
     * Records a "change" at the current call site. The first time a given call site
     * (identified by its exception stack trace string) is seen, the surrounding
     * execute() is aborted and retried; on subsequent passes the execution proceeds.
     */
    public static <T> T change(IdempotentExecution<T> execution) {
        Set<String> traces = IDEMPOTENT.get();
        if (traces != null && !isDisabled(traces)) {
            IdempotentRetryException e = new IdempotentRetryException();
            String trace = ExceptionUtils.toString(e);
            if (!traces.contains(trace)) {
                traces.add(trace);
                // IN_EXCEPTION limits aborts to one per retry cycle.
                if (!IDEMPOTENT.get().contains(IN_EXCEPTION)) {
                    IDEMPOTENT.get().add(IN_EXCEPTION);
                    throw e;
                }
            }
        }
        return execution.execute();
    }
    // Disabled when no trace set is active or the DISABLE sentinel is present.
    protected static boolean isDisabled(Set<String> traces) {
        return traces == null || traces.contains(DISABLE);
    }
    /**
     * Runs the runnable with idempotency checking disabled, restoring the previous
     * state afterwards (restore is a no-op if checking was already disabled).
     */
    public static void disable(Runnable runnable) {
        Set<String> traces = IDEMPOTENT.get();
        boolean alreadyDisabled = traces != null && traces.contains(DISABLE);
        if (!alreadyDisabled && traces != null) {
            traces.add(DISABLE);
        }
        try {
            runnable.run();
        } finally {
            if (!alreadyDisabled && traces != null) {
                traces.remove(DISABLE);
            }
        }
    }
    /**
     * Disables idempotency checking for the remainder of the current execution;
     * unlike {@link #disable}, the flag is not restored.
     */
    public static void tempDisable() {
        Set<String> traces = IDEMPOTENT.get();
        if (traces != null) {
            traces.add(DISABLE);
        }
    }
}
|
package com.common.android.utils.storage;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import com.orhanobut.hawk.Hawk;
/**
 * Memoizing wrapper around a single Hawk key/value pair: reads hit an in-memory cache
 * and only fall through to Hawk when the cache is dirty or never loaded.
 *
 * @param <T> type of the stored value
 */
public class CachedHawkValue<T> {

    /** Hawk storage key this handle is bound to. */
    @NonNull
    private final String key;

    /** Last value read from or written to Hawk; authoritative while not dirty. */
    @Nullable
    private T cachedValue;

    /** When true, the next {@link #get()} re-reads the value from Hawk. */
    private boolean isDirty;

    /**
     * Creates a lazy handle: nothing is written, and the first {@link #get()} loads
     * the value from Hawk.
     */
    public CachedHawkValue(@NonNull final String key) {
        this.key = key;
        isDirty = true;
    }

    /**
     * Creates a handle and immediately persists {@code value} under {@code key};
     * the cache starts clean.
     */
    public CachedHawkValue(@NonNull final String key, @NonNull final T value) {
        Hawk.put(key, value);
        this.key = key;
        isDirty = false;
        cachedValue = value;
    }

    /** Returns the value, re-reading from Hawk when dirty or not yet loaded. */
    @Nullable
    public T get() {
        if (isDirty || cachedValue == null) {
            cachedValue = Hawk.get(key);
            isDirty = false;
        }
        return cachedValue;
    }

    /**
     * Persists {@code value} to Hawk and updates the in-memory cache.
     *
     * <p>Fix: this method previously also called {@link #invalidate()}, which forced the
     * very next {@link #get()} to re-read from Hawk the value that was just written and
     * cached. The cache is in sync with the store at this point, so no invalidation is
     * needed.
     */
    public void set(@Nullable final T value) {
        Hawk.put(key, value);
        cachedValue = value;
    }

    /** Marks the cache stale so the next {@link #get()} re-reads from Hawk. */
    public void invalidate() {
        isDirty = true;
    }
}
|
package org.opendaylight.yangtools.yang.common;
import static java.util.Objects.requireNonNull;
import com.google.common.base.MoreObjects;
import com.google.common.collect.Interner;
import com.google.common.collect.Interners;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.Serializable;
import java.util.Objects;
import java.util.Optional;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import org.opendaylight.yangtools.concepts.Identifier;
import org.opendaylight.yangtools.concepts.Immutable;
import org.opendaylight.yangtools.concepts.WritableObject;
/**
* A {@link XMLNamespace} bound to a particular model {@link Revision}. This is the primary way of identifying a YANG
* module namespace within an effective model world. The reason for this is that we support coexistence of multiple
* module revisions and hence cannot use plain module name or namespace to address them.
*/
public final class QNameModule implements Comparable<QNameModule>, Immutable, Serializable, Identifier, WritableObject {
    private static final Interner<QNameModule> INTERNER = Interners.newWeakInterner();
    private static final long serialVersionUID = 3L;

    private final @NonNull XMLNamespace namespace;
    private final @Nullable Revision revision;

    // Lazily-computed hash; 0 doubles as the "not yet computed" marker.
    private transient int hash = 0;

    private QNameModule(final XMLNamespace namespace, final @Nullable Revision revision) {
        this.namespace = requireNonNull(namespace);
        this.revision = revision;
    }

    /**
     * Return an interned reference to a equivalent QNameModule.
     *
     * @return Interned reference, or this object if it was interned.
     */
    public @NonNull QNameModule intern() {
        return INTERNER.intern(this);
    }

    /**
     * Create a new QName module instance with specified namespace/revision.
     *
     * @param namespace Module namespace
     * @param revision Module revision
     * @return A new, potentially shared, QNameModule instance
     * @throws NullPointerException if any argument is null
     */
    public static @NonNull QNameModule create(final XMLNamespace namespace, final Optional<Revision> revision) {
        return new QNameModule(namespace, revision.orElse(null));
    }

    /**
     * Create a new QName module instance with specified namespace and no revision.
     *
     * @param namespace Module namespace
     * @return A new, potentially shared, QNameModule instance
     * @throws NullPointerException if {@code namespace} is null
     */
    public static @NonNull QNameModule create(final XMLNamespace namespace) {
        return new QNameModule(namespace, null);
    }

    /**
     * Create a new QName module instance with specified namespace/revision.
     *
     * @param namespace Module namespace
     * @param revision Module revision
     * @return A new, potentially shared, QNameModule instance
     * @throws NullPointerException if any argument is null
     */
    public static @NonNull QNameModule create(final XMLNamespace namespace, final @Nullable Revision revision) {
        return new QNameModule(namespace, revision);
    }

    /**
     * Read a QNameModule from a DataInput. The format is expected to match the output format
     * of {@link #writeTo(DataOutput)}.
     *
     * @param in DataInput to read
     * @return A QNameModule instance
     * @throws IOException if I/O error occurs
     */
    public static @NonNull QNameModule readFrom(final DataInput in) throws IOException {
        final XMLNamespace ns = XMLNamespace.of(in.readUTF());
        final String revString = in.readUTF();
        // An empty revision string encodes "no revision" (see writeTo()).
        final Revision rev = revString.isEmpty() ? null : Revision.of(revString);
        return new QNameModule(ns, rev);
    }

    /**
     * Returns the namespace of the module which is specified as argument of YANG Module
     * {@code namespace} keyword.
     *
     * @return XMLNamespace of the namespace of the module
     */
    public @NonNull XMLNamespace getNamespace() {
        return namespace;
    }

    /**
     * Returns the revision date for the module, if present.
     *
     * @return date of the module revision which is specified as argument of YANG Module
     *         {@code revision} keyword
     */
    public @NonNull Optional<Revision> getRevision() {
        return Optional.ofNullable(revision);
    }

    @Override
    @SuppressWarnings("checkstyle:parameterName")
    public int compareTo(final QNameModule o) {
        // Order by namespace first, falling back to revision comparison.
        final int byNamespace = namespace.compareTo(o.namespace);
        return byNamespace != 0 ? byNamespace : Revision.compare(revision, o.revision);
    }

    /**
     * Returns a QNameModule with the same namespace, but with no revision. If this QNameModule
     * does not have a revision, this object is returned.
     *
     * @return a QNameModule with the same namespace, but with no revision.
     */
    public @NonNull QNameModule withoutRevision() {
        if (revision == null) {
            return this;
        }
        return new QNameModule(namespace, null);
    }

    @Override
    public void writeTo(final DataOutput out) throws IOException {
        out.writeUTF(namespace.toString());
        // Absent revision is encoded as an empty string (see readFrom()).
        final Revision rev = revision;
        out.writeUTF(rev != null ? rev.toString() : "");
    }

    @Override
    public int hashCode() {
        int local = hash;
        if (local == 0) {
            local = Objects.hash(namespace, revision);
            hash = local;
        }
        return local;
    }

    @Override
    public boolean equals(final Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof QNameModule)) {
            return false;
        }
        final QNameModule that = (QNameModule) obj;
        return namespace.equals(that.namespace) && Objects.equals(revision, that.revision);
    }

    @Override
    public @NonNull String toString() {
        return MoreObjects.toStringHelper(QNameModule.class).omitNullValues()
            .add("ns", namespace)
            .add("rev", revision)
            .toString();
    }

    Object writeReplace() {
        return new NSv1(this);
    }
}
|
package org.eclipse.kapua.commons.configuration.metatype;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit tests for {@link Password} accessors and string conversion.
 *
 * <p>Fix: no longer extends {@code org.junit.Assert} (a discouraged pattern); every
 * assertion already invokes the {@code Assert} class statically.
 */
public class PasswordTest {
    @Test
    public void testGetPassword() throws Exception {
        Password password = new Password("pass");
        Password password2 = new Password(null);
        Assert.assertEquals("pass", password.getPassword());
        Assert.assertNull(password2.getPassword());
    }

    @Test
    public void testToString() {
        Password password = new Password("pass");
        Password password2 = new Password(null);
        Assert.assertEquals("pass", password.toString());
        // NOTE(review): this re-checks getPassword(); consider asserting on
        // password2.toString() instead once its null-handling is confirmed.
        Assert.assertNull(password2.getPassword());
    }
}
|
package com.yahoo.searchdefinition.processing;
import com.yahoo.config.application.api.DeployLogger;
import com.yahoo.searchdefinition.RankProfile;
import com.yahoo.searchdefinition.RankProfileRegistry;
import com.yahoo.searchdefinition.Search;
import com.yahoo.searchlib.rankingexpression.parser.RankingExpressionParserConstants;
import com.yahoo.vespa.model.container.search.QueryProfiles;
import java.util.HashSet;
import java.util.Set;
import java.util.logging.Level;
/**
* Issues a warning if some macro has a reserved name. This is not necessarily
* an error, as a macro can shadow a built-in function.
*
* @author lesters
*/
public class ReservedMacroNames extends Processor {

    // Fix: was a mutable, package-visible static field. The reserved-name set never
    // changes after class load, so keep it private and final.
    private static final Set<String> reservedNames = getReservedNames();

    public ReservedMacroNames(Search search, DeployLogger deployLogger, RankProfileRegistry rankProfileRegistry, QueryProfiles queryProfiles) {
        super(search, deployLogger, rankProfileRegistry, queryProfiles);
    }

    /**
     * Warns for every macro, in every rank profile, whose name collides with a
     * ranking-expression parser token (a built-in function it may shadow).
     */
    @Override
    public void process() {
        for (RankProfile rp : rankProfileRegistry.allRankProfiles()) {
            for (String macroName : rp.getMacros().keySet()) {
                if (reservedNames.contains(macroName)) {
                    deployLogger.log(Level.WARNING, "Macro \"" + macroName + "\" " +
                                     "in rank profile \"" + rp.getName() + "\" " +
                                     "has a reserved name. This might mean that the macro shadows " +
                                     "the built-in function with the same name."
                    );
                }
            }
        }
    }

    /**
     * Collects every parser token image, stripped of its surrounding quote characters,
     * as the set of reserved names.
     */
    private static Set<String> getReservedNames() {
        Set<String> names = new HashSet<>();
        for (String token : RankingExpressionParserConstants.tokenImage) {
            // tokenImage entries are quoted, e.g. "\"if\"" -> if
            String tokenWithoutQuotes = token.substring(1, token.length()-1);
            names.add(tokenWithoutQuotes);
        }
        return names;
    }
}
|
package robot;
// Default libraries
import java.util.Set;
import java.util.Stack;
import java.util.Vector;
import java.util.HashMap;
import java.util.Iterator;
// Libraries
import robot.*;
import exception.*;
import stackable.*;
/**
* Main class with the constructor of the robot and its data.
* @author Renato Cordeiro Ferreira
*/
public class RVM
{
    // Program, data stack, control stack, label table and RAM of the virtual machine.
    Vector <Command> PROG;
    Stack <Stackable> DATA = new Stack <Stackable> ();
    Vector <Integer> CTRL = new Vector <Integer> ();
    HashMap <String, Integer> LABEL = new HashMap <String, Integer>();
    HashMap <Integer, Stackable> RAM = new HashMap <Integer, Stackable>();

    // Program counter; ctrl() runs until it is set to -1.
    int PC = 0;

    /**
     * Class constructor specifying a 'program' (vector of
     * objects of the class Command) to the RVM.
     *
     * @param PROG Vector of objects of the class Command
     * @see Command
     */
    public RVM(Vector <Command> PROG)
    {
        this.PROG = PROG;
        upload_labels();
    }

    /**
     * Setter method created to upload a new 'program' (vector
     * of object of the class Command) in the RVM.
     *
     * @param PROG Vector of objects of the class Command
     * @see Command
     */
    public void upload(Vector <Command> PROG)
    {
        this.PROG = PROG;
        upload_labels();
    }

    /**
     * Execute 1 assembly instruction (the one at the current PC).
     *
     * @throws SegmentationFaultException
     * @throws InvalidOperationException
     * @throws StackUnderflowException
     * @throws NoLabelFoundException
     * @throws OutOfBoundsException
     * @throws WrongTypeException
     */
    public void exec()
        throws SegmentationFaultException,
               InvalidOperationException,
               StackUnderflowException,
               NoLabelFoundException,
               OutOfBoundsException,
               WrongTypeException
    {
        Command com = this.PROG.elementAt(this.PC);
        String function = com.getFunction ();
        Stackable arg = com.getAttribute ();
        // Call function
        if(function != null)
        {
            try { Function.call(this, function, arg); }
            catch (Exception e) {
                // NOTE(review): exceptions are printed and swallowed here, which keeps
                // the machine running but hides failures; consider rethrowing the
                // declared exception types instead.
                System.out.print(e);
            }
        }
    }

    /**
     * Function responsible for executing the 'program', step by
     * step, until it ends (a jump setting PC to -1 stops the loop).
     *
     * @throws SegmentationFaultException
     * @throws InvalidOperationException
     * @throws StackUnderflowException
     * @throws NoLabelFoundException
     * @throws OutOfBoundsException
     * @throws WrongTypeException
     */
    public void ctrl()
        throws SegmentationFaultException,
               InvalidOperationException,
               StackUnderflowException,
               NoLabelFoundException,
               OutOfBoundsException,
               WrongTypeException
    {
        for(this.PC = 0; this.PC != -1; this.PC++) exec();
    }

    /**
     * Rebuilds the label table from PROG.
     *
     * Bug fix: the previous loop had no bound and relied on Vector.elementAt()
     * returning null past the last element, but elementAt() throws
     * ArrayIndexOutOfBoundsException instead. The loop is now bounded by the
     * program size; the null guard is kept for programs padded with nulls.
     */
    private void upload_labels()
    {
        this.LABEL.clear();
        for(int i = 0; i < this.PROG.size(); i++)
        {
            Command c = this.PROG.elementAt(i);
            if(c == null) break;
            // Upload labels to HashMap.
            // The values are the position -1, to be able to
            // increment in each iteration of a for loop.
            if(c.getLabel() != null) this.LABEL.put(c.getLabel(), i-1);
        }
    }
}
|
package net.loxal.muctool;
/** Tiny smoke-test entry point: prints a fixed placeholder message. */
public class Test {
    public static void main(final String... args) {
        // Explicit type instead of `var`; output text is unchanged.
        final String placeholder = "placeholder";
        System.out.println("placeholder: " + placeholder);
    }
}
|
package org.spine3.server.storage.filesystem;
import com.google.protobuf.Any;
import com.google.protobuf.Message;
import org.spine3.protobuf.Messages;
import org.spine3.server.storage.EntityStorage;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Paths;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Throwables.propagate;
import static org.spine3.protobuf.Messages.toAny;
import static org.spine3.server.storage.filesystem.FsUtil.idToStringWithEscaping;
import static org.spine3.util.IoUtil.*;
/**
* An entity storage based on the file system.
*
* @author Alexander Litus
*/
class FsEntityStorage<I, M extends Message> extends EntityStorage<I, M> {
    // Subdirectory (under the storage root) that holds all entity files.
    private static final String ENTITY_STORE_DIR_NAME = "/entity-store/";
    // Absolute path of the entity store directory, including trailing delimiter.
    private final String entityStorageRootPath;
    /**
     * Creates a new storage instance.
     * @param rootDirectoryPath an absolute path to the root storage directory (without the delimiter at the end)
     */
    protected static <I, M extends Message> EntityStorage<I, M> newInstance(String rootDirectoryPath) {
        return new FsEntityStorage<>(rootDirectoryPath);
    }
    private FsEntityStorage(String rootDirectoryPath) {
        this.entityStorageRootPath = rootDirectoryPath + ENTITY_STORE_DIR_NAME;
    }
    /**
     * Reads the entity with the given id from its backing file.
     *
     * @return the stored message, or {@code null} when nothing was stored for the id
     */
    @Override
    public M read(I id) {
        final String idString = idToStringWithEscaping(id);
        final String filePath = createEntityFilePath(idString);
        // NOTE(review): this may create an empty file on a pure read; the exists()
        // check below then reads it and readMessage() yields a null-content result.
        final File file = tryCreateIfDoesNotExist(filePath);
        Message message = null;
        if (file.exists()) {
            message = readMessage(file);
        }
        @SuppressWarnings("unchecked") // We ensure type by writing this kind of messages.
        final M result = (M) message;
        return result;
    }
    /**
     * Writes the message for the given id, replacing any previously stored file.
     */
    @Override
    @SuppressWarnings("DuplicateStringLiteralInspection")
    public void write(I id, M message) {
        checkNotNull(id, "id");
        checkNotNull(message, "message");
        final String idString = idToStringWithEscaping(id);
        final String filePath = createEntityFilePath(idString);
        // Replace semantics: drop any old file before re-creating it.
        deleteIfExists(Paths.get(filePath));
        final File file = tryCreateIfDoesNotExist(filePath);
        // Stored as a packed Any so the concrete type can be recovered on read.
        final Any any = toAny(message);
        FsUtil.writeMessage(file, any);
    }
    /**
     * Reads {@link com.google.protobuf.Message} from {@link java.io.File}.
     *
     * @param file the {@link java.io.File} to read from.
     * @return the message parsed from the file or {@code null}
     */
    private static Message readMessage(File file) {
        checkFileExists(file, "entity storage");
        final InputStream fileInputStream = open(file);
        final InputStream bufferedInputStream = new BufferedInputStream(fileInputStream);
        Any any = Any.getDefaultInstance();
        try {
            // parseDelimitedFrom returns null at end-of-stream (e.g. an empty file).
            any = Any.parseDelimitedFrom(bufferedInputStream);
        } catch (IOException e) {
            throw new RuntimeException("Failed to read message from file: " + file.getAbsolutePath(), e);
        } finally {
            closeSilently(fileInputStream, bufferedInputStream);
        }
        final Message result = (any != null) ? Messages.fromAny(any) : null;
        return result;
    }
    // Maps an escaped entity id to its file path under the entity store root.
    private String createEntityFilePath(String entityId) {
        return entityStorageRootPath + entityId;
    }
    // Wraps createIfDoesNotExist, converting a checked IOException to unchecked.
    private static File tryCreateIfDoesNotExist(String filePath) {
        try {
            return createIfDoesNotExist(filePath);
        } catch (IOException e) {
            throw propagate(e);
        }
    }
}
|
package org.cytoscape.task.internal.export.table;
import java.io.File;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.io.FilenameUtils;
import org.cytoscape.application.CyApplicationManager;
import org.cytoscape.io.CyFileFilter;
import org.cytoscape.io.write.CyTableWriterManager;
import org.cytoscape.io.write.CyTableWriterFactory;
import org.cytoscape.io.write.CyWriter;
import org.cytoscape.model.CyNetwork;
import org.cytoscape.model.CyTable;
import org.cytoscape.task.internal.export.TunableAbstractCyWriter;
import org.cytoscape.work.ProvidesTitle;
import org.cytoscape.work.Tunable;
import org.cytoscape.work.util.ListSingleSelection;
/**
* A utility Task implementation specifically for writing {@link org.cytoscape.model.CyTable} objects.
*/
public final class CyTableWriter extends TunableAbstractCyWriter<CyTableWriterFactory,CyTableWriterManager> {

    /** The table this task writes out. */
    private final CyTable table;

    /**
     * @param writerManager The {@link org.cytoscape.io.write.CyTableWriterManager} used to determine which
     *                      {@link org.cytoscape.io.write.CyTableWriterFactory} to use to write the file.
     * @param table The {@link org.cytoscape.model.CyTable} to be written out; must not be null.
     */
    public CyTableWriter(final CyTableWriterManager writerManager, final CyApplicationManager cyApplicationManager,
                         final CyTable table) {
        super(writerManager, cyApplicationManager);
        if (table == null)
            throw new NullPointerException("Table is null");
        this.table = table;
        // The internal ".cytable" session format is not offered to the user.
        final List<String> selectableTypes = options.getPossibleValues();
        final Iterator<String> typeIterator = selectableTypes.iterator();
        while (typeIterator.hasNext()) {
            if (typeIterator.next().contains(".cytable")) {
                typeIterator.remove();
            }
        }
        options.setPossibleValues(selectableTypes);
        this.outputFile = getSuggestedFile();
    }

    /** Pre-selects the output format whose filter description matches the file's extension. */
    void setDefaultFileFormatUsingFileExt(final File file) {
        final String extension = FilenameUtils.getExtension(file.getName()).toLowerCase().trim();
        final String wildcard = "*." + extension;
        // Use the EXT to determine the default file format
        for (final String filterDescription : this.getFileFilterDescriptions()) {
            if (filterDescription.contains(wildcard)) {
                options.setSelectedValue(filterDescription);
                break;
            }
        }
    }

    @Override
    protected CyWriter getWriter(final CyFileFilter filter) throws Exception {
        return writerManager.getWriter(table, filter, outputStream);
    }

    @Tunable(description="Save Table as:", params="fileCategory=table;input=false", dependsOn="options!=", gravity = 1.1)
    public File getOutputFile() {
        return outputFile;
    }

    @ProvidesTitle
    public String getTitle() {
        return "Export Table";
    }

    @Override
    protected String getExportName() {
        return table.getTitle();
    }
}
|
package org.eclipse.birt.core.archive.compound;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.birt.core.archive.compound.v3.Ext2Entry;
import org.eclipse.birt.core.archive.compound.v3.Ext2File;
import org.eclipse.birt.core.archive.compound.v3.Ext2FileSystem;
import org.eclipse.birt.core.archive.compound.v3.Ext2Node;
/**
 * Compound-archive implementation backed by the v3 embedded file system
 * ({@link Ext2FileSystem}); each archive entry maps to an {@link Ext2File}
 * inside a single physical file.
 */
public class ArchiveFileV3 implements IArchiveFile
{
    // Property keys stored in the embedded file system's property table.
    public static final String PROPERTY_SYSTEM_ID = "archive.system-id";
    public static final String PROPERTY_DEPEND_ID = "archive.depened-id";
    // Underlying embedded file system; set to null once the archive is closed.
    protected Ext2FileSystem fs;
    public ArchiveFileV3( String fileName, String mode ) throws IOException
    {
        this( fileName, null, mode );
    }
    /**
     * Opens (or creates) the archive. Cache manager and cache size are applied
     * according to the global ArchiveFile settings.
     */
    public ArchiveFileV3( String fileName, RandomAccessFile rf, String mode )
            throws IOException
    {
        fs = new Ext2FileSystem( fileName, rf, mode );
        if ( ArchiveFile.enableSystemCache )
        {
            fs.setCacheManager( ArchiveFile.systemCacheManager );
        }
        if ( ArchiveFile.enableFileCache && fs.isRemoveOnExit( ) )
        {
            // NOTE(review): setCacheSize(long) below converts bytes to 4096-byte
            // blocks before calling fs.setCacheSize; confirm FILE_CACHE_SIZE units
            // make this direct call consistent.
            fs.setCacheSize( ArchiveFile.FILE_CACHE_SIZE * 4096 );
        }
    }
    // Closes the embedded file system and marks this archive unusable.
    public void close( ) throws IOException
    {
        if ( fs != null )
        {
            fs.close( );
            fs = null;
        }
    }
    public void setSystemId( String id )
    {
        fs.setProperty( PROPERTY_SYSTEM_ID, id );
    }
    public void setDependId( String id )
    {
        fs.setProperty( PROPERTY_DEPEND_ID, id );
    }
    // Creates a new entry (embedded file) with the given name.
    public ArchiveEntry createEntry( String name ) throws IOException
    {
        Ext2File file = fs.createFile( name );
        return new ArchiveEntryV3( file );
    }
    public boolean exists( String name )
    {
        return fs.existFile( name );
    }
    public void flush( ) throws IOException
    {
        fs.flush( );
    }
    public String getDependId( )
    {
        return fs.getProperty( PROPERTY_DEPEND_ID );
    }
    /**
     * Opens an existing entry by name.
     * @throws FileNotFoundException when no such entry exists
     */
    public ArchiveEntry openEntry( String name ) throws IOException
    {
        if ( fs.existFile( name ) )
        {
            Ext2File file = fs.openFile( name );
            return new ArchiveEntryV3( file );
        }
        throw new FileNotFoundException( name );
    }
    public String getName( )
    {
        return fs.getFileName( );
    }
    public String getSystemId( )
    {
        return fs.getProperty( PROPERTY_SYSTEM_ID );
    }
    // Used cache size in bytes (the file system accounts cache in 4096-byte blocks).
    public long getUsedCache( )
    {
        return (long) fs.getUsedCacheSize( ) * 4096;
    }
    // Lists entry names starting with the given prefix (raw List per interface).
    public List listEntries( String namePattern )
    {
        ArrayList<String> files = new ArrayList<String>( );
        for ( String file : fs.listFiles( ) )
        {
            if ( file.startsWith( namePattern ) )
            {
                files.add( file );
            }
        }
        return files;
    }
    /**
     * Returns a lock token (the Ext2Entry) for the named entry, creating the entry
     * first when it is missing and the archive is writable.
     * @throws FileNotFoundException when the entry cannot be found or created
     */
    public synchronized Object lockEntry( String name ) throws IOException
    {
        if ( !fs.existFile( name ) )
        {
            if ( !fs.isReadOnly( ) )
            {
                Ext2File file = fs.createFile( name );
                file.close( );
            }
        }
        Ext2Entry entry = fs.getEntry( name );
        if ( entry != null )
        {
            return entry;
        }
        throw new FileNotFoundException( name );
    }
    // No-op: this implementation has nothing to refresh.
    public void refresh( ) throws IOException
    {
    }
    // Removes the named entry; always reports success.
    public boolean removeEntry( String name ) throws IOException
    {
        fs.removeFile( name );
        return true;
    }
    // Persists the archive: cancel remove-on-exit, then flush to disk.
    public void save( ) throws IOException
    {
        fs.setRemoveOnExit( false );
        fs.flush( );
    }
    // Converts a byte count to 4096-byte blocks, clamping to Integer.MAX_VALUE.
    public void setCacheSize( long cacheSize )
    {
        long cacheBlock = cacheSize / 4096;
        if ( cacheBlock > Integer.MAX_VALUE )
        {
            fs.setCacheSize( Integer.MAX_VALUE );
        }
        else
        {
            fs.setCacheSize( (int) cacheBlock );
        }
    }
    // Unlocking only validates the token type; no state is released here.
    synchronized public void unlockEntry( Object locker ) throws IOException
    {
        assert ( locker instanceof Ext2Entry );
    }
    /** Adapter exposing an Ext2File through the ArchiveEntry interface. */
    private static class ArchiveEntryV3 extends ArchiveEntry
    {
        Ext2File file;
        ArchiveEntryV3( Ext2File file )
        {
            this.file = file;
        }
        public String getName( )
        {
            return file.getName( );
        }
        public long getLength( ) throws IOException
        {
            return file.length( );
        }
        public void close( ) throws IOException
        {
            file.close( );
        }
        @Override
        public void flush( ) throws IOException
        {
        }
        // NOTE(review): seek-then-read is two operations; concurrent access to one
        // entry must be synchronized externally — confirm callers do so.
        @Override
        public int read( long pos, byte[] b, int off, int len )
                throws IOException
        {
            file.seek( pos );
            return file.read( b, off, len );
        }
        @Override
        public void refresh( ) throws IOException
        {
        }
        @Override
        public void setLength( long length ) throws IOException
        {
            file.setLength( length );
        }
        @Override
        public void write( long pos, byte[] b, int off, int len )
                throws IOException
        {
            file.seek( pos );
            file.write( b, off, len );
        }
    }
}
|
/* Generated By:JJTree: Do not edit this line. OIsNotNullCondition.java Version 4.3 */
/* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=true,TRACK_TOKENS=true,NODE_PREFIX=O,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
package com.orientechnologies.orient.core.sql.parser;
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.sql.executor.OResult;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Boolean condition implementing SQL {@code <expr> IS NOT NULL}, including the
 * any()/all() expansions over all properties of a result row.
 */
public class OIsNotNullCondition extends OBooleanExpression {

    protected OExpression expression;

    public OIsNotNullCondition(int id) {
        super(id);
    }

    public OIsNotNullCondition(OrientSql p, int id) {
        super(p, id);
    }

    /** True when the wrapped expression evaluates to a non-null value for the record. */
    @Override
    public boolean evaluate(OIdentifiable currentRecord, OCommandContext ctx) {
        return expression.execute(currentRecord, ctx) != null;
    }

    /**
     * Evaluates against a result row. any()/all() expand over every property of the
     * row; otherwise the single expression value is tested.
     */
    @Override
    public boolean evaluate(OResult currentRecord, OCommandContext ctx) {
        if (expression.isFunctionAny()) {
            return evaluateAny(currentRecord, ctx);
        }
        if (expression.isFunctionAll()) {
            return evaluateAllFunction(currentRecord, ctx);
        }
        return expression.execute(currentRecord, ctx) != null;
    }

    /** any(): true when at least one property of the row is non-null. */
    private boolean evaluateAny(OResult currentRecord, OCommandContext ctx) {
        for (String propertyName : currentRecord.getPropertyNames()) {
            // Simplified from !(value == null).
            if (currentRecord.getProperty(propertyName) != null) {
                return true;
            }
        }
        return false;
    }

    /** all(): true when every property of the row is non-null. */
    private boolean evaluateAllFunction(OResult currentRecord, OCommandContext ctx) {
        for (String propertyName : currentRecord.getPropertyNames()) {
            if (currentRecord.getProperty(propertyName) == null) {
                return false;
            }
        }
        return true;
    }

    public void toString(Map<Object, Object> params, StringBuilder builder) {
        expression.toString(params, builder);
        builder.append(" IS NOT NULL");
    }

    @Override
    public boolean supportsBasicCalculation() {
        return expression.supportsBasicCalculation();
    }

    @Override
    protected int getNumberOfExternalCalculations() {
        // Exactly one external calculation when the expression cannot be computed inline.
        return expression.supportsBasicCalculation() ? 0 : 1;
    }

    @Override
    protected List<Object> getExternalCalculationConditions() {
        if (!expression.supportsBasicCalculation()) {
            // Type-safe replacement for the previous raw (List) cast.
            return Collections.<Object>singletonList(expression);
        }
        // Type-safe replacement for the raw Collections.EMPTY_LIST.
        return Collections.emptyList();
    }

    @Override
    public boolean needsAliases(Set<String> aliases) {
        return expression.needsAliases(aliases);
    }

    @Override
    public OBooleanExpression copy() {
        OIsNotNullCondition result = new OIsNotNullCondition(-1);
        result.expression = expression.copy();
        return result;
    }

    @Override
    public void extractSubQueries(SubQueryCollector collector) {
        this.expression.extractSubQueries(collector);
    }

    @Override
    public boolean refersToParent() {
        // Simplified from an if/return-true/return-false chain.
        return expression != null && expression.refersToParent();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        OIsNotNullCondition that = (OIsNotNullCondition) o;
        return expression == null ? that.expression == null : expression.equals(that.expression);
    }

    @Override
    public int hashCode() {
        return expression != null ? expression.hashCode() : 0;
    }

    @Override
    public List<String> getMatchPatternInvolvedAliases() {
        return expression.getMatchPatternInvolvedAliases();
    }

    @Override
    public boolean isCacheable() {
        return expression.isCacheable();
    }
}
/* JavaCC - OriginalChecksum=a292fa8a629abb7f6fe72a627fc91361 (do not edit this line) */
|
package de.unistuttgart.ims.drama.main;
import static org.apache.uima.fit.factory.AnalysisEngineFactory.createEngineDescription;
import java.io.File;
import java.io.FilenameFilter;
import org.apache.uima.collection.CollectionReaderDescription;
import org.apache.uima.fit.factory.AggregateBuilder;
import org.apache.uima.fit.factory.CollectionReaderFactory;
import org.apache.uima.fit.pipeline.SimplePipeline;
import org.apache.uima.resource.ResourceInitializationException;
import com.lexicalscope.jewel.cli.CliFactory;
import com.lexicalscope.jewel.cli.Option;
import de.tudarmstadt.ukp.dkpro.core.io.xmi.XmiWriter;
import de.tudarmstadt.ukp.dkpro.core.matetools.MateLemmatizer;
import de.tudarmstadt.ukp.dkpro.core.stanfordnlp.StanfordNamedEntityRecognizer;
import de.tudarmstadt.ukp.dkpro.core.stanfordnlp.StanfordPosTagger;
import de.tudarmstadt.ukp.dkpro.core.tokit.BreakIteratorSegmenter;
import de.unistuttgart.ims.drama.core.ml.gender.ClearTkGenderAnnotator;
import de.unistuttgart.ims.uimautil.SetCollectionId;
import de.unistuttgart.quadrama.core.D;
import de.unistuttgart.quadrama.core.SD;
import de.unistuttgart.quadrama.core.SP;
import de.unistuttgart.quadrama.core.FigureDetailsAnnotator;
import de.unistuttgart.quadrama.core.FigureMentionDetection;
import de.unistuttgart.quadrama.core.FigureReferenceAnnotator;
import de.unistuttgart.quadrama.core.ReadDlinaMetadata;
import de.unistuttgart.quadrama.core.SceneActAnnotator;
import de.unistuttgart.quadrama.core.SetReferenceDate;
import de.unistuttgart.quadrama.core.SpeakerIdentifier;
import de.unistuttgart.quadrama.io.core.AbstractDramaUrlReader;
import de.unistuttgart.quadrama.io.core.ExportAsCSV;
import de.unistuttgart.quadrama.io.core.ExportAsCONLL;
import de.unistuttgart.quadrama.io.tei.CoreTeiReader;
import de.unistuttgart.quadrama.io.tei.GerDraCorReader;
import de.unistuttgart.quadrama.io.tei.MapFiguresToCastFigures;
import de.unistuttgart.quadrama.io.tei.QuaDramAReader;
import de.unistuttgart.quadrama.io.tei.TextgridTEIUrlReader;
import de.unistuttgart.quadrama.io.tei.TheatreClassiqueReader;
import de.unistuttgart.quadrama.io.tei.TurmReader;
/**
 * Command-line tool that reads a TEI-encoded drama corpus, runs the full UIMA
 * annotation pipeline (segmentation, figure/speaker annotation, POS tagging,
 * lemmatization, optional NER and gender classification) and exports the
 * result as XMI, optionally also as CSV and CoNLL.
 */
public class TEI2XMI {

    /** Supported corpora; selects the reader and the default collection id. */
    enum Corpus {
        GERDRACOR, TEXTGRID, TURM, THEATRECLASSIQUE, CORETEI, QUADRAMA
    }

    /**
     * Builds the aggregate pipeline in a fixed order and runs it over the
     * corpus reader selected by {@link #getReader(MyOptions)}.
     * NOTE: engine order matters — segmentation must precede all annotators.
     */
    public static void main(String[] args) throws Exception {
        MyOptions options = CliFactory.parseArguments(MyOptions.class, args);
        CollectionReaderDescription reader = getReader(options);
        AggregateBuilder builder = new AggregateBuilder();
        // Tokenize Utterances
        builder.add(D.getWrappedSegmenterDescription(BreakIteratorSegmenter.class));
        // Tokenize Stage Directions
        builder.add(SD.getWrappedSegmenterDescription(BreakIteratorSegmenter.class));
        // Tokenize Speaker Tags
        //builder.add(SP.getWrappedSegmenterDescription(BreakIteratorSegmenter.class));
        // The TURM corpus needs act/scene structure annotated up front.
        if (options.getCorpus() == Corpus.TURM) {
            builder.add(createEngineDescription(SceneActAnnotator.class));
        }
        builder.add(createEngineDescription(FigureReferenceAnnotator.class));
        // Collection id: an explicit --collectionId option wins; otherwise a
        // short id is derived from the selected corpus.
        if (options.getCollectionId() != null)
            builder.add(createEngineDescription(SetCollectionId.class, SetCollectionId.PARAM_COLLECTION_ID,
                    options.getCollectionId()));
        else {
            switch (options.getCorpus()) {
            case GERDRACOR:
                builder.add(createEngineDescription(SetCollectionId.class, SetCollectionId.PARAM_COLLECTION_ID, "gdc"));
                break;
            case TEXTGRID:
                builder.add(createEngineDescription(SetCollectionId.class, SetCollectionId.PARAM_COLLECTION_ID, "tg"));
                break;
            case TURM:
                builder.add(
                        createEngineDescription(SetCollectionId.class, SetCollectionId.PARAM_COLLECTION_ID, "turm"));
                break;
            case THEATRECLASSIQUE:
                builder.add(createEngineDescription(SetCollectionId.class, SetCollectionId.PARAM_COLLECTION_ID, "tc"));
                break;
            case CORETEI:
                builder.add(
                        createEngineDescription(SetCollectionId.class, SetCollectionId.PARAM_COLLECTION_ID, "ctei"));
                break;
            case QUADRAMA:
                builder.add(createEngineDescription(SetCollectionId.class, SetCollectionId.PARAM_COLLECTION_ID, "qd"));
                break;
            }
        }
        builder.add(createEngineDescription(FigureDetailsAnnotator.class));
        if (!options.isSkipSpeakerIdentifier()) {
            builder.add(createEngineDescription(SpeakerIdentifier.class, SpeakerIdentifier.PARAM_CREATE_SPEAKER_FIGURE,
                    true));
            builder.add(createEngineDescription(MapFiguresToCastFigures.class));
        }
        // Optional DLINA metadata enrichment; also sets the play's reference date.
        if (options.getDlinaDirectory() != null) {
            builder.add(createEngineDescription(ReadDlinaMetadata.class, ReadDlinaMetadata.PARAM_DLINA_DIRECTORY,
                    options.getDlinaDirectory()));
            builder.add(createEngineDescription(SetReferenceDate.class));
        }
        // Optional ClearTK-based gender classification of figures.
        if (options.getGenderModel() != null) {
            builder.add(ClearTkGenderAnnotator.getEngineDescription(options.getGenderModel().getAbsolutePath()));
        }
        builder.add(createEngineDescription(StanfordPosTagger.class));
        builder.add(createEngineDescription(MateLemmatizer.class));
        if (!options.isSkipNER())
            builder.add(createEngineDescription(StanfordNamedEntityRecognizer.class));
        builder.add(createEngineDescription(FigureMentionDetection.class));
        builder.add(SceneActAnnotator.getDescription());
        // Exports: XMI is the primary output; CSV and CoNLL are opt-in.
        if (options.getOutput() != null)
            builder.add(createEngineDescription(XmiWriter.class, XmiWriter.PARAM_TARGET_LOCATION, options.getOutput()));
        if (options.getCSVOutput() != null) {
            builder.add(createEngineDescription(ExportAsCSV.class, ExportAsCSV.PARAM_TARGET_LOCATION,
                    options.getCSVOutput(), ExportAsCSV.PARAM_CSV_VARIANT_NAME, "UtterancesWithTokens"));
            builder.add(createEngineDescription(ExportAsCSV.class, ExportAsCSV.PARAM_TARGET_LOCATION,
                    options.getCSVOutput(), ExportAsCSV.PARAM_CSV_VARIANT_NAME, "StageDirections"));
            builder.add(createEngineDescription(ExportAsCSV.class, ExportAsCSV.PARAM_TARGET_LOCATION,
                    options.getCSVOutput(), ExportAsCSV.PARAM_CSV_VARIANT_NAME, "Segments"));
            builder.add(createEngineDescription(ExportAsCSV.class, ExportAsCSV.PARAM_TARGET_LOCATION,
                    options.getCSVOutput(), ExportAsCSV.PARAM_CSV_VARIANT_NAME, "Metadata"));
            builder.add(createEngineDescription(ExportAsCSV.class, ExportAsCSV.PARAM_TARGET_LOCATION,
                    options.getCSVOutput(), ExportAsCSV.PARAM_CSV_VARIANT_NAME, "Characters"));
            builder.add(createEngineDescription(ExportAsCSV.class, ExportAsCSV.PARAM_TARGET_LOCATION,
                    options.getCSVOutput(), ExportAsCSV.PARAM_CSV_VARIANT_NAME, "Entities"));
        }
        if (options.getCONLLOutput() != null) {
            builder.add(createEngineDescription(ExportAsCONLL.class, ExportAsCONLL.PARAM_TARGET_LOCATION,
                    options.getCONLLOutput(), ExportAsCONLL.PARAM_CONLL_VARIANT_NAME, "CoNLL2012"));
        }
        SimplePipeline.runPipeline(reader, builder.createAggregateDescription());
        // Optional post-processing: clean up every written .xmi file in place.
        // NOTE(review): listFiles() can return null on I/O error — confirm the
        // output directory is always listable at this point.
        if (options.isDoCleanup() && options.getOutput() != null)
            for (File f : options.getOutput().listFiles(new FilenameFilter() {
                @Override
                public boolean accept(File dir, String name) {
                    return name.endsWith("xmi");
                }
            })) {
                XmlCleanup.cleanUp(f);
            }
    }

    /**
     * Resolves a reader class by name, falling back to TextgridTEIUrlReader
     * when the class is missing or is not an AbstractDramaUrlReader.
     * NOTE(review): a ClassNotFoundException only prints a stack trace and
     * silently falls back — consider logging or failing fast.
     */
    @SuppressWarnings("unchecked")
    public static Class<? extends AbstractDramaUrlReader> getReaderClass(String readerClassname) {
        Class<?> cl;
        try {
            cl = Class.forName(readerClassname);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            return TextgridTEIUrlReader.class;
        }
        if (AbstractDramaUrlReader.class.isAssignableFrom(cl))
            return (Class<? extends AbstractDramaUrlReader>) cl;
        return TextgridTEIUrlReader.class;
    }

    /**
     * Command-line options (jewel-cli). getInput() is presumably inherited
     * from the Options super-interface — TODO confirm.
     */
    interface MyOptions extends Options {
        // Directory with DLINA metadata; enables metadata enrichment when set.
        @Option(defaultToNull = true)
        File getDlinaDirectory();

        // Explicit collection id; overrides the corpus-derived default.
        @Option(defaultToNull = true)
        String getCollectionId();

        // Trained gender model file; enables gender annotation when set.
        @Option(defaultToNull = true)
        File getGenderModel();

        // When set, post-process the written .xmi files with XmlCleanup.
        @Option()
        boolean isDoCleanup();

        // Legacy reader selection by class name; superseded by getCorpus().
        @Deprecated
        @Option(defaultValue = "de.unistuttgart.quadrama.io.tei.textgrid.TextgridTEIUrlReader")
        String getReaderClassname();

        // Document language (ISO code); defaults to German.
        @Option(defaultValue = "de")
        String getLanguage();

        // Skip the Stanford named-entity recognizer stage.
        @Option()
        boolean isSkipNER();

        // Skip speaker identification and figure-to-cast mapping.
        @Option()
        boolean isSkipSpeakerIdentifier();

        // Corpus being processed; selects reader and default collection id.
        @Option
        Corpus getCorpus();

        /**
         * Storage of the CSV files. Should be a directory.
         *
         * @return A directory
         */
        @Option(longName = "csvOutput", defaultToNull = true)
        File getCSVOutput();

        /**
         * Storage of the CoNLL files. Should be a directory.
         *
         * @return A directory
         */
        @Option(longName = "conllOutput", defaultToNull = true)
        File getCONLLOutput();
    }

    /** Maps the selected corpus to its collection reader; TEXTGRID is the default. */
    protected static CollectionReaderDescription getReader(MyOptions options) throws ResourceInitializationException {
        switch (options.getCorpus()) {
        case QUADRAMA:
            return CollectionReaderFactory.createReaderDescription(QuaDramAReader.class,
                    AbstractDramaUrlReader.PARAM_INPUT, options.getInput(),
                    AbstractDramaUrlReader.PARAM_REMOVE_XML_ANNOTATIONS, true, AbstractDramaUrlReader.PARAM_LANGUAGE,
                    options.getLanguage());
        case GERDRACOR:
            return CollectionReaderFactory.createReaderDescription(GerDraCorReader.class,
                    AbstractDramaUrlReader.PARAM_INPUT, options.getInput(),
                    AbstractDramaUrlReader.PARAM_REMOVE_XML_ANNOTATIONS, true, AbstractDramaUrlReader.PARAM_LANGUAGE,
                    options.getLanguage());
        case THEATRECLASSIQUE:
            return CollectionReaderFactory.createReaderDescription(TheatreClassiqueReader.class,
                    TheatreClassiqueReader.PARAM_INPUT, options.getInput(),
                    TheatreClassiqueReader.PARAM_REMOVE_XML_ANNOTATIONS, true, TheatreClassiqueReader.PARAM_LANGUAGE,
                    options.getLanguage());
        case CORETEI:
            return CollectionReaderFactory.createReaderDescription(CoreTeiReader.class, CoreTeiReader.PARAM_INPUT,
                    options.getInput(), CoreTeiReader.PARAM_REMOVE_XML_ANNOTATIONS, true, CoreTeiReader.PARAM_LANGUAGE,
                    options.getLanguage());
        case TURM:
            // NOTE(review): language is hard-coded to "de" here instead of
            // options.getLanguage() — confirm this is intentional for TURM.
            return CollectionReaderFactory.createReaderDescription(TurmReader.class, AbstractDramaUrlReader.PARAM_INPUT,
                    options.getInput(), TurmReader.PARAM_REMOVE_XML_ANNOTATIONS, true, TurmReader.PARAM_LANGUAGE, "de");
        case TEXTGRID:
        default:
            return CollectionReaderFactory.createReaderDescription(TextgridTEIUrlReader.class,
                    TextgridTEIUrlReader.PARAM_INPUT, options.getInput(),
                    TextgridTEIUrlReader.PARAM_REMOVE_XML_ANNOTATIONS, true, TextgridTEIUrlReader.PARAM_STRICT, true,
                    TextgridTEIUrlReader.PARAM_LANGUAGE, options.getLanguage());
        }
    }
}
|
package com.galois.qrstream.lib;
import android.hardware.Camera;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import com.galois.qrstream.image.YuvImage;
import com.galois.qrstream.qrpipe.ICaptureFrame;
import com.google.common.collect.Queues;
import org.jetbrains.annotations.NotNull;
/**
* CameraManager services requests for a preview frame from the camera
*/
/**
 * CameraManager services requests for a preview frame from the camera.
 *
 * Singleton. Frames are handed from the UI-thread {@code frameHandler}
 * (producer) to the decoding thread blocked in {@link #getFrame()} (consumer)
 * through a synchronous queue.
 *
 * Fixes relative to the original:
 * - {@code isRunning} is now volatile: it is written by the UI thread
 *   (startRunning/stopRunning) and read by the decoding thread via
 *   {@link #isRunning()}; without volatile the reader might never observe
 *   the stop signal.
 * - {@code == false} replaced with the idiomatic {@code !} negation.
 * - The interrupt status is restored after catching InterruptedException.
 */
public class CameraManager implements ICaptureFrame {

    private static final CameraManager INSTANCE = new CameraManager();

    // Synchronous hand-off: offer() succeeds only while a consumer is waiting in poll().
    private static final BlockingQueue<YuvImage> currentFrame = Queues.newSynchronousQueue();

    // When isRunning is false it signals that the camera is not available
    // and any decoding of QR in progress should be stopped.
    // volatile: written on the UI thread, read from the decoding thread.
    private volatile boolean isRunning = false;

    private Camera camera;
    private Preview previewCallback;

    /** @return the process-wide singleton instance. */
    public static CameraManager getInstance() { return INSTANCE; }

    private CameraManager () {}

    // Handler is bound to the same thread that created the CameraManager
    // i.e. the UI thread. Perhaps this should get moved?
    private static final Handler frameHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            if (msg.obj != null) {
                // offer() fails when no consumer is currently blocked in poll();
                // the frame is dropped in that case.
                if (!currentFrame.offer((YuvImage) msg.obj)) {
                    Log.e(Constants.APP_TAG, "CameraManager tried to set currentFrame before successful read.");
                } else {
                    Log.d(Constants.APP_TAG, "CameraManager set currentFrame.");
                }
            } else {
                // Probably not a big deal as it would just cause qrlib to stop decoding QR codes
                Log.d(Constants.APP_TAG, "CameraManager asked to handle NULL message.");
            }
        }
    };

    /** Singletons must not be cloned. */
    @Override
    public Object clone() throws CloneNotSupportedException {
        throw new CloneNotSupportedException("Cannot clone CameraManager");
    }

    /**
     * Binds this manager to an open camera and its preview callback and marks
     * it running. Any continuous preview callback is cleared; frames are
     * requested one-shot in {@link #setupOneShotPreviewCallback()}.
     */
    public void startRunning(@NotNull Camera camera,
                             @NotNull Preview previewCallback) {
        this.camera = camera;
        this.previewCallback = previewCallback;
        this.isRunning = true;
        camera.setPreviewCallback(null);
    }

    /** Releases the camera references and signals decoders to stop. */
    public void stopRunning() {
        Log.e(Constants.APP_TAG, "CameraManager stopRunning called.");
        // Release camera and callback
        if (camera != null && previewCallback != null) {
            isRunning = false;
            camera.setPreviewCallback(null);
            camera = null;
            previewCallback = null;
        }
    }

    // When isRunning is false, it signals that camera and preview callback are not valid
    // and that any decoding of QR codes should stop.
    @Override
    public boolean isRunning() {
        return isRunning;
    }

    /**
     * Requests a single preview frame and blocks (up to the receive timeout)
     * until the frameHandler delivers it.
     *
     * @return the captured frame, or null on timeout/interrupt.
     */
    @Override
    public synchronized YuvImage captureFrameFromCamera() {
        // Only one thread at a time can request a frame from the camera
        setupOneShotPreviewCallback();
        return getFrame();
    }

    // Waits for preview frame from frameHandler before returning.
    private synchronized YuvImage getFrame() {
        YuvImage img = null;
        try {
            img = currentFrame.poll(Constants.RECEIVE_TIMEOUT_MS, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            // Interrupted while waiting for image. Restore the interrupt status
            // for callers, log, and return a null frame.
            Thread.currentThread().interrupt();
            Log.v(Constants.APP_TAG, "CameraManager interrupted while waiting for image.");
        }
        if (img == null) {
            Log.v(Constants.APP_TAG, "CameraManager received null preview frame.");
        }
        return img;
    }

    // Setup camera callback to handle next preview frame
    private synchronized void setupOneShotPreviewCallback() {
        if ((camera != null) && (previewCallback != null)) {
            previewCallback.setHandler(frameHandler);
            camera.setOneShotPreviewCallback(previewCallback);
        } else {
            if (camera == null) {
                Log.e(Constants.APP_TAG, "Cannot request preview frame when camera is not initialized.");
            }
            if (previewCallback == null) {
                Log.e(Constants.APP_TAG, "Cannot request preview frame from camera without " +
                        "first specifying a handler for the preview frames.");
            }
        }
    }
}
|
package org.cytoscape.ding.impl;
import java.awt.Graphics;
import java.awt.Image;
import java.awt.event.ComponentAdapter;
import java.awt.event.ComponentEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseWheelEvent;
import java.awt.print.Printable;
import java.util.Arrays;
import java.util.Properties;
import javax.swing.Icon;
import javax.swing.JComponent;
import org.cytoscape.ding.impl.DRenderingEngine.UpdateType;
import org.cytoscape.ding.impl.canvas.BirdsEyeViewRenderComponent;
import org.cytoscape.ding.impl.canvas.RenderComponent;
import org.cytoscape.event.DebounceTimer;
import org.cytoscape.model.CyNetwork;
import org.cytoscape.service.util.CyServiceRegistrar;
import org.cytoscape.view.model.CyNetworkViewSnapshot;
import org.cytoscape.view.model.View;
import org.cytoscape.view.model.VisualLexicon;
import org.cytoscape.view.model.VisualProperty;
import org.cytoscape.view.presentation.RenderingEngine;
import org.cytoscape.view.presentation.property.BasicVisualLexicon;
/**
 * Bird's-eye (overview) view of a network: renders the whole network scaled
 * to fit its component and translates mouse gestures into viewport changes on
 * the main rendering engine (click re-centers, drag pans, wheel is delegated).
 * Implements RenderingEngine, but image creation and printing are unsupported.
 */
public final class BirdsEyeView implements RenderingEngine<CyNetwork>, ContentChangeListener {

    // Leaves a small margin around the fitted network (97% of the exact fit).
    private static final double SCALE_FACTOR = 0.97;

    // Reusable extents buffer; filled by getSpacialIndex2D().getMBR() —
    // presumably {xMin, yMin, xMax, yMax}, see how fitCanvasToNetwork indexes it.
    private final double[] extents = new double[4];

    private DRenderingEngine re;
    private final RenderComponent renderComponent;
    private final DebounceTimer contentChangedTimer;

    // NOTE(review): the registrar parameter is not used in this constructor.
    public BirdsEyeView(DRenderingEngine re, CyServiceRegistrar registrar) {
        this.re = re;
        // Render the overview with a reduced level of detail.
        var lod = new BirdsEyeViewLOD(re.getGraphLOD());
        renderComponent = new BirdsEyeViewRenderComponent(re, lod);
        renderComponent.setBackgroundPaint(re.getBackgroundColor());
        renderComponent.showAnnotationSelection(false);
        var mouseListener = new InnerMouseListener();
        renderComponent.addMouseListener(mouseListener);
        renderComponent.addMouseMotionListener(mouseListener);
        renderComponent.addMouseWheelListener(mouseListener);
        re.addTransformChangeListener(renderComponent::repaint);
        re.addContentChangeListener(this);
        contentChangedTimer = new DebounceTimer(200);
        renderComponent.addComponentListener(new ComponentAdapter() {
            @Override
            public void componentResized(ComponentEvent e) {
                // make sure the view is initialized properly, but we have
                // to wait for setBounds() to be called to know the viewport size
                contentChanged();
                e.getComponent().removeComponentListener(this);
            }
        });
    }

    /**
     * Main-view content changed: repaint quickly right away, then (debounced,
     * 200 ms) refit the overview and do a full-quality render.
     */
    @Override
    public void contentChanged() {
        renderComponent.updateView(UpdateType.ALL_FAST);
        contentChangedTimer.debounce(() -> {
            fitCanvasToNetwork();
            renderComponent.setBackgroundPaint(re.getBackgroundColor());
            renderComponent.updateView(UpdateType.ALL_FULL);
        });
    }

    /** The Swing component hosting this overview. */
    public JComponent getComponent() {
        return renderComponent;
    }

    // Image creation is not supported by the bird's-eye view.
    // NOTE(review): returns null rather than throwing — confirm callers tolerate it.
    @Override
    public Image createImage(int width, int height) {
        return null;
    }

    /**
     * Returns the extents of the nodes, in node coordinates.
     */
    private boolean getNetworkExtents(double[] extents) {
        CyNetworkViewSnapshot netViewSnapshot = re.getViewModelSnapshot();
        if (netViewSnapshot.getNodeCount() == 0) {
            // No nodes: zero the buffer and report "nothing to fit".
            Arrays.fill(extents, 0.0);
            return false;
        }
        netViewSnapshot.getSpacialIndex2D().getMBR(extents);
        return true;
    }

    // Re-centers and rescales the overview so the whole network (plus
    // annotations) fits, with the SCALE_FACTOR margin.
    private void fitCanvasToNetwork() {
        boolean hasComponents = getNetworkExtents(extents);
        hasComponents |= re.getCyAnnotator().adjustBoundsToIncludeAnnotations(extents);
        double myXCenter;
        double myYCenter;
        double myScaleFactor;
        if (hasComponents) {
            myXCenter = (extents[0] + extents[2]) / 2.0d;
            myYCenter = (extents[1] + extents[3]) / 2.0d;
            // Fit to the tighter of width/height so everything stays visible.
            myScaleFactor = SCALE_FACTOR *
                    Math.min(((double) renderComponent.getWidth()) / (extents[2] - extents[0]),
                            ((double) renderComponent.getHeight()) / (extents[3] - extents[1]));
        } else {
            // Empty view: identity transform.
            myXCenter = 0.0d;
            myYCenter = 0.0d;
            myScaleFactor = 1.0d;
        }
        renderComponent.setCenter(myXCenter, myYCenter);
        renderComponent.setScaleFactor(myScaleFactor);
    }

    /** Maps mouse gestures on the overview onto the main view's viewport. */
    private final class InnerMouseListener extends MouseAdapter {

        private int currMouseButton = 0;
        private int lastXMousePos = 0;
        private int lastYMousePos = 0;

        @Override public void mousePressed(MouseEvent e) {
            if (e.getButton() == MouseEvent.BUTTON1) {
                currMouseButton = 1;
                lastXMousePos = e.getX(); // needed by drag listener
                lastYMousePos = e.getY();
                // Convert the click from overview component coordinates into
                // node coordinates and center the main view there.
                double myXCenter = renderComponent.getTransform().getCenterX();
                double myYCenter = renderComponent.getTransform().getCenterY();
                double myScaleFactor = renderComponent.getTransform().getScaleFactor();
                double halfWidth = (double) renderComponent.getWidth() / 2.0d;
                double halfHeight = (double) renderComponent.getHeight() / 2.0d;
                double centerX = ((lastXMousePos - halfWidth) / myScaleFactor) + myXCenter;
                double centerY = ((lastYMousePos - halfHeight) / myScaleFactor) + myYCenter;
                re.setCenter(centerX, centerY);
                re.updateView(UpdateType.ALL_FULL);
            }
        }

        @Override
        public void mouseReleased(MouseEvent e) {
            if (e.getButton() == MouseEvent.BUTTON1) {
                if (currMouseButton == 1) {
                    currMouseButton = 0;
                }
            }
            // End of gesture: render the main view at full quality.
            re.updateView(UpdateType.ALL_FULL);
        }

        @Override
        public void mouseDragged(MouseEvent e) {
            if (currMouseButton == 1) {
                int currX = e.getX();
                int currY = e.getY();
                double deltaX = 0;
                double deltaY = 0;
                double myScaleFactor = renderComponent.getTransform().getScaleFactor();
                // A locked center property pins that axis: no pan along it.
                if (!re.getViewModel().isValueLocked(BasicVisualLexicon.NETWORK_CENTER_X_LOCATION)) {
                    deltaX = (currX - lastXMousePos) / myScaleFactor;
                    lastXMousePos = currX;
                }
                if (!re.getViewModel().isValueLocked(BasicVisualLexicon.NETWORK_CENTER_Y_LOCATION)) {
                    deltaY = (currY - lastYMousePos) / myScaleFactor;
                    lastYMousePos = currY;
                }
                if (deltaX != 0 || deltaY != 0) {
                    re.pan(deltaX, deltaY);
                    re.updateView(UpdateType.ALL_FAST);
                }
            }
        }

        @Override
        public void mouseWheelMoved(MouseWheelEvent e) {
            // Zooming is delegated to the main view's input handler.
            re.getInputHandlerGlassPane().processMouseWheelEvent(e);
        }
    }

    // The remaining RenderingEngine methods delegate to the main engine.

    @Override
    public View<CyNetwork> getViewModel() {
        return re.getViewModel();
    }

    @Override
    public VisualLexicon getVisualLexicon() {
        return re.getVisualLexicon();
    }

    @Override
    public Properties getProperties() {
        return re.getProperties();
    }

    @Override
    public Printable createPrintable() {
        return re.createPrintable();
    }

    @Override
    public <V> Icon createIcon(VisualProperty<V> vp, V value, int width, int height) {
        return re.createIcon(vp, value, width, height);
    }

    @Override
    public void printCanvas(Graphics printCanvas) {
        throw new UnsupportedOperationException("Printing is not supported for Bird's eye view.");
    }

    @Override
    public void dispose() {
        renderComponent.dispose();
        re = null;
    }

    @Override
    public String getRendererId() {
        return DingRenderer.ID;
    }
}
|
package org.cytoscape.ding.impl;
import static org.cytoscape.ding.DVisualLexicon.EDGE_CURVED;
import static org.cytoscape.ding.DVisualLexicon.EDGE_SOURCE_ARROW_UNSELECTED_PAINT;
import static org.cytoscape.ding.DVisualLexicon.EDGE_TARGET_ARROW_UNSELECTED_PAINT;
import static org.cytoscape.view.presentation.property.BasicVisualLexicon.*;
import java.awt.Color;
import java.awt.Font;
import java.awt.Paint;
import java.awt.Stroke;
import java.awt.geom.Point2D;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.cytoscape.ding.impl.BendStore.HandleKey;
import org.cytoscape.ding.impl.strokes.DAnimatedStroke;
import org.cytoscape.graph.render.immed.EdgeAnchors;
import org.cytoscape.graph.render.stateful.EdgeDetails;
import org.cytoscape.model.CyEdge;
import org.cytoscape.model.CyNode;
import org.cytoscape.util.intr.LongEnumerator;
import org.cytoscape.util.intr.MinLongHeap;
import org.cytoscape.view.model.CyNetworkViewSnapshot;
import org.cytoscape.view.model.SnapshotEdgeInfo;
import org.cytoscape.view.model.View;
import org.cytoscape.view.presentation.property.ArrowShapeVisualProperty;
import org.cytoscape.view.presentation.property.BasicVisualLexicon;
import org.cytoscape.view.presentation.property.values.ArrowShape;
import org.cytoscape.view.presentation.property.values.Bend;
import org.cytoscape.view.presentation.property.values.Handle;
import org.cytoscape.view.presentation.property.values.LineType;
public final class DEdgeDetails implements EdgeDetails {
private final DRenderingEngine re;
private Map<View<CyEdge>,DAnimatedStroke> animatedStrokes = null;
/** @param re the rendering engine whose edge rendering details this object supplies. */
public DEdgeDetails(DRenderingEngine re) {
    this.re = re;
}
// True when the edge's EDGE_SELECTED visual property is set (null-safe).
@Override
public boolean isSelected(View<CyEdge> edgeView) {
    return Boolean.TRUE.equals(edgeView.getVisualProperty(BasicVisualLexicon.EDGE_SELECTED));
}

// True when the edge's EDGE_VISIBLE visual property is set (null-safe).
@Override
public boolean isVisible(View<CyEdge> edgeView) {
    return Boolean.TRUE.equals(edgeView.getVisualProperty(BasicVisualLexicon.EDGE_VISIBLE));
}
// Low-detail rendering uses a flat color; pick it by selection state.
@Override
public Color getColorLowDetail(CyNetworkViewSnapshot netView, View<CyEdge> edgeView) {
    return isSelected(edgeView)
            ? getSelectedColorLowDetail(netView, edgeView)
            : getUnselectedColorLowDetail(netView, edgeView);
}
// Resolution order: edge-specific value, view-wide default, lexicon default.
// Non-Color paints (e.g. gradients) are skipped at each step.
private Color getUnselectedColorLowDetail(CyNetworkViewSnapshot netView, View<CyEdge> edgeView) {
    Paint edgePaint = edgeView.getVisualProperty(EDGE_STROKE_UNSELECTED_PAINT);
    if (edgePaint instanceof Color)
        return (Color) edgePaint;
    Paint viewPaint = netView.getViewDefault(EDGE_STROKE_UNSELECTED_PAINT);
    if (viewPaint instanceof Color)
        return (Color) viewPaint;
    return (Color) EDGE_STROKE_UNSELECTED_PAINT.getDefault();
}
// Same fallback chain as the unselected variant, for the selected paint.
private Color getSelectedColorLowDetail(CyNetworkViewSnapshot netView, View<CyEdge> edgeView) {
    Paint edgePaint = edgeView.getVisualProperty(EDGE_STROKE_SELECTED_PAINT);
    if (edgePaint instanceof Color)
        return (Color) edgePaint;
    Paint viewPaint = netView.getViewDefault(EDGE_STROKE_SELECTED_PAINT);
    if (viewPaint instanceof Color)
        return (Color) viewPaint;
    return (Color) EDGE_STROKE_SELECTED_PAINT.getDefault();
}
// Arrow shape drawn at the edge's source end.
@Override
public ArrowShape getSourceArrowShape(View<CyEdge> edgeView) {
    return edgeView.getVisualProperty(EDGE_SOURCE_ARROW_SHAPE);
}
// Selected edges draw their source arrow in the selection paint.
@Override
public Paint getSourceArrowPaint(View<CyEdge> edgeView) {
    return isSelected(edgeView) ? getSelectedPaint(edgeView) : getSourceArrowUnselectedPaint(edgeView);
}
// Unselected source-arrow paint with edge transparency applied.
// Fix: mirrors getTargetArrowUnselectedPaint(); the original returned the raw
// paint, so source arrows ignored EDGE_TRANSPARENCY while target arrows honored it.
private final Paint getSourceArrowUnselectedPaint(View<CyEdge> edgeView) {
    Paint paint = edgeView.getVisualProperty(EDGE_SOURCE_ARROW_UNSELECTED_PAINT);
    Integer trans = edgeView.getVisualProperty(EDGE_TRANSPARENCY);
    return DNodeDetails.getTransparentColor(paint, trans);
}
// Arrow shape drawn at the edge's target end.
@Override
public ArrowShape getTargetArrowShape(View<CyEdge> edgeView) {
    return edgeView.getVisualProperty(EDGE_TARGET_ARROW_SHAPE);
}
// Selected edges draw their target arrow in the selection paint.
@Override
public Paint getTargetArrowPaint(View<CyEdge> edgeView) {
    return isSelected(edgeView) ? getSelectedPaint(edgeView) : getTargetArrowUnselectedPaint(edgeView);
}
// Unselected target-arrow paint with the edge's transparency applied.
private final Paint getTargetArrowUnselectedPaint(View<CyEdge> edgeView) {
    Paint paint = edgeView.getVisualProperty(EDGE_TARGET_ARROW_UNSELECTED_PAINT);
    Integer trans = edgeView.getVisualProperty(EDGE_TRANSPARENCY);
    return DNodeDetails.getTransparentColor(paint, trans);
}
// Edge stroke width from the EDGE_WIDTH visual property.
@Override
public float getWidth(View<CyEdge> edgeView) {
    return edgeView.getVisualProperty(EDGE_WIDTH).floatValue();
}

// Builds the (non-animated) stroke for the edge from its line type and width.
private Stroke getEdgeStroke(View<CyEdge> edgeView) {
    LineType lineType = edgeView.getVisualProperty(EDGE_LINE_TYPE);
    float width = (float) getWidth(edgeView);
    return DLineType.getDLineType(lineType).getStroke(width);
}
// A cached animated (marquee) stroke takes precedence when one has been
// registered for this edge; otherwise fall back to the plain stroke.
@Override
public Stroke getStroke(View<CyEdge> edgeView) {
    if (animatedStrokes != null) {
        Stroke animated = animatedStrokes.get(edgeView);
        if (animated != null)
            return animated;
    }
    return getEdgeStroke(edgeView);
}
/**
 * Synchronizes the animated-stroke cache with the set of edges that should
 * currently animate. An empty set drops the cache entirely (getStroke() then
 * always returns the plain stroke); otherwise stale entries are evicted and
 * missing/changed strokes are (re)created.
 */
@Override
public void updateAnimatedEdges(Collection<View<CyEdge>> edges) {
    if (edges.isEmpty()) {
        animatedStrokes = null;
        return;
    }
    if (animatedStrokes == null) {
        animatedStrokes = new HashMap<>();
    } else {
        // Evict edges that are no longer animated.
        animatedStrokes.keySet().retainAll(edges);
    }
    for (View<CyEdge> edge : edges) {
        DAnimatedStroke animatedStroke = animatedStrokes.get(edge);
        Stroke stroke = getEdgeStroke(edge);
        // Recreate the cached stroke when absent or when its class/width changed.
        if (animatedStroke == null || !sameStroke(animatedStroke, stroke)) {
            animatedStrokes.put(edge, (DAnimatedStroke) stroke);
        }
    }
}
// Strokes are considered "the same" for caching purposes when they share the
// concrete class and width. The cast assumes stroke is a DAnimatedStroke —
// callers only pass strokes produced for animated edges.
private static boolean sameStroke(DAnimatedStroke animatedStroke, Stroke stroke) {
    return animatedStroke.getClass().equals(stroke.getClass())
            && animatedStroke.getWidth() == ((DAnimatedStroke) stroke).getWidth();
}
// Advances every cached animated stroke to its next dash offset.
// Fix: guard against a NullPointerException when no animated edges are
// registered — updateAnimatedEdges(empty) sets the map to null.
@Override
public void advanceAnimatedEdges() {
    if (animatedStrokes != null) {
        animatedStrokes.replaceAll((edge, stroke) -> stroke.newInstanceForNextOffset());
    }
}
// Stroke paint, dispatched on selection state.
@Override
public Paint getPaint(View<CyEdge> edgeView) {
    return isSelected(edgeView) ? getSelectedPaint(edgeView) : getUnselectedPaint(edgeView);
}
// Unselected stroke paint with edge transparency applied; falls back from the
// stroke-specific property to the general EDGE_UNSELECTED_PAINT.
@Override
public Paint getUnselectedPaint(View<CyEdge> edgeView) {
    Paint paint = edgeView.getVisualProperty(EDGE_STROKE_UNSELECTED_PAINT);
    if (paint == null)
        paint = edgeView.getVisualProperty(EDGE_UNSELECTED_PAINT);
    Integer trans = edgeView.getVisualProperty(EDGE_TRANSPARENCY);
    return DNodeDetails.getTransparentColor(paint, trans);
}

// Selected stroke paint with edge transparency applied; falls back from the
// stroke-specific property to the general EDGE_SELECTED_PAINT.
@Override
public Paint getSelectedPaint(View<CyEdge> edgeView) {
    Paint paint = edgeView.getVisualProperty(EDGE_STROKE_SELECTED_PAINT);
    if (paint == null)
        paint = edgeView.getVisualProperty(EDGE_SELECTED_PAINT);
    Integer trans = edgeView.getVisualProperty(EDGE_TRANSPARENCY);
    return DNodeDetails.getTransparentColor(paint, trans);
}
// At most one label per edge; zero when there is no text to draw.
@Override
public int getLabelCount(View<CyEdge> edgeView) {
    String text = getLabelText(edgeView);
    if (text == null || text.isEmpty())
        return 0;
    return 1;
}
// Label text from EDGE_LABEL; may be null.
@Override
public String getLabelText(View<CyEdge> edgeView) {
    return edgeView.getVisualProperty(EDGE_LABEL);
}

// Tooltip text from EDGE_TOOLTIP; may be null.
@Override
public String getTooltipText(View<CyEdge> edgeView) {
    return edgeView.getVisualProperty(EDGE_TOOLTIP);
}

// Edge stroke transparency (presumably 0-255 per the lexicon — TODO confirm).
public Integer getTransparency(View<CyEdge> edgeView) {
    return edgeView.getVisualProperty(EDGE_TRANSPARENCY);
}

// Label transparency.
public Integer getLabelTransparency(View<CyEdge> edgeView) {
    return edgeView.getVisualProperty(EDGE_LABEL_TRANSPARENCY);
}
// Label font: EDGE_LABEL_FONT_FACE derived to EDGE_LABEL_FONT_SIZE.
// Returns the face unchanged (possibly null) when either property is absent.
@Override
public Font getLabelFont(View<CyEdge> edgeView) {
    Number size = edgeView.getVisualProperty(EDGE_LABEL_FONT_SIZE);
    Font font = edgeView.getVisualProperty(EDGE_LABEL_FONT_FACE);
    if (size != null && font != null)
        font = font.deriveFont(size.floatValue());
    return font;
}
// Label paint with the label transparency applied.
@Override
public Paint getLabelPaint(View<CyEdge> edgeView) {
    Paint paint = edgeView.getVisualProperty(EDGE_LABEL_COLOR);
    Integer trans = edgeView.getVisualProperty(EDGE_LABEL_TRANSPARENCY);
    return DNodeDetails.getTransparentColor(paint, trans);
}

// Maximum label width from EDGE_LABEL_WIDTH.
@Override
public double getLabelWidth(View<CyEdge> edgeView) {
    return edgeView.getVisualProperty(EDGE_LABEL_WIDTH);
}
// Source arrow size: raw EDGE_SOURCE_ARROW_SIZE adjusted for edge width and
// the source arrow shape (see adjustArrowSize).
@Override
public float getSourceArrowSize(View<CyEdge> edgeView) {
    Double size = edgeView.getVisualProperty(EDGE_SOURCE_ARROW_SIZE);
    return adjustArrowSize(edgeView, getSourceArrowShape(edgeView), size);
}
// Target arrow size: raw EDGE_TARGET_ARROW_SIZE adjusted for edge width and
// the TARGET arrow shape.
// Fix: the original passed getSourceArrowShape(edgeView) here (copy-paste from
// getSourceArrowSize), so half-arrow scaling on the target end keyed off the
// wrong shape whenever the two ends used different arrow shapes.
@Override
public float getTargetArrowSize(View<CyEdge> edgeView) {
    Double size = edgeView.getVisualProperty(EDGE_TARGET_ARROW_SIZE);
    return adjustArrowSize(edgeView, getTargetArrowShape(edgeView), size);
}
// Combines the nominal arrow size with the edge width. Half arrows must match
// the line exactly, so they scale multiplicatively; all other shapes scale
// additively, which produces less egregious big arrows.
private float adjustArrowSize(View<CyEdge> edgeView, ArrowShape arrowType, Number size) {
    float edgeWidth = getWidth(edgeView);
    boolean halfArrow = arrowType == ArrowShapeVisualProperty.HALF_TOP
            || arrowType == ArrowShapeVisualProperty.HALF_BOTTOM;
    return halfArrow ? edgeWidth * size.floatValue() : edgeWidth + size.floatValue();
}
// CURVED_LINES when EDGE_CURVED is true, STRAIGHT_LINES otherwise (null-safe).
@Override
public Integer getLineCurved(View<CyEdge> edgeView) {
    Boolean curved = edgeView.getVisualProperty(EDGE_CURVED);
    return Boolean.TRUE.equals(curved) ? CURVED_LINES : STRAIGHT_LINES;
}

// Edge bend without forcing creation; may return null.
@Override
public Bend getBend(View<CyEdge> edgeView) {
    return getBend(edgeView, false);
}

// Edge bend; when forceCreate is set, a fresh empty BendImpl replaces null.
@Override
public Bend getBend(View<CyEdge> edgeView, boolean forceCreate) {
    Bend bend = edgeView.getVisualProperty(EDGE_BEND);
    if (bend == null && forceCreate) {
        bend = new BendImpl();
    }
    return bend;
}
// Anchor handles are only drawn (non-zero size) on selected edges that
// actually have anchors.
@Override
public float getAnchorSize(View<CyEdge> edgeView, int anchorInx) {
    if (isSelected(edgeView) && getNumAnchors(edgeView) > 0)
        return BendStore.DEFAULT_HANDLE_SIZE;
    return 0.0f;
}
// Paint for an anchor handle: selection paint when the handle itself is
// selected. For straight lines each handle backs two anchors (see
// getNumAnchors), so the anchor index is halved to recover the handle index.
@Override
public Paint getAnchorPaint(View<CyEdge> edgeView, int anchorInx) {
    if (getLineCurved(edgeView) == STRAIGHT_LINES)
        anchorInx = anchorInx / 2;
    HandleKey handleKey = new HandleKey(edgeView.getSUID(), anchorInx);
    if (re.getBendStore().isHandleSelected(handleKey))
        return getSelectedPaint(edgeView);
    else
        return getUnselectedPaint(edgeView);
}
// Number of anchor points for the edge: one per bend handle for curved edges,
// two per handle for straight edges. Null/empty bends yield zero.
private int getNumAnchors(View<CyEdge> edgeView) {
    Bend bend = getBend(edgeView);
    if (bend == null)
        return 0;
    var handles = bend.getAllHandles();
    if (handles == null)
        return 0;
    int numHandles = handles.size();
    if (numHandles == 0)
        return 0;
    return getLineCurved(edgeView) == CURVED_LINES ? numHandles : 2 * numHandles;
}
/**
 * Actual method to be used in the Graph Renderer.
 * Writes the (x, y) location of the given anchor into anchorArr. For straight
 * lines each handle backs two anchors, hence the index halving.
 */
private void getAnchor(View<CyEdge> edgeView, int anchorIndex, float[] anchorArr) {
    Bend bend = getBend(edgeView);
    Handle handle;
    if (getLineCurved(edgeView) == CURVED_LINES)
        handle = bend.getAllHandles().get(anchorIndex);
    else
        handle = bend.getAllHandles().get(anchorIndex / 2);
    Point2D newPoint = handle.calculateHandleLocation(re.getViewModelSnapshot(), edgeView);
    anchorArr[0] = (float) newPoint.getX();
    anchorArr[1] = (float) newPoint.getY();
}
// MKTODO this may need to be optimized
// Collects every edge connecting sourceSuid and targetSuid, in either direction.
private List<View<CyEdge>> getConnectingEdgeList(CyNetworkViewSnapshot netView, long sourceSuid, long targetSuid) {
    List<View<CyEdge>> connectingEdges = new ArrayList<>();
    Iterable<View<CyEdge>> adjacentEdgeIterable = netView.getAdjacentEdgeIterable(sourceSuid);
    for (View<CyEdge> edge : adjacentEdgeIterable) {
        SnapshotEdgeInfo edgeInfo = netView.getEdgeInfo(edge);
        // XOR trick: source ^ target ^ knownEndpoint yields the other endpoint SUID.
        long otherNode = sourceSuid ^ edgeInfo.getSourceViewSUID() ^ edgeInfo.getTargetViewSUID();
        if (targetSuid == otherNode) {
            connectingEdges.add(edge);
        }
    }
    return connectingEdges;
}
/**
 * Returns the anchor points for the given edge view. If the user has defined
 * handles (bends) on the edge, those are returned via a delegating adapter.
 * Otherwise, implicit anchors are synthesized for self edges and for parallel
 * edges between the same pair of nodes, so they do not render on top of each
 * other. Returns null if edgeView is null.
 */
@Override
public EdgeAnchors getAnchors(CyNetworkViewSnapshot netView, View<CyEdge> edgeView) {
	if (edgeView == null)
		return null;
	// Adapter that lazily delegates to this edge's own (user-defined) anchors.
	final EdgeAnchors returnThis = new EdgeAnchors() {
		public int numAnchors() {
			return DEdgeDetails.this.getNumAnchors(edgeView);
		}
		public void getAnchor(int anchorIndex, float[] anchorArr) {
			DEdgeDetails.this.getAnchor(edgeView, anchorIndex, anchorArr);
		}
	};
	// If the edge has explicit anchors, use them directly.
	int numAnchors = getNumAnchors(edgeView);
	if (numAnchors > 0) {
		return returnThis;
	}
	float[] extentsBuff = new float[4];
	SnapshotEdgeInfo edgeInfo = netView.getEdgeInfo(edgeView);
	final long srcNodeIndex = edgeInfo.getSourceViewSUID();
	final long trgNodeIndex = edgeInfo.getTargetViewSUID();
	// Calculate anchors necessary for self edges.
	if (srcNodeIndex == trgNodeIndex) {
		// Node extents: the arithmetic below treats extentsBuff as
		// {xMin, yMin, xMax, yMax} — size from max-min, center from the midpoint.
		netView.getSpacialIndex2D().get(srcNodeIndex, extentsBuff);
		final double w = ((double) extentsBuff[2]) - extentsBuff[0];
		final double h = ((double) extentsBuff[3]) - extentsBuff[1];
		final double x = (((double) extentsBuff[0]) + extentsBuff[2]) / 2.0d;
		final double y = (((double) extentsBuff[1]) + extentsBuff[3]) / 2.0d;
		final double nodeSize = Math.max(w, h);
		// Count how many earlier self edges (without explicit anchors) exist on this
		// node, so each successive self edge loops out a bit further from the node.
		List<View<CyEdge>> selfEdgeList = getConnectingEdgeList(netView, srcNodeIndex, srcNodeIndex);
		int i = 0;
		for (View<CyEdge> selfEdge : selfEdgeList) {
			if (selfEdge.getSUID() == edgeView.getSUID())
				break; // MKTODO break???? shouldn't this be continue??? what about edges that haven't been processed yet???
			if (getNumAnchors(selfEdge) == 0)
				i++;
		}
		final int inx = i;
		// Two synthetic anchors place the self-edge loop to the left of and above
		// the node; (inx + 3) scales the loop outward per preceding self edge.
		return new EdgeAnchors() {
			@Override
			public int numAnchors() {
				return 2;
			}
			@Override
			public void getAnchor(int anchorInx, float[] anchorArr) {
				if (anchorInx == 0) {
					anchorArr[0] = (float) (x - (((inx + 3) * nodeSize) / 2.0d));
					anchorArr[1] = (float) y;
				} else if (anchorInx == 1) {
					anchorArr[0] = (float) x;
					anchorArr[1] = (float) (y - (((inx + 3) * nodeSize) / 2.0d));
				}
			}
		};
	}
	MinLongHeap heap = new MinLongHeap();
	// Now add "invisible" anchors to edges for the case where multiple edges
	// exist between two nodes. This has no effect if user specified anchors exist on the edge.
	// NOTE(review): the loop body either breaks or returns on every path except the
	// `continue` at a missing edge view, so this rarely iterates more than once.
	while (true) {
		// By consistently ordering the source and target nodes, dx and dy will always
		// be calculated according to the same orientation. This allows the offset
		// calculation to toggle the edges from side to side without any overlap.
		long tmpSrcIndex = Math.min(srcNodeIndex, trgNodeIndex);
		long tmpTrgIndex = Math.max(srcNodeIndex, trgNodeIndex);
		// Sort the connecting edges.
		View<CyNode> tmpSrc = netView.getNodeView(tmpSrcIndex);
		View<CyNode> tmpTrg = netView.getNodeView(tmpTrgIndex);
		List<View<CyEdge>> conEdgeList = getConnectingEdgeList(netView, tmpSrc.getSUID(), tmpTrg.getSUID());
		// The heap gives a deterministic (ascending SUID) ordering of the parallel edges.
		for (View<CyEdge> conEdge : conEdgeList) {
			heap.toss(conEdge.getSUID());
		}
		LongEnumerator otherEdges = heap.orderedElements(false);
		long otherEdge = otherEdges.nextLong();
		// If the first other edge is the same as this edge,
		// (i.e. we're at the end of the list?).
		if (otherEdge == edgeView.getSUID())
			break;
		// So we don't count the other edge twice?
		View<CyEdge> otherEdgeView = netView.getEdgeView(otherEdge);
		if (otherEdgeView == null)
			continue;
		// Count preceding parallel edges to compute this edge's slot index.
		// NOTE(review): the first edge is tested with getNumAnchors()==0 but the rest
		// with hasHandles() — presumably equivalent conditions; worth confirming.
		int i = (getNumAnchors(otherEdgeView) == 0) ? 1 : 0;
		// Count the number of other edges.
		while (true) {
			if (edgeView.getSUID() == (otherEdge = otherEdges.nextLong()) || otherEdge == -1)
				break;
			if (re.getBendStore().hasHandles(otherEdgeView))
				i++;
		}
		final int inx = i;
		// Get source node size and position.
		netView.getSpacialIndex2D().get(tmpSrcIndex, extentsBuff);
		final double srcW = ((double) extentsBuff[2]) - extentsBuff[0];
		final double srcH = ((double) extentsBuff[3]) - extentsBuff[1];
		final double srcX = (((double) extentsBuff[0]) + extentsBuff[2]) / 2.0d;
		final double srcY = (((double) extentsBuff[1]) + extentsBuff[3]) / 2.0d;
		// Get target node size and position.
		netView.getSpacialIndex2D().get(tmpTrgIndex, extentsBuff);
		final double trgW = ((double) extentsBuff[2]) - extentsBuff[0];
		final double trgH = ((double) extentsBuff[3]) - extentsBuff[1];
		final double trgX = (((double) extentsBuff[0]) + extentsBuff[2]) / 2.0d;
		final double trgY = (((double) extentsBuff[1]) + extentsBuff[3]) / 2.0d;
		// Used for determining the space between the edges.
		final double nodeSize = Math.max(Math.max(Math.max(srcW, srcH), trgW), trgH);
		// Midpoint between nodes.
		final double midX = (srcX + trgX) / 2;
		final double midY = (srcY + trgY) / 2;
		// Distance in X and Y dimensions.
		// Note that dx and dy may be negative. This is OK, because this will ensure
		// that the handle is always correctly placed offset from the midpoint of,
		// and perpendicular to, the original edge.
		final double dx = trgX - srcX;
		final double dy = trgY - srcY;
		// Distance or length between nodes.
		final double len = Math.sqrt((dx * dx) + (dy * dy));
		// Coincident nodes: no meaningful perpendicular exists, fall through to returnThis.
		if (((float) len) == 0.0f)
			break;
		// This determines which side of the first edge and how far from the first
		// edge the other edge should be placed.
		// - Divide by 2 puts consecutive edges at the same distance from the center because of integer math.
		// - Modulo puts consecutive edges on opposite sides.
		// - Node size is for consistent scaling.
		final double offset = ((inx + 1) / 2) * (inx % 2 == 0 ? 1 : -1) * nodeSize;
		// Depending on orientation sine or cosine. This adjusts the length
		// of the offset according the appropriate X and Y dimensions.
		final double normX = dx / len;
		final double normY = dy / len;
		// Calculate the anchor points.
		// Offset along the unit normal (normY, -normX), i.e. perpendicular to the edge.
		final double anchorX = midX + (offset * normY);
		final double anchorY = midY - (offset * normX);
		// Single synthetic midpoint anchor for this parallel edge.
		return new EdgeAnchors() {
			public int numAnchors() {
				return 1;
			}
			public void getAnchor(int inx, float[] arr) {
				arr[0] = (float) anchorX;
				arr[1] = (float) anchorY;
			}
		};
	}
	// No synthetic anchors applied: fall back to the (empty) delegating adapter.
	return returnThis;
}
}
|
package com.playtika.test.neo4j;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.AutoConfigureOrder;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.Ordered;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.MapPropertySource;
import org.testcontainers.containers.BindMode;
import org.testcontainers.containers.GenericContainer;
import java.util.LinkedHashMap;
import static com.playtika.test.common.utils.ContainerUtils.containerLogsConsumer;
import static com.playtika.test.neo4j.Neo4jProperties.BEAN_NAME_EMBEDDED_NEO4J;
@Slf4j
@Configuration
@AutoConfigureOrder(Ordered.HIGHEST_PRECEDENCE)
@EnableConfigurationProperties(Neo4jProperties.class)
public class EmbeddedNeo4jAutoConfiguration {

    /**
     * Wait strategy used to decide when the neo4j container has finished starting.
     * Registered only when the user has not supplied their own.
     */
    @Bean
    @ConditionalOnMissingBean
    Neo4jStatusCheck neo4jStartupCheckStrategy(Neo4jProperties properties) {
        return new Neo4jStatusCheck();
    }

    /**
     * Starts a neo4j docker container and publishes its connection details to the
     * Spring environment under the {@code embedded.neo4j.*} property keys.
     * The container is stopped via {@code destroyMethod} when the context closes.
     */
    @Bean(name = BEAN_NAME_EMBEDDED_NEO4J, destroyMethod = "stop")
    public GenericContainer neo4j(ConfigurableEnvironment environment,
                                  Neo4jProperties properties,
                                  Neo4jStatusCheck neo4jStatusCheck) throws Exception {
        log.info("Starting neo4j server. Docker image: {}", properties.dockerImage);

        GenericContainer container = new GenericContainer(properties.dockerImage)
                .withLogConsumer(containerLogsConsumer(log))
                .withExposedPorts(
                        properties.httpsPort,
                        properties.httpPort,
                        properties.boltPort)
                .withClasspathResourceMapping(
                        "neo4j-health.sh",
                        "/neo4j-health.sh",
                        BindMode.READ_ONLY)
                .waitingFor(neo4jStatusCheck);
        container.start();

        registerNeo4jEnvironment(container, environment, properties);
        return container;
    }

    private void registerNeo4jEnvironment(GenericContainer container,
                                          ConfigurableEnvironment environment,
                                          Neo4jProperties properties) {
        Integer mappedHttpsPort = container.getMappedPort(properties.httpsPort);
        Integer mappedHttpPort = container.getMappedPort(properties.httpPort);
        Integer mappedBoltPort = container.getMappedPort(properties.boltPort);

        // Insertion order is kept deliberately (LinkedHashMap) so the logged
        // connection details read in a stable order.
        LinkedHashMap<String, Object> map = new LinkedHashMap<>();
        map.put("embedded.neo4j.httpsPort", mappedHttpsPort);
        map.put("embedded.neo4j.httpPort", mappedHttpPort);
        map.put("embedded.neo4j.boltPort", mappedBoltPort);
        map.put("embedded.neo4j.host", container.getContainerIpAddress());
        map.put("embedded.neo4j.password", properties.getPassword());
        map.put("embedded.neo4j.user", properties.getUser());

        // NOTE(review): the password is written to the log at INFO level. That is
        // acceptable for a throwaway test container, but confirm this configuration
        // can never be active outside tests.
        log.info("Started neo4j server. Connection details {}, " +
                        "Admin UI: http://localhost:{}, user: {}, password: {}",
                map, mappedHttpPort, properties.getUser(), properties.getPassword());

        environment.getPropertySources()
                .addFirst(new MapPropertySource("embeddedNeo4jInfo", map));
    }
}
|
package com.matthewtamlin.mixtape.example.activities;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.view.MenuItem;
import com.matthewtamlin.mixtape.example.R;
import com.matthewtamlin.mixtape.example.data.HeaderDataSource;
import com.matthewtamlin.mixtape.example.data.Mp3Song;
import com.matthewtamlin.mixtape.example.data.Mp3SongDataSource;
import com.matthewtamlin.mixtape.library.caching.LibraryItemCache;
import com.matthewtamlin.mixtape.library.caching.LruLibraryItemCache;
import com.matthewtamlin.mixtape.library.data.DisplayableDefaults;
import com.matthewtamlin.mixtape.library.data.ImmutableDisplayableDefaults;
import com.matthewtamlin.mixtape.library.data.LibraryItem;
import com.matthewtamlin.mixtape.library.data.LibraryReadException;
import com.matthewtamlin.mixtape.library.databinders.ArtworkBinder;
import com.matthewtamlin.mixtape.library.databinders.SubtitleBinder;
import com.matthewtamlin.mixtape.library.databinders.TitleBinder;
import com.matthewtamlin.mixtape.library.mixtape_body.BodyContract;
import com.matthewtamlin.mixtape.library.mixtape_body.GridBody;
import com.matthewtamlin.mixtape.library.mixtape_body.RecyclerViewBodyPresenter;
import com.matthewtamlin.mixtape.library.mixtape_coordinator.CoordinatedMixtapeContainer;
import com.matthewtamlin.mixtape.library.mixtape_header.HeaderContract;
import com.matthewtamlin.mixtape.library.mixtape_header.SmallHeader;
import com.matthewtamlin.mixtape.library.mixtape_header.SmallHeaderPresenter;
/**
 * Example activity showing a playlist: a small header above a grid of songs,
 * wired together with header and body presenters.
 */
public class PlaylistActivity extends AppCompatActivity {
	/** Coordinating container hosting both the header and the body. */
	private CoordinatedMixtapeContainer rootView;

	/** Header shown above the playlist. */
	private SmallHeader header;

	/** Grid of songs in the playlist. */
	private GridBody body;

	/** Supplies the title/subtitle/artwork shown in the header. */
	private HeaderDataSource headerDataSource;

	/** Supplies the songs shown in the body. */
	private Mp3SongDataSource bodyDataSource;

	private SmallHeaderPresenter<HeaderDataSource> headerPresenter;

	private RecyclerViewBodyPresenter<Mp3Song, Mp3SongDataSource> bodyPresenter;

	@Override
	protected void onCreate(final Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		setContentView(R.layout.example_layout);

		// Views first, then data sources, then the presenters that bind the two.
		setupHeaderView();
		setupBodyView();
		setupContainerView();
		setupDataSources();
		setupHeaderPresenter();
		setupBodyPresenter();
	}

	private void setupHeaderView() {
		header = new SmallHeader(this);
		header.setOverflowMenuResource(R.menu.header_menu);

		final Bitmap playIcon = BitmapFactory.decodeResource(getResources(), R.drawable.ic_play);
		final Bitmap shareIcon = BitmapFactory.decodeResource(getResources(), R.drawable.ic_share);
		final Bitmap shuffleIcon = BitmapFactory.decodeResource(getResources(), R.drawable.ic_shuffle);

		// Index order matters: handleHeaderExtraButtonClicked dispatches on position.
		header.setExtraButtons(new Bitmap[]{playIcon, shareIcon, shuffleIcon});
	}

	private void setupBodyView() {
		body = new GridBody(this);
		body.setContextualMenuResource(R.menu.song_menu);
	}

	private void setupContainerView() {
		rootView = (CoordinatedMixtapeContainer) findViewById(R.id.example_layout_coordinator);
		rootView.setBody(body);
		rootView.setHeader(header);
		rootView.showHeaderAtTopOnly();
	}

	private void setupDataSources() {
		bodyDataSource = new Mp3SongDataSource();

		final Bitmap headerArtwork = BitmapFactory.decodeResource(getResources(),
				R.raw.header_artwork);
		headerDataSource = new HeaderDataSource("All Songs", "Various artists", headerArtwork);
	}

	private void setupHeaderPresenter() {
		final Bitmap defaultArtwork = BitmapFactory.decodeResource(getResources(),
				R.raw.default_artwork);
		final DisplayableDefaults defaults = new ImmutableDisplayableDefaults(
				"Playlist", "Unknown artists", defaultArtwork);

		final LibraryItemCache cache = new LruLibraryItemCache(10000, 10000, 100000);

		headerPresenter = new SmallHeaderPresenter<HeaderDataSource>(
				new TitleBinder(cache, defaults),
				new SubtitleBinder(cache, defaults),
				new ArtworkBinder(cache, defaults, 300)) {
			@Override
			public void onExtraButtonClicked(final HeaderContract.View hostView, final int index) {
				handleHeaderExtraButtonClicked(index);
			}

			@Override
			public void onOverflowMenuItemClicked(final HeaderContract.View hostView,
					final MenuItem menuItem) {
				handleHeaderOverflowMenuItemClicked(menuItem);
			}
		};

		headerPresenter.setView(header);
		headerPresenter.setDataSource(headerDataSource);
		headerPresenter.present(true);
	}

	private void setupBodyPresenter() {
		final Bitmap defaultArtwork = BitmapFactory.decodeResource(getResources(),
				R.raw.default_artwork);
		final DisplayableDefaults defaults = new ImmutableDisplayableDefaults(
				"Unknown title", "Unknown artist", defaultArtwork);

		final LibraryItemCache cache = new LruLibraryItemCache(10000, 10000, 1000000);

		bodyPresenter = new RecyclerViewBodyPresenter<Mp3Song, Mp3SongDataSource>(
				new TitleBinder(cache, defaults),
				new SubtitleBinder(cache, defaults),
				new ArtworkBinder(cache, defaults, 300)) {
			@Override
			public void onContextualMenuItemClicked(final BodyContract.View hostView,
					final LibraryItem item, final MenuItem menuItem) {
				handleBodyItemMenuItemClicked(item, menuItem);
			}

			@Override
			public void onItemClicked(final BodyContract.View hostView, final LibraryItem item) {
				handleBodyItemClicked(item);
			}
		};

		bodyPresenter.setView(body);
		bodyPresenter.setDataSource(bodyDataSource);
	}

	/** Dispatches the three header buttons by position: play, share, shuffle. */
	private void handleHeaderExtraButtonClicked(final int index) {
		if (index == 0) {
			displayMessage("Playing all songs...");
		} else if (index == 1) {
			final Intent sendIntent = new Intent();
			sendIntent.setAction(Intent.ACTION_SEND);
			sendIntent.putExtra(Intent.EXTRA_TEXT, "https://github.com/MatthewTamlin/Mixtape");
			sendIntent.setType("text/plain");
			startActivity(Intent.createChooser(sendIntent, "Download Mixtape to listen!"));
		} else if (index == 2) {
			displayMessage("Shuffling all songs...");
		}
	}

	private void handleHeaderOverflowMenuItemClicked(final MenuItem item) {
		final int itemId = item.getItemId();

		if (itemId == R.id.header_menu_download_all_immediately) {
			displayMessage("Downloading all songs...");
		} else if (itemId == R.id.header_menu_download_all_later) {
			// NOTE(review): this message reads like a removal rather than a deferred
			// download — preserved verbatim from the original implementation.
			displayMessage("Downloads removed");
		}
	}

	private void handleBodyItemMenuItemClicked(final LibraryItem item, final MenuItem menuItem) {
		final int menuItemId = menuItem.getItemId();

		if (menuItemId == R.id.song_menu_playNext) {
			displayMessage("Playing \"" + titleOf(item) + "\" next");
		} else if (menuItemId == R.id.song_menu_addToQueue) {
			displayMessage("Added \"" + titleOf(item) + "\" to queue");
		} else if (menuItemId == R.id.song_menu_remove) {
			displayMessage("Deleted \"" + titleOf(item) + "\"");
			bodyDataSource.deleteItem((Mp3Song) item);
		}
	}

	private void handleBodyItemClicked(final LibraryItem item) {
		displayMessage("Playing \"" + titleOf(item) + "\"...");
	}

	/** Returns the item's title, or "untitled" if the title cannot be read. */
	private CharSequence titleOf(final LibraryItem item) {
		try {
			return item.getTitle();
		} catch (final LibraryReadException e) {
			return "untitled";
		}
	}

	private void displayMessage(final String message) {
		Snackbar.make(rootView, message, Snackbar.LENGTH_LONG).show();
	}
}
|
package com.yahoo.vespa.hosted.dockerapi;
import com.github.dockerjava.api.DockerClient;
import com.github.dockerjava.api.command.ExecCreateCmdResponse;
import com.github.dockerjava.api.command.InspectContainerResponse;
import com.github.dockerjava.api.command.InspectExecResponse;
import com.github.dockerjava.api.command.InspectImageResponse;
import com.github.dockerjava.api.command.UpdateContainerCmd;
import com.github.dockerjava.api.exception.DockerClientException;
import com.github.dockerjava.api.exception.NotFoundException;
import com.github.dockerjava.api.exception.NotModifiedException;
import com.github.dockerjava.api.model.HostConfig;
import com.github.dockerjava.api.model.Image;
import com.github.dockerjava.api.model.Statistics;
import com.github.dockerjava.core.DefaultDockerClientConfig;
import com.github.dockerjava.core.DockerClientConfig;
import com.github.dockerjava.core.DockerClientImpl;
import com.github.dockerjava.core.async.ResultCallbackTemplate;
import com.github.dockerjava.core.command.ExecStartResultCallback;
import com.github.dockerjava.core.command.PullImageResultCallback;
import com.github.dockerjava.jaxrs.JerseyDockerCmdExecFactory;
import com.google.inject.Inject;
import com.yahoo.log.LogLevel;
import com.yahoo.vespa.hosted.dockerapi.exception.ContainerNotFoundException;
import com.yahoo.vespa.hosted.dockerapi.exception.DockerException;
import com.yahoo.vespa.hosted.dockerapi.exception.DockerExecTimeoutException;
import com.yahoo.vespa.hosted.dockerapi.metrics.CounterWrapper;
import com.yahoo.vespa.hosted.dockerapi.metrics.Dimensions;
import com.yahoo.vespa.hosted.dockerapi.metrics.MetricReceiverWrapper;

import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class DockerImpl implements Docker {
private static final Logger logger = Logger.getLogger(DockerImpl.class.getName());
static final String LABEL_NAME_MANAGEDBY = "com.yahoo.vespa.managedby";
private static final String FRAMEWORK_CONTAINER_PREFIX = "/";
private static final Duration WAIT_BEFORE_KILLING = Duration.ofSeconds(10);
private final Object monitor = new Object();
private final Set<DockerImage> scheduledPulls = new HashSet<>();
private final DockerClient dockerClient;
private final DockerImageGarbageCollector dockerImageGC;
private final CounterWrapper numberOfDockerDaemonFails;
@Inject
public DockerImpl(MetricReceiverWrapper metricReceiverWrapper) {
this(createDockerClient(), metricReceiverWrapper);
}
DockerImpl(DockerClient dockerClient, MetricReceiverWrapper metricReceiver) {
this.dockerClient = dockerClient;
this.dockerImageGC = new DockerImageGarbageCollector(this);
Dimensions dimensions = new Dimensions.Builder().add("role", "docker").build();
numberOfDockerDaemonFails = metricReceiver.declareCounter(MetricReceiverWrapper.APPLICATION_DOCKER, dimensions, "daemon.api_fails");
}
@Override
public boolean pullImageAsyncIfNeeded(DockerImage image) {
try {
synchronized (monitor) {
if (scheduledPulls.contains(image)) return true;
if (imageIsDownloaded(image)) return false;
scheduledPulls.add(image);
logger.log(LogLevel.INFO, "Starting download of " + image.asString());
dockerClient.pullImageCmd(image.asString()).exec(new ImagePullCallback(image));
return true;
}
} catch (RuntimeException e) {
numberOfDockerDaemonFails.add();
throw new DockerException("Failed to pull image '" + image.asString() + "'", e);
}
}
private void removeScheduledPoll(DockerImage image) {
synchronized (monitor) {
scheduledPulls.remove(image);
}
}
/**
* Check if a given image is already in the local registry
*/
boolean imageIsDownloaded(DockerImage dockerImage) {
return inspectImage(dockerImage).isPresent();
}
private Optional<InspectImageResponse> inspectImage(DockerImage dockerImage) {
try {
return Optional.of(dockerClient.inspectImageCmd(dockerImage.asString()).exec());
} catch (NotFoundException e) {
return Optional.empty();
} catch (RuntimeException e) {
numberOfDockerDaemonFails.add();
throw new DockerException("Failed to inspect image '" + dockerImage.asString() + "'", e);
}
}
@Override
public CreateContainerCommand createContainerCommand(DockerImage image, ContainerName containerName) {
return new CreateContainerCommandImpl(dockerClient, image, containerName);
}
@Override
public ProcessResult executeInContainerAsUser(ContainerName containerName, String user, OptionalLong timeoutSeconds, String... command) {
try {
ExecCreateCmdResponse response = execCreateCmd(containerName, user, command);
ByteArrayOutputStream output = new ByteArrayOutputStream();
ByteArrayOutputStream errors = new ByteArrayOutputStream();
ExecStartResultCallback callback = dockerClient.execStartCmd(response.getId())
.exec(new ExecStartResultCallback(output, errors));
if (timeoutSeconds.isPresent()) {
if (!callback.awaitCompletion(timeoutSeconds.getAsLong(), TimeUnit.SECONDS))
throw new DockerExecTimeoutException(String.format(
"Command '%s' did not finish within %s seconds.", command[0], timeoutSeconds));
} else {
// Wait for completion no timeout
callback.awaitCompletion();
}
InspectExecResponse state = dockerClient.inspectExecCmd(response.getId()).exec();
if (state.isRunning())
throw new DockerException("Command '%s' did not finish within %s seconds.");
return new ProcessResult(state.getExitCode(), new String(output.toByteArray()), new String(errors.toByteArray()));
} catch (RuntimeException | InterruptedException e) {
numberOfDockerDaemonFails.add();
throw new DockerException("Container '" + containerName.asString()
+ "' failed to execute " + Arrays.toString(command), e);
}
}
private ExecCreateCmdResponse execCreateCmd(ContainerName containerName, String user, String... command) {
try {
return dockerClient.execCreateCmd(containerName.asString())
.withCmd(command)
.withAttachStdout(true)
.withAttachStderr(true)
.withUser(user)
.exec();
} catch (NotFoundException e) {
throw new ContainerNotFoundException(containerName);
}
}
private Optional<InspectContainerResponse> inspectContainerCmd(String container) {
try {
return Optional.of(dockerClient.inspectContainerCmd(container).exec());
} catch (NotFoundException ignored) {
return Optional.empty();
} catch (RuntimeException e) {
numberOfDockerDaemonFails.add();
throw new DockerException("Failed to get info for container '" + container + "'", e);
}
}
@Override
public Optional<ContainerStats> getContainerStats(ContainerName containerName) {
try {
DockerStatsCallback statsCallback = dockerClient.statsCmd(containerName.asString()).exec(new DockerStatsCallback());
statsCallback.awaitCompletion(5, TimeUnit.SECONDS);
return statsCallback.stats.map(stats -> new ContainerStats(
stats.getNetworks(), stats.getCpuStats(), stats.getMemoryStats(), stats.getBlkioStats()));
} catch (NotFoundException ignored) {
return Optional.empty();
} catch (RuntimeException | InterruptedException e) {
numberOfDockerDaemonFails.add();
throw new DockerException("Failed to get stats for container '" + containerName.asString() + "'", e);
}
}
@Override
public void startContainer(ContainerName containerName) {
try {
dockerClient.startContainerCmd(containerName.asString()).exec();
} catch (NotFoundException e) {
throw new ContainerNotFoundException(containerName);
} catch (NotModifiedException ignored) {
// If is already started, ignore
} catch (RuntimeException e) {
numberOfDockerDaemonFails.add();
throw new DockerException("Failed to start container '" + containerName.asString() + "'", e);
}
}
@Override
public void stopContainer(ContainerName containerName) {
try {
dockerClient.stopContainerCmd(containerName.asString()).withTimeout((int) WAIT_BEFORE_KILLING.getSeconds()).exec();
} catch (NotFoundException e) {
throw new ContainerNotFoundException(containerName);
} catch (NotModifiedException ignored) {
// If is already stopped, ignore
} catch (RuntimeException e) {
numberOfDockerDaemonFails.add();
throw new DockerException("Failed to stop container '" + containerName.asString() + "'", e);
}
}
@Override
public void deleteContainer(ContainerName containerName) {
try {
dockerClient.removeContainerCmd(containerName.asString()).exec();
} catch (NotFoundException e) {
throw new ContainerNotFoundException(containerName);
} catch (RuntimeException e) {
numberOfDockerDaemonFails.add();
throw new DockerException("Failed to delete container '" + containerName.asString() + "'", e);
}
}
@Override
public void updateContainer(ContainerName containerName, ContainerResources resources) {
try {
UpdateContainerCmd updateContainerCmd = dockerClient.updateContainerCmd(containerName.asString())
.withCpuShares(resources.cpuShares())
.withMemory(resources.memoryBytes())
.withMemorySwap(resources.memoryBytes())
// Command line argument `--cpus c` is sent over to docker daemon as "NanoCPUs", which is the
// value of `c * 1e9`. This however, is just a shorthand for `--cpu-period p` and `--cpu-quota q`
// where p = 100000 and q = c * 100000.
// See: https://docs.docker.com/config/containers/resource_constraints/#configure-the-default-cfs-scheduler
// --cpus requires API 1.25+ on create and 1.29+ on update
// NanoCPUs is supported in docker-java as of 3.1.0 on create and not at all on update
// TODO: Simplify this to .withNanoCPUs(resources.cpu()) when docker-java supports it
.withCpuPeriod(resources.cpuPeriod())
.withCpuQuota(resources.cpuQuota());
updateContainerCmd.exec();
} catch (NotFoundException e) {
throw new ContainerNotFoundException(containerName);
} catch (RuntimeException e) {
numberOfDockerDaemonFails.add();
throw new DockerException("Failed to update container '" + containerName.asString() + "' to " + resources, e);
}
}
@Override
public List<Container> getAllContainersManagedBy(String manager) {
return listAllContainers().stream()
.filter(container -> isManagedBy(container, manager))
.map(com.github.dockerjava.api.model.Container::getId)
.flatMap(this::asContainer)
.collect(Collectors.toList());
}
@Override
public Optional<Container> getContainer(ContainerName containerName) {
return asContainer(containerName.asString()).findFirst();
}
private Stream<Container> asContainer(String container) {
return inspectContainerCmd(container)
.map(response ->
new Container(
response.getConfig().getHostName(),
new DockerImage(response.getConfig().getImage()),
containerResourcesFromHostConfig(response.getHostConfig()),
new ContainerName(decode(response.getName())),
Container.State.valueOf(response.getState().getStatus().toUpperCase()),
response.getState().getPid()
))
.map(Stream::of)
.orElse(Stream.empty());
}
private static ContainerResources containerResourcesFromHostConfig(HostConfig hostConfig) {
// Docker keeps an internal state of what the period and quota are: in cgroups, the quota is always set
// (default is 100000), but docker will report it as 0 unless explicitly set by the user.
// This may lead to a state where the quota is set, but period is 0 (accord to docker), which will
// mess up the calculation below. This can only happen if someone sets it manually, since this class
// will always set both quota and period.
final double cpus = hostConfig.getCpuQuota() > 0 ?
(double) hostConfig.getCpuQuota() / hostConfig.getCpuPeriod() : 0;
return new ContainerResources(cpus, hostConfig.getCpuShares(), hostConfig.getMemory());
}
private boolean isManagedBy(com.github.dockerjava.api.model.Container container, String manager) {
final Map<String, String> labels = container.getLabels();
return labels != null && manager.equals(labels.get(LABEL_NAME_MANAGEDBY));
}
private String decode(String encodedContainerName) {
return encodedContainerName.substring(FRAMEWORK_CONTAINER_PREFIX.length());
}
List<com.github.dockerjava.api.model.Container> listAllContainers() {
try {
return dockerClient.listContainersCmd().withShowAll(true).exec();
} catch (RuntimeException e) {
numberOfDockerDaemonFails.add();
throw new DockerException("Failed to list all containers", e);
}
}
List<Image> listAllImages() {
try {
return dockerClient.listImagesCmd().withShowAll(true).exec();
} catch (RuntimeException e) {
numberOfDockerDaemonFails.add();
throw new DockerException("Failed to list all images", e);
}
}
void deleteImage(DockerImage dockerImage) {
try {
dockerClient.removeImageCmd(dockerImage.asString()).exec();
} catch (NotFoundException ignored) {
// Image was already deleted, ignore
} catch (RuntimeException e) {
numberOfDockerDaemonFails.add();
throw new DockerException("Failed to delete docker image " + dockerImage.asString(), e);
}
}
@Override
public boolean deleteUnusedDockerImages(List<DockerImage> excludes, Duration minImageAgeToDelete) {
return dockerImageGC.deleteUnusedDockerImages(excludes, minImageAgeToDelete);
}
private class ImagePullCallback extends PullImageResultCallback {
private final DockerImage dockerImage;
private ImagePullCallback(DockerImage dockerImage) {
this.dockerImage = dockerImage;
}
@Override
public void onError(Throwable throwable) {
removeScheduledPoll(dockerImage);
logger.log(LogLevel.ERROR, "Could not download image " + dockerImage.asString(), throwable);
}
@Override
public void onComplete() {
if (imageIsDownloaded(dockerImage)) {
logger.log(LogLevel.INFO, "Download completed: " + dockerImage.asString());
removeScheduledPoll(dockerImage);
} else {
throw new DockerClientException("Could not download image: " + dockerImage);
}
}
}
// docker-java currently (3.0.8) does not support getting docker stats with stream=false, therefore we need
// to subscribe to the stream and complete as soon we get the first result.
private class DockerStatsCallback extends ResultCallbackTemplate<DockerStatsCallback, Statistics> {
private Optional<Statistics> stats = Optional.empty();
private final CountDownLatch completed = new CountDownLatch(1);
@Override
public void onNext(Statistics stats) {
if (stats != null) {
this.stats = Optional.of(stats);
completed.countDown();
onComplete();
}
}
@Override
public boolean awaitCompletion(long timeout, TimeUnit timeUnit) throws InterruptedException {
// For some reason it takes as long to execute onComplete as the awaitCompletion timeout is, therefore
// we have own awaitCompletion that completes as soon as we get the first result.
return completed.await(timeout, timeUnit);
}
}
private static DockerClient createDockerClient() {
JerseyDockerCmdExecFactory dockerFactory = new JerseyDockerCmdExecFactory()
.withMaxPerRouteConnections(10)
.withMaxTotalConnections(100)
.withConnectTimeout((int) Duration.ofSeconds(100).toMillis());
DockerClientConfig dockerClientConfig = new DefaultDockerClientConfig.Builder()
.withDockerHost("unix:///var/run/docker.sock")
.build();
return DockerClientImpl.getInstance(dockerClientConfig)
.withDockerCmdExecFactory(dockerFactory);
}
}
|
package functional;
import controllers.html.Utils;
import org.junit.Test;
import play.libs.Json;
import uk.gov.openregister.model.Cardinality;
import uk.gov.openregister.model.Datatype;
import uk.gov.openregister.model.Field;
import java.util.Optional;
import static org.fest.assertions.Assertions.assertThat;
public class UtilsTest extends ApplicationTests {
public static final Field A_FIELD = new Field("aField");
public static final Field FIELD_WITH_REGISTER = new Field("fields", "Field", Datatype.of("list"), Cardinality.ONE, Optional.of("field"));
public static final Field FIELD_WITH_CURIE = new Field("person-or-company", "A person or a company", Datatype.CURIE, Cardinality.ONE, Optional.empty());
@Test
public void testRenderAStringValue() throws Exception {
assertThat(Utils.toValue(A_FIELD, Json.parse("\"value\"")).text()).isEqualTo("value");
}
@Test
public void testRenderALinkToRegister() throws Exception {
assertThat(Utils.toValue(FIELD_WITH_REGISTER, Json.parse("\"value\"")).text()).isEqualTo("<a class=\"link_to_register\" href=\"http://localhost:8888/field/value\">value</a>");
}
@Test
public void testRenderACurie() throws Exception {
assertThat(Utils.toValue(FIELD_WITH_CURIE, Json.parse("\"person-or-company:an-id\"")).text()).isEqualTo("<a class=\"link_to_register\" href=\"http://localhost:8888/person-or-company/an-id\">person-or-company:an-id</a>");
}
@Test
public void testRenderALinkToAField() throws Exception {
assertThat(Utils.toLink(A_FIELD).text()).isEqualTo("<a class=\"link_to_register\" href=\"http://localhost:8888/field/aField\">aField</a>");
}
@Test
public void testRenderALinkToADatatype() throws Exception {
assertThat(Utils.toLink(Datatype.STRING).text()).isEqualTo("<a class=\"link_to_register\" href=\"http://localhost:8888/datatype/string\">string</a>");
}
@Test
public void testRenderAnArrayOfValues() throws Exception {
assertThat(Utils.toValue(A_FIELD, Json.parse("[\"value1\",\"value2\"]")).text()).isEqualTo("[ value1, value2 ]");
}
@Test
public void testRenderAnArrayOfLinks() throws Exception {
assertThat(Utils.toValue(FIELD_WITH_REGISTER, Json.parse("[\"value1\",\"value2\"]")).text())
.isEqualTo("[ <a class=\"link_to_register\" href=\"http://localhost:8888/field/value1\">value1</a>, " +
"<a class=\"link_to_register\" href=\"http://localhost:8888/field/value2\">value2</a> ]");
}
@Test
public void testDisplayFieldIsTrueForHash() throws Exception {
assertThat(Utils.isDisplayField("hash", "notcare")).isTrue();
}
@Test
public void testDisplayFieldIsFalseForRandomField() throws Exception {
    // Arbitrary field names that are neither special nor the register key are hidden.
    boolean displayed = Utils.isDisplayField("banana", "notcare");
    assertThat(displayed).isFalse();
}
@Test
public void testDisplayFieldIsTrueForRegisterKey() throws Exception {
    // A field matching the register's own key is displayed.
    boolean displayed = Utils.isDisplayField("registername", "registername");
    assertThat(displayed).isTrue();
}
@Test
public void testDisplayFieldIsTrueForNameField() throws Exception {
    // "name" is always displayed, whatever the register key is.
    boolean displayed = Utils.isDisplayField("name", "notcare");
    assertThat(displayed).isTrue();
}
@Test
public void testDisplayFieldIsTrueForStreetField() throws Exception {
    // "street" is always displayed, whatever the register key is.
    boolean displayed = Utils.isDisplayField("street", "some-register");
    assertThat(displayed).isTrue();
}
}
|
package net.acomputerdog.BlazeLoader.api.block;
import net.acomputerdog.BlazeLoader.annotation.Beta;
import net.acomputerdog.BlazeLoader.api.base.ApiBase;
import net.acomputerdog.BlazeLoader.main.BlazeLoader;
import net.minecraft.src.Block;
import net.minecraft.src.WorldServer;
/**
* Api for block-specific functions
*/
public class ApiBlock {

    /**
     * Gets an available block ID. Throws a RuntimeException if none are available.
     *
     * @return Returns a free Block ID
     */
    @Beta(stable = true)
    public static int getFreeBlockId(){
        if(Block.blocksList[BlazeLoader.freeBlockId] == null){
            // Cached candidate is still free: claim it and advance the cursor.
            int id = BlazeLoader.freeBlockId;
            BlazeLoader.freeBlockId++;
            return id;
        }
        else{
            // Cached candidate was taken since it was computed; ask BlazeLoader
            // for the next genuinely free ID.
            int id = BlazeLoader.updateFreeBlockId();
            BlazeLoader.freeBlockId++;
            return id;
        }
    }

    /**
     * Gets an available block ID, checking for used IDs that have been freed.
     * Throws a RuntimeException if none are available.
     *
     * @return Returns a free Block ID.
     */
    @Beta(stable = true)
    public static int recheckBlockIds(){
        int id = BlazeLoader.resetFreeBlockId();
        BlazeLoader.freeBlockId++;
        return id;
    }

    /**
     * Sets the block at a specified location.
     *
     * @param world The world to change the block in. Should be a dimension index returned by getDimensionIndex.
     * @param x The X-coordinate to change.
     * @param y The Y-coordinate to change.
     * @param z The Z-coordinate to change.
     * @param id The block ID to set.
     * @param metadata The block Metadata to set.
     * @param notifyFlag The notification flags. Should be the value(s) of ENotificationType
     */
    public static void setBlock(int world, int x, int y, int z, int id, int metadata, int notifyFlag){
        getServerForDimension(world).setBlock(x, y, z, id, metadata, notifyFlag);
    }

    /**
     * Gets the IntegratedServer.worldServers[] index of the specified world. As of MC1.6.2 the only possible values are -1, 0, and 1.
     *
     * @param dimensionLevel The dimension to get the index of.
     * @return Return the index of the dimension.
     */
    public static int getDimensionIndex(int dimensionLevel){
        // Nether (-1) and End (1) are stored at indices 1 and 2; the
        // Overworld (0) and any other value map straight through.
        if(dimensionLevel == -1){
            return 1;
        }else if(dimensionLevel == 1){
            return 2;
        }else{
            return dimensionLevel;
        }
    }

    /**
     * Gets the world for the specified dimension. Should be a dimension index returned by getDimensionIndex.
     *
     * @param dimension The dimension to get.
     * @return The WorldServer for the specified index.
     */
    public static WorldServer getServerForDimension(int dimension){
        return ApiBase.theMinecraft.getIntegratedServer().worldServers[dimension];
    }

    /**
     * Gets the Block ID of a location.
     *
     * @param world The world to get the ID from.
     * @param x The X-coordinate to get.
     * @param y The Y-coordinate to get.
     * @param z The Z-coordinate to get.
     * @return Return the block ID at the specified location.
     */
    public static int getBlockId(int world, int x, int y, int z){
        return getServerForDimension(world).getBlockId(x, y, z);
    }

    /**
     * Gets the Block Metadata of a location.
     *
     * @param world The world to get the Metadata from.
     * @param x The X-coordinate to get.
     * @param y The Y-coordinate to get.
     * @param z The Z-coordinate to get.
     * @return Return the block Metadata at the specified location.
     */
    public static int getBlockMetadata(int world, int x, int y, int z){
        return getServerForDimension(world).getBlockMetadata(x, y, z);
    }
}
|
package com.fullmetalgalaxy.server;
import java.io.IOException;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.fileupload.FileItemIterator;
import org.apache.commons.fileupload.FileItemStream;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.fileupload.util.Streams;
import com.fullmetalgalaxy.model.AuthProvider;
import com.fullmetalgalaxy.model.persist.Game;
import com.fullmetalgalaxy.server.EbAccount.NotificationQty;
import com.fullmetalgalaxy.server.pm.FmgMessage;
import com.google.appengine.api.datastore.QueryResultIterator;
import com.google.appengine.api.users.UserServiceFactory;
import com.googlecode.objectify.Query;
/**
* @author Vincent
*
*/
public class AccountServlet extends HttpServlet
{
    private static final long serialVersionUID = -4916146982326069190L;

    private final static FmpLogger log = FmpLogger.getLogger( AccountServlet.class.getName() );

    /**
     * Dispatches account GET actions selected by query parameter:
     * "profil" redirects to a user's profile page, "logout" ends the FMG
     * (and optionally Google) session, "link" ties an FMG account to a forum
     * account via a key, "retrywebhook" relaunches a game webhook.
     * Anything else redirects to the site root.
     */
    @Override
    protected void doGet(HttpServletRequest p_request, HttpServletResponse p_response)
        throws ServletException, IOException
    {
        if( p_request.getParameter( "profil" ) != null )
        {
            // redirect to real profil url
            long id = 0;
            try
            {
                id = Long.parseLong( p_request.getParameter( "profil" ) );
            } catch( Exception e )
            {
                // ignored on purpose: id stays 0 and the error redirect below is taken
            }
            if( id != 0 )
            {
                EbAccount account = FmgDataStore.dao().get( EbAccount.class, id );
                // NOTE(review): account may be null for an unknown id, which would
                // NPE here instead of reaching an error page -- confirm dao().get()
                // behavior for missing entities.
                p_response.sendRedirect( account.getProfileUrl() );
            }
            else
            {
                p_response.sendRedirect( "/genericmsg.jsp?title=Erreur: utilisateur non trouvé" );
            }
        }
        else if( p_request.getParameter( "logout" ) != null )
        {
            // user logout
            if( Auth.isUserLogged( p_request, p_response ) )
            {
                Auth.disconnectFmgUser( p_request );
            }
            String continueUrl = p_request.getParameter( "continue" );
            if( continueUrl == null )
            {
                continueUrl = "/";
            }
            String redirectUrl = UserServiceFactory.getUserService().createLogoutURL( continueUrl );
            if( p_request.getParameter( "logout" ).equalsIgnoreCase( "fmgonly" ) )
            {
                // "fmgonly": drop only the FMG session and skip the Google logout URL
                redirectUrl = continueUrl;
            }
            p_response.sendRedirect( redirectUrl );
        }
        else if( p_request.getParameter( "link" ) != null )
        {
            // user link FMG and Forum account
            Query<EbAccount> query = FmgDataStore.dao().query( EbAccount.class )
                .filter( "m_forumKey", p_request.getParameter( "link" ) );
            QueryResultIterator<EbAccount> it = query.iterator();
            if( !it.hasNext() )
            {
                p_response.sendRedirect( "/genericmsg.jsp?title=Erreur: clef non trouvé" );
                return;
            }
            EbAccount account = it.next();
            if( !Auth.isUserLogged( p_request, p_response ) )
            {
                // arg, user must be connected for this !
                String redirectUrl = Auth.getFmgLoginURL( p_request, p_response );
                if( account.getAuthProvider() == AuthProvider.Google )
                {
                    redirectUrl = Auth.getGoogleLoginURL( p_request, p_response );
                }
                p_response.sendRedirect( redirectUrl );
                return;
            }
            // NOTE(review): if getId() returns a boxed Long, '!=' compares
            // references here -- confirm it returns a primitive long.
            if( Auth.getUserAccount( p_request, p_response ).getId() != account.getId() )
            {
                p_response
                    .sendRedirect( "/genericmsg.jsp?title=Erreur: la clef ne correspond pas au compte "
                        + Auth.getUserPseudo( p_request, p_response ) );
                return;
            }
            account.setIsforumIdConfirmed( true );
            // pull profile data from the forum side before persisting the link
            ServerUtil.forumConnector().pullAccount( account );
            FmgDataStore ds = new FmgDataStore( false );
            ds.put( account );
            ds.close();
            p_response.sendRedirect( "/genericmsg.jsp?title=les comptes '" + account.getPseudo()
                + "' de FMG et du Forum sont liés" );
            return;
        }
        else if( p_request.getParameter( "retrywebhook" ) != null )
        {
            // retry a webhook
            try{
                EbAccount account = FmgDataStore.dao().get( EbAccount.class,
                    Long.parseLong( p_request.getParameter( "account" ) ) );
                Game game = FmgDataStore.dao().getGame( Long.parseLong( p_request.getParameter( "retrywebhook" ) ) );
                new WebHook( game, account ).start();
            } catch( Exception e )
            {
                // any parse/lookup failure ends up on a generic error page
                p_response.sendRedirect( "/genericmsg.jsp?title=Unkown error " + Auth.getUserPseudo( p_request, p_response ) );
                return;
            }
            p_response.sendRedirect( "/genericmsg.jsp?title=Webhook retry is launched"
                + Auth.getUserPseudo( p_request, p_response ) );
        }
        else
        {
            // Unknown user action
            p_response.sendRedirect( "/" );
        }
    }

    /**
     * Handles account POSTs (multipart forms): FMG login when a "connexion"
     * field is present, password-reminder mail when a "password" field is
     * present, otherwise account creation/update.
     */
    @Override
    protected void doPost(HttpServletRequest p_request, HttpServletResponse p_response)
        throws ServletException, IOException
    {
        ServletFileUpload upload = new ServletFileUpload();
        Map<String, String> params = new HashMap<String, String>();
        boolean isConnexion = false;
        boolean isPassword = false;
        try
        {
            // Parse the request: collect all simple form fields into params and
            // note which action-marker fields are present.
            FileItemIterator iter = upload.getItemIterator( p_request );
            while( iter.hasNext() )
            {
                FileItemStream item = iter.next();
                if( item.isFormField() )
                {
                    if( item.getFieldName().equalsIgnoreCase( "connexion" ) )
                    {
                        isConnexion = true;
                    }
                    if( item.getFieldName().equalsIgnoreCase( "password" ) )
                    {
                        isPassword = true;
                    }
                    params.put( item.getFieldName(), Streams.asString( item.openStream(), "UTF-8" ) );
                }
            }
        } catch( FileUploadException e )
        {
            // logged only: processing continues with whatever fields were read
            log.error( e );
        }
        if( isConnexion )
        {
            // user try to connect with an FMG account
            boolean isConnected = true; // NOTE(review): initializer is dead, overwritten just below
            isConnected = connectFmgUser( p_request, p_response, params );
            if( isConnected )
            {
                String continueUrl = params.get( "continue" );
                if( continueUrl == null )
                {
                    // by default, my games is the default url
                    continueUrl = "/gamelist.jsp";
                }
                p_response.sendRedirect( continueUrl );
            }
            return;
        }
        else if( isPassword )
        {
            // user ask for his password to be send on his email
            String msg = "";
            Query<EbAccount> query = FmgDataStore.dao().query( EbAccount.class ).filter( "m_email", params.get( "email" ) );
            QueryResultIterator<EbAccount> it = query.iterator();
            if( !it.hasNext() )
            {
                msg = "l'adresse mail " + params.get( "email" ) + " n'a pas été trouvé";
            }
            else
            {
                EbAccount account = it.next();
                // throttle: at most one password mail per 24 hours
                if( account.getLastPasswordAsk() != null
                    && account.getLastPasswordAsk().getTime() > System.currentTimeMillis() - (1000*60*60*24) )
                {
                    msg = "une seule demande par jour";
                }
                else if( account.getAuthProvider() != AuthProvider.Fmg )
                {
                    msg = "ce compte FMG est associé a un compte google";
                }
                else
                {
                    // all is ok, send a mail
                    new FmgMessage( "askPassword" ).sendEMail( account );
                    msg = "un email a été envoyé à " + account.getEmail();
                    // re-read the account through the writable datastore before mutating
                    FmgDataStore ds = new FmgDataStore( false );
                    account = ds.get( EbAccount.class, account.getId() );
                    account.setLastPasswordAsk( new Date() );
                    ds.put( account );
                    ds.close();
                }
            }
            p_response.sendRedirect( "/password.jsp?msg="+msg );
            return;
        }
        else
        {
            // update or create new account
            String msg = checkParams( params );
            if( msg != null )
            {
                p_response.sendRedirect( "/account.jsp?msg=" + msg );
                return;
            }
            msg = saveAccount( p_request, p_response, params );
            if( msg != null )
            {
                p_response.sendRedirect( "/account.jsp?msg=" + msg );
                return;
            }
            else
            {
                if( !Auth.isUserLogged( p_request, p_response ) )
                {
                    // freshly created account: log the user straight in
                    Auth.connectUser( p_request, params.get( "login" ) );
                }
                if( "0".equalsIgnoreCase( params.get( "accountid" ) ) )
                {
                    // return page new games
                    p_response.sendRedirect( "/gamelist.jsp?tab=0" );
                }
                else
                {
                    // stay editing profile
                    p_response.sendRedirect( "/profile.jsp?id=" + params.get( "accountid" ) );
                }
                return;
            }
        }
    }

    /**
     * try to connect an FMG (not google or other credential) user
     * @param p_request current request; "login" attribute is set on success path
     * @param p_response used for redirects on failure
     * @param params form fields; reads "login" and "password", normalizes "login"
     * @return false if connection failed and p_response is redirected.
     * @throws IOException if a redirect fails
     */
    private boolean connectFmgUser(HttpServletRequest p_request, HttpServletResponse p_response,
        Map<String, String> params) throws IOException
    {
        String login = params.get( "login" );
        if( login == null || login.isEmpty() )
        {
            p_response.sendRedirect( "/auth.jsp?msg=login ou mot de passe invalide" );
            return false;
        }
        FmgDataStore ds = new FmgDataStore(true);
        // look up by login first, then fall back to the compacted pseudo
        Query<EbAccount> query = ds.query( EbAccount.class ).filter( "m_login", login );
        EbAccount account = query.get();
        if( account == null )
        {
            query = ds.query( EbAccount.class ).filter( "m_compactPseudo", ServerUtil.compactTag( login ) );
            account = query.get();
        }
        if( account == null )
        {
            p_response.sendRedirect( "/auth.jsp?msg=login ou mot de passe invalide" );
            return false;
        }
        // normalize to the canonical login stored on the account
        login = account.getLogin();
        params.put( "login", login );
        p_request.setAttribute( "login", login );
        // if user is already connected as admin: don't check password and allow connect to another user
        if( !Auth.isUserAdmin( p_request, p_response ))
        {
            if( account.getAuthProvider() != AuthProvider.Fmg )
            {
                // not an FMG-credential account: send to Google login instead
                p_response.sendRedirect( Auth.getGoogleLoginURL( p_request, p_response ) );
                return false;
            }
            String password = params.get( "password" );
            if( password == null )
            {
                p_response.sendRedirect( "/auth.jsp?msg=login ou mot de passe invalide" );
                return false;
            }
            // NOTE(review): plain-text password comparison -- passwords appear to
            // be stored unhashed; confirm and consider hashing.
            // (the 'account == null' clause is redundant: already checked above)
            if( account == null || account.getPassword() == null
                || !account.getPassword().equals( password ) )
            {
                p_response.sendRedirect( "/auth.jsp?msg=login ou mot de passe invalide" );
                return false;
            }
        }
        // all seams ok: connect user
        Auth.connectUser( p_request, login );
        return true;
    }

    /**
     * Validates the account form fields and, for FMG credentials, folds the
     * two typed passwords into a single "password" param.
     * @param params form fields (mutated: may gain "password")
     * @return null if all ok, an error message otherwise
     */
    private String checkParams(Map<String, String> params)
    {
        if( params.get( "authprovider" ).equalsIgnoreCase( "Fmg" ) )
        {
            // the two typed passwords must match; a non-empty value becomes
            // the effective "password" param used by saveAccount()
            String pass1 = params.get( "password1" );
            String pass2 = params.get( "password2" );
            if( pass1 == null || pass2 == null || !pass1.equals( pass2 ) )
            {
                return "vous devez tapper le meme mot de passe";
            }
            if( !pass1.isEmpty() )
            {
                params.put( "password", pass1 );
            }
        }
        if( params.get( "accountid" ) == null )
            return "pas de champs accountid";
        if( params.get( "login" ) == null || params.get( "login" ).length() < 4 )
            return "votre login doit faire plus de 3 caracteres";
        if( params.get( "pseudo" ) != null && params.get( "pseudo" ).length() > 0
            && params.get( "pseudo" ).length() < 4 )
            return "votre pseudo doit faire plus de 3 caracteres";
        return null;
    }

    /**
     * Creates (accountid == 0) or updates an account from form fields that
     * already passed checkParams(). Rolls back the datastore on any refusal.
     * @param params validated form fields
     * @return null if saved successfully, an error message otherwise
     */
    private String saveAccount(HttpServletRequest p_request, HttpServletResponse p_response,
        Map<String, String> params)
    {
        String strid = params.get( "accountid" );
        assert strid != null;
        long id = Long.parseLong( strid );
        FmgDataStore store = new FmgDataStore(false);
        EbAccount account = null;
        if( id == 0 )
        {
            // we are creating a new account
            account = new EbAccount();
            // lets check that login ins't took already
            if( FmgDataStore.isPseudoExist( params.get( "login" ) ) )
            {
                store.rollback();
                return "Ce pseudo existe deja";
            }
            if( !EbAccount.isValidPseudo( params.get( "login" ) ) )
            {
                store.rollback();
                return "Ce pseudo est invalide";
            }
            GlobalVars.incrementAccountCount( 1 );
        }
        else
        {
            // only the owner or an admin may edit an existing account
            if( id != Auth.getUserAccount( p_request, p_response ).getId()
                && !Auth.isUserAdmin( p_request, p_response ) )
            {
                store.rollback();
                return "Vous n'avez pas le droit de faire ces modifs";
            }
            // just update an account
            account = store.get( EbAccount.class, id );
            if( params.get( "pseudo" ) != null
                && (account.getPseudo() == null || !account.getPseudo().equalsIgnoreCase(
                    params.get( "pseudo" ) )) )
            {
                // lets check that pseudo ins't took already
                if( FmgDataStore.isPseudoExist( params.get( "pseudo" ) ) )
                {
                    store.rollback();
                    return "Ce pseudo existe deja";
                }
                // check that user is allowed to change his pseudo
                if( !account.canChangePseudo() && !Auth.isUserAdmin( p_request, p_response ) )
                {
                    store.rollback();
                    return "Vous ne pouvez pas modifier votre pseudo";
                }
                account.setPseudo( params.get( "pseudo" ) );
            }
            if( params.get( "credential" ) != null )
            {
                // update auth provider and login !
                account.setAuthProvider( AuthProvider.valueOf( params.get( "authprovider" ) ) );
                account.setLogin( params.get( "login" ) );
            }
        }
        if( params.get( "avatarurl" ) != null )
        {
            account.setForumAvatarUrl( params.get( "avatarurl" ) );
        }
        // checkbox-style fields: mere presence of the key means "checked"
        account.setAllowMsgFromPlayer( params.get( "AllowMsgFromPlayer" ) != null );
        account.setHideEmailToPlayer( params.get( "HideEmailToPlayer" ) != null );
        account.setNotificationQty( NotificationQty.valueOf( params.get( "NotificationQty" ) ) );
        account.setEmail( params.get( "email" ) );
        account.setJabberId( params.get( "jabberId" ) );
        account.setWebHook( params.get( "webhook" ) );
        if( account.isTrancient() )
        {
            account.setLogin( params.get( "login" ) );
        }
        if( params.get( "password" ) != null )
        {
            account.setPassword( params.get( "password" ) );
        }
        if( account.getAuthProvider() == AuthProvider.Fmg
            && (account.getPassword() == null || account.getPassword().isEmpty()) )
        {
            store.rollback();
            return "Vous devez definir un mot de passe";
        }
        if( id == 0 && params.containsKey( "createforumaccount" ) )
        {
            // a new account was created: check if we need to create new forum account
            if( ServerUtil.forumConnector().createAccount( account ) )
            {
                account.setIsforumIdConfirmed( true );
            }
        }
        store.put( account );
        store.close();
        // to reload account data from datastore
        p_request.getSession().setAttribute( "account", null );
        return null;
    }
}
|
package com.growingwiththeweb.sorting;
public class OddEvenSort {
public static <T extends Comparable<T>> void sort(T[] array) {
boolean sorted = false;
while (!sorted) {
sorted = innerSort(array, 1);
sorted = innerSort(array, 0) && sorted;
}
}
private static <T extends Comparable<T>> boolean innerSort(T[] array, Integer i) {
boolean sorted = true;
for (; i < array.length - 1; i += 2)
{
if (array[i].compareTo(array[i + 1]) > 0)
{
swap(array, i, i + 1);
sorted = false;
}
}
return sorted;
}
private static <T extends Comparable<T>> void swap(
T[] array, int a, int b) {
T temp = array[a];
array[a] = array[b];
array[b] = temp;
}
}
|
package com.lekebilen.quasseldroid;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.Socket;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.net.SocketFactory;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;
import android.content.SharedPreferences;
import com.lekebilen.quasseldroid.qtcomm.DataStreamVersion;
import com.lekebilen.quasseldroid.qtcomm.QDataInputStream;
import com.lekebilen.quasseldroid.qtcomm.QDataOutputStream;
import com.lekebilen.quasseldroid.qtcomm.QMetaType;
import com.lekebilen.quasseldroid.qtcomm.QMetaTypeRegistry;
import com.lekebilen.quasseldroid.qtcomm.QVariant;
public class CoreConnection {
    /** Quassel signal-proxy request types with their on-wire integer values. */
    private enum RequestType {
        Invalid(0),
        Sync(1),
        RpcCall(2),
        InitRequest(3),
        InitData(4),
        HeartBeat(5),
        HeartBeatReply(6);

        // On-wire integer value of this request type.
        int value;
        RequestType(int value){
            this.value = value;
        }
        public int getValue(){
            return value;
        }
        /** Maps an on-wire value back to its RequestType; unknown values yield Invalid. */
        public static RequestType getForVal(int val) {
            for (RequestType type: values()) {
                if (type.value == val)
                    return type;
            }
            return Invalid;
        }
    }

    // Qt-serialization streams over the core socket; replaced by SSL-wrapped
    // streams after the handshake when SSL is enabled.
    private QDataOutputStream outStream;
    private QDataInputStream inStream;
    // Open buffers (channels/queries) keyed by buffer id.
    private Map<Integer, Buffer> buffers;

    /** Ad-hoc manual test entry point: connects to a core on localhost:4242. */
    public static void main(String[] args) {
        try {
            // NOTE(review): passes null for settings, but the constructor calls
            // settings.getBoolean(...) unconditionally -- this path would NPE;
            // confirm whether main() is still meant to work.
            CoreConnection conn = new CoreConnection("localhost", 4242, "test", "test", null);
        } catch (UnknownHostException e) {
            System.err.println("Unknown host!");
        } catch (IOException e) {
            e.printStackTrace();
        } catch (GeneralSecurityException e) {
            System.err.println("Security error!");
            e.printStackTrace();
        }
    }

    // Android preferences: "useSSL" flag and the pinned "certificate" hash.
    private SharedPreferences settings;

    /**
     * Connects to a Quassel core: exchanges client/core info, optionally
     * upgrades the socket to SSL, logs in, reads the initial session state,
     * requests object init data and starts the background read loop.
     *
     * @param host     core host name
     * @param port     core port
     * @param username core account user name
     * @param password core account password
     * @param settings preferences holding "useSSL" and the pinned certificate
     * @throws UnknownHostException     if the host cannot be resolved
     * @throws IOException              on any stream/socket failure
     * @throws GeneralSecurityException on SSL setup failure or rejected login
     */
    public CoreConnection(String host, int port, String username, String password, SharedPreferences settings)
    throws UnknownHostException, IOException, GeneralSecurityException {
        this.settings = settings;

        // START CREATE SOCKETS
        SocketFactory factory = (SocketFactory)SocketFactory.getDefault();
        Socket socket = (Socket)factory.createSocket(host, port);
        outStream = new QDataOutputStream(socket.getOutputStream());
        // END CREATE SOCKETS

        // START CLIENT INFO
        Map<String, QVariant<?>> initial = new HashMap<String, QVariant<?>>();

        DateFormat dateFormat = new SimpleDateFormat("MMM dd yyyy HH:mm:ss");
        Date date = new Date();
        initial.put("ClientDate", new QVariant<String>(dateFormat.format(date), QVariant.Type.String));
        initial.put("UseSsl", new QVariant<Boolean>(settings.getBoolean("useSSL", true), QVariant.Type.Bool));
        initial.put("ClientVersion", new QVariant<String>("v0.6.1 (dist-<a href='http://git.quassel-irc.org/?p=quassel.git;a=commit;h=611ebccdb6a2a4a89cf1f565bee7e72bcad13ffb'>611ebcc</a>)", QVariant.Type.String));
        initial.put("UseCompression", new QVariant<Boolean>(false, QVariant.Type.Bool));
        initial.put("MsgType", new QVariant<String>("ClientInit", QVariant.Type.String));
        initial.put("ProtocolVersion", new QVariant<Integer>(10, QVariant.Type.Int));

        sendQVariantMap(initial);
        // END CLIENT INFO

        // START CORE INFO
        inStream = new QDataInputStream(socket.getInputStream());
        Map<String, QVariant<?>> reply = readQVariantMap();
        System.out.println("CORE INFO: ");
        for (String key : reply.keySet()) {
            System.out.println("\t" + key + " : " + reply.get(key));
        }
        // TODO: We should check that the core is new and dandy here.
        // END CORE INFO

        // START SSL CONNECTION
        if (settings.getBoolean("useSSL", true)) {
            SSLContext sslContext = SSLContext.getInstance("TLS");
            TrustManager[] trustManagers = new TrustManager [] { new CustomTrustManager() };
            sslContext.init(null, trustManagers, null);
            SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();
            SSLSocket sslSocket = (SSLSocket) sslSocketFactory.createSocket(socket, host, port, true);
            // NOTE(review): SSLv3-only is insecure (POODLE) and modern cores/JVMs
            // disable it; confirm whether TLS should be enabled here instead.
            sslSocket.setEnabledProtocols(new String[] {"SSLv3"});
            sslSocket.setUseClientMode(true);
            sslSocket.startHandshake();
            inStream = new QDataInputStream(sslSocket.getInputStream());
            outStream = new QDataOutputStream(sslSocket.getOutputStream());
        }
        // FINISHED SSL CONNECTION

        // START LOGIN
        Map<String, QVariant<?>> login = new HashMap<String, QVariant<?>>();
        login.put("MsgType", new QVariant<String>("ClientLogin", QVariant.Type.String));
        login.put("User", new QVariant<String>(username, QVariant.Type.String));
        login.put("Password", new QVariant<String>(password, QVariant.Type.String));
        sendQVariantMap(login);
        // FINISH LOGIN

        // START LOGIN ACK
        reply = readQVariantMap();
        if (!reply.get("MsgType").toString().equals("ClientLoginAck"))
            throw new GeneralSecurityException("Invalid password?");
        // END LOGIN ACK

        // START SESSION INIT
        reply = readQVariantMap();
        System.out.println("SESSION INIT: ");
        for (String key : reply.keySet()) {
            System.out.println("\t" + key + " : " + reply.get(key));
        }
        // unchecked casts below rely on the core sending the documented shapes
        Map<String, QVariant<?>> sessionState = (Map<String, QVariant<?>>) reply.get("SessionState").getData();
        List<QVariant<?>> bufferInfos = (List<QVariant<?>>) sessionState.get("BufferInfos").getData();
        buffers = new HashMap<Integer, Buffer>();
        for (QVariant<?> bufferInfoQV: bufferInfos) {
            BufferInfo bufferInfo = (BufferInfo)bufferInfoQV.getData();
            buffers.put(bufferInfo.id, new Buffer(bufferInfo));
        }
        // END SESSION INIT

        // Now the fun part starts, where we play signal proxy
        // START SIGNAL PROXY INIT
        sendInitRequest("BacklogManager", "");
        sendInitRequest("Network", "1");
        sendInitRequest("BufferSyncer", "");

        // manually packed Sync call: BufferSyncer::requestSetLastSeenMsg(1, 1)
        List<QVariant<?>> packedFunc = new LinkedList<QVariant<?>>();
        packedFunc.add(new QVariant<Integer>(RequestType.Sync.getValue(), QVariant.Type.Int));
        packedFunc.add(new QVariant<String>("BufferSyncer", QVariant.Type.String));
        packedFunc.add(new QVariant<String>("", QVariant.Type.String));
        packedFunc.add(new QVariant<String>("requestSetLastSeenMsg", QVariant.Type.String));
        packedFunc.add(new QVariant<Integer>(1, "BufferId"));
        packedFunc.add(new QVariant<Integer>(1, "MsgId"));
        sendQVariantList(packedFunc);

        ReadThread readThread = new ReadThread(this);
        readThread.start();

        // Apparently the client doesn't send heartbeats?
        /*TimerTask sendPingAction = new TimerTask() {
            public void run() {
            }
        };*/
        // END SIGNAL PROXY
    }

    /**
     * Returns list of buffers in use.
     * FIXME(review): Map.values().toArray() returns Object[]; the cast to
     * Buffer[] throws ClassCastException at runtime. Should be
     * buffers.values().toArray(new Buffer[0]).
     * @return
     */
    public Buffer [] getBuffers() {
        return (Buffer[]) buffers.values().toArray();
    }

    /** Background thread reading and dispatching messages from the core. */
    private class ReadThread extends Thread {
        boolean running = false;
        CoreConnection parent;

        public ReadThread(CoreConnection parent) {
            this.parent = parent;
        }

        public void run() {
            this.running = true;
            List<QVariant<?>> packedFunc;
            while (running) {
                try {
                    packedFunc = readQVariantList();
                } catch (IOException e) {
                    running = false;//FIXME: handle this properly?
                    System.err.println("IO error!");
                    e.printStackTrace();
                    return;
                }
                // First element is always the request type; the rest are arguments.
                RequestType type = RequestType.getForVal((Integer)packedFunc.remove(0).getData());
                String name;
                switch (type) {
                case HeartBeat:
                    System.out.println("Got heartbeat");
                    break;
                case InitData:
                    // InitData: [class name, object name, property map]
                    name = new String(((ByteBuffer)packedFunc.remove(0).getData()).array());
                    if (name.equals("Network")) {
                        // Do nothing, for now
                    } else if (name.equals("BufferSyncer")) {
                        packedFunc.remove(0); // Object name, not used
                        // LastSeenMsg is a flat list of alternating bufferId/msgId pairs.
                        List<QVariant<?>> lastSeen = (List<QVariant<?>>) ((Map<String, QVariant<?>>)packedFunc.get(0).getData()).get("LastSeenMsg").getData();
                        for (int i=0; i<lastSeen.size()/2; i++) {
                            int bufferId = (Integer)lastSeen.remove(0).getData();
                            int msgId = (Integer)lastSeen.remove(0).getData();
                            if (buffers.containsKey(bufferId)) // We only care for buffers we have open
                                buffers.get(bufferId).setLastSeenMessage(msgId);
                        }
                        List<QVariant<?>> markerLines = (List<QVariant<?>>) ((Map<String, QVariant<?>>)packedFunc.get(0).getData()).get("MarkerLines").getData();
                        // FIXME(review): this loop iterates and drains lastSeen
                        // (already emptied above) instead of markerLines, so
                        // marker lines are never applied -- looks like a
                        // copy-paste bug; should read from markerLines.
                        for (int i=0; i<lastSeen.size()/2; i++) {
                            int bufferId = (Integer)lastSeen.remove(0).getData();
                            int msgId = (Integer)lastSeen.remove(0).getData();
                            if (buffers.containsKey(bufferId))
                                buffers.get(bufferId).setMarkerLineMessage(msgId);
                        }
                        // kick off backlog fetches for every open buffer
                        for (int buffer: buffers.keySet()) {
                            requestBacklog(buffer, buffers.get(buffer).getLastSeenMessage());
                        }
                    } else {
                        System.out.println("InitData: " + name);
                    }
                    break;
                case Sync:
                    // Sync: [class name, object name, function name, args...]
                    String className = packedFunc.remove(0).toString();
                    packedFunc.remove(0); // object name, we don't really care
                    String function = packedFunc.remove(0).toString();
                    if (className.equals("BacklogManager") && function.equals("receiveBacklog")) {
                        int buffer = (Integer) packedFunc.remove(0).getData();
                        packedFunc.remove(0); // first
                        packedFunc.remove(0); // last
                        packedFunc.remove(0); // limit
                        packedFunc.remove(0); // additional
                        for (QVariant<?> message: (List<QVariant<?>>)(packedFunc.remove(0).getData())) {
                            buffers.get(buffer).addBacklog((Message) message.getData());
                        }
                    } else {
                        System.out.println("Sync request: " + className + "::" + function);
                    }
                    break;
                case RpcCall:
                    // RpcCall: [function signature, args...]
                    String functionName = packedFunc.remove(0).toString();
                    // int buffer = functionName.charAt(0);
                    // functionName = functionName.substring(1);
                    if (functionName.equals("2displayMsg(Message)")) {
                        // NOTE(review): assumes the message's buffer is in our map;
                        // a message for an unknown buffer would NPE -- confirm.
                        Message message = (Message) packedFunc.remove(0).getData();
                        buffers.get(message.bufferInfo.id).addBacklog(message);
                    } else {
                        System.out.println("RpcCall: " + functionName + " (" + packedFunc + ").");
                    }
                    break;
                default:
                    System.out.println(type);
                }
            }
        }
    }

    /**
     * Serializes a QVariant, writes its byte length as a 32-bit prefix, then
     * writes the payload itself.
     */
    private void sendQVariant(QVariant<?> data) throws IOException {
        // See how much data we're going to send
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        QDataOutputStream bos = new QDataOutputStream(baos);
        QMetaTypeRegistry.serialize(QMetaType.Type.QVariant, bos, data);

        // Tell the other end how much data to expect
        outStream.writeUInt(bos.size(), 32);

        // Sanity check, check that we can decode our own stuff before sending it off
        QDataInputStream bis = new QDataInputStream(new ByteArrayInputStream(baos.toByteArray()));
        QMetaTypeRegistry.instance().getTypeForId(QMetaType.Type.QVariant.getValue()).getSerializer().unserialize(bis, DataStreamVersion.Qt_4_2);

        // Send data
        QMetaTypeRegistry.serialize(QMetaType.Type.QVariant, outStream, data);
    }

    /** Wraps a map in a QVariant and sends it with a length prefix. */
    private void sendQVariantMap(Map<String, QVariant<?>> data) throws IOException {
        QVariant<Map<String, QVariant<?>>> bufstruct = new QVariant<Map<String, QVariant<?>>>(data, QVariant.Type.Map);
        sendQVariant(bufstruct);
    }

    /** Wraps a list in a QVariant and sends it with a length prefix. */
    private void sendQVariantList(List<QVariant<?>> data) throws IOException {
        QVariant<List<QVariant<?>>> bufstruct = new QVariant<List<QVariant<?>>>(data, QVariant.Type.List);
        sendQVariant(bufstruct);
    }

    /** Reads one length-prefixed QVariant map from the core. */
    private Map<String, QVariant<?>> readQVariantMap() throws IOException {
        // length prefix is consumed from the stream but otherwise unused here
        long len = inStream.readUInt(32);
        QVariant <Map<String, QVariant<?>>> v = (QVariant <Map<String, QVariant<?>>>)QMetaTypeRegistry.unserialize(QMetaType.Type.QVariant, inStream);

        Map<String, QVariant<?>>ret = (Map<String, QVariant<?>>)v.getData();
        return ret;
    }

    /** Reads one length-prefixed QVariant list from the core. */
    private List<QVariant<?>> readQVariantList() throws IOException {
        // length prefix is consumed from the stream but otherwise unused here
        long len = inStream.readUInt(32);
        QVariant <List<QVariant<?>>> v = (QVariant <List<QVariant<?>>>)QMetaTypeRegistry.unserialize(QMetaType.Type.QVariant, inStream);

        List<QVariant<?>>ret = (List<QVariant<?>>)v.getData();
        return ret;
    }

    /** Sends an InitRequest for the given class/object to the signal proxy. */
    private void sendInitRequest(String className, String objectName) throws IOException {
        List<QVariant<?>> packedFunc = new LinkedList<QVariant<?>>();
        packedFunc.add(new QVariant<Integer>(RequestType.InitRequest.getValue(), QVariant.Type.Int));
        packedFunc.add(new QVariant<String>(className, QVariant.Type.String));
        packedFunc.add(new QVariant<String>(objectName, QVariant.Type.String));
        sendQVariantList(packedFunc);
    }

    /** Requests backlog for a buffer from the given message onward (no upper bound). */
    private void requestBacklog(int buffer, int first) {
        requestBacklog(buffer, first, -1);
    }

    /**
     * Requests backlog for a buffer between two message ids via
     * BacklogManager::requestBacklog; limits come from Config.
     */
    private void requestBacklog(int buffer, int firstMsg, int lastMsg) {
        List<QVariant<?>> retFunc = new LinkedList<QVariant<?>>();
        retFunc.add(new QVariant<Integer>(RequestType.Sync.getValue(), QVariant.Type.Int));
        retFunc.add(new QVariant<String>("BacklogManager", QVariant.Type.String));
        retFunc.add(new QVariant<String>("", QVariant.Type.String));
        retFunc.add(new QVariant<String>("requestBacklog", QVariant.Type.String));
        retFunc.add(new QVariant<Integer>(buffer, "BufferId"));
        retFunc.add(new QVariant<Integer>(firstMsg, "MsgId"));
        retFunc.add(new QVariant<Integer>(lastMsg, "MsgId"));
        retFunc.add(new QVariant<Integer>(Config.backlogLimit, QVariant.Type.Int));
        retFunc.add(new QVariant<Integer>(Config.backlogAdditional, QVariant.Type.Int));
        try {
            sendQVariantList(retFunc);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Sends a /SAY message to the given buffer via the 2sendInput RPC. */
    private void sendMessage(int buffer, String message) {
        List<QVariant<?>> retFunc = new LinkedList<QVariant<?>>();
        retFunc.add(new QVariant<Integer>(RequestType.RpcCall.getValue(), QVariant.Type.Int));
        retFunc.add(new QVariant<String>("2sendInput(BufferInfo,QString)", QVariant.Type.String));
        retFunc.add(new QVariant<BufferInfo>(buffers.get(buffer).getInfo(), "BufferInfo"));
        retFunc.add(new QVariant<String>("/SAY " + message, QVariant.Type.String));
        try {
            sendQVariantList(retFunc);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Trust manager that delegates to the platform default and, on server
     * rejection, falls back to trust-on-first-use certificate pinning stored
     * in the shared preferences.
     */
    private class CustomTrustManager implements javax.net.ssl.X509TrustManager {
        /*
         * The default X509TrustManager returned by SunX509. We'll delegate
         * decisions to it, and fall back to the logic in this class if the
         * default X509TrustManager doesn't trust it.
         */
        X509TrustManager defaultTrustManager;

        CustomTrustManager() throws GeneralSecurityException {
            // create a "default" JSSE X509TrustManager.
            KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType());
            //ks.load(new FileInputStream("trustedCerts"),
            //    "passphrase".toCharArray());
            TrustManagerFactory tmf = TrustManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
            tmf.init(ks);
            TrustManager tms [] = tmf.getTrustManagers();

            /*
             * Iterate over the returned trustmanagers, look
             * for an instance of X509TrustManager. If found,
             * use that as our "default" trust manager.
             */
            for (int i = 0; i < tms.length; i++) {
                if (tms[i] instanceof X509TrustManager) {
                    defaultTrustManager = (X509TrustManager) tms[i];
                    return;
                }
            }

            /*
             * Find some other way to initialize, or else we have to fail the
             * constructor.
             */
            throw new GeneralSecurityException("Couldn't initialize");
        }

        /*
         * Delegate to the default trust manager.
         * NOTE(review): the CertificateException is swallowed, so every client
         * certificate is effectively accepted -- confirm this is intended.
         */
        public void checkClientTrusted(X509Certificate[] chain, String authType)
                throws CertificateException {
            try {
                defaultTrustManager.checkClientTrusted(chain, authType);
            } catch (CertificateException excep) {
            }
        }

        /*
         * Delegate to the default trust manager; if it rejects the chain,
         * fall back to comparing against (or storing) the pinned certificate
         * hash in the shared preferences (trust-on-first-use).
         */
        public void checkServerTrusted(X509Certificate[] chain, String authType)
                throws CertificateException {
            try {
                defaultTrustManager.checkServerTrusted(chain, authType);
            } catch (CertificateException excep) {
                String hashedCert = hash(chain[0].getEncoded());
                if (CoreConnection.this.settings.contains("certificate")) {
                    if (!CoreConnection.this.settings.getString("certificate", "lol").equals(hashedCert)) {
                        throw new CertificateException();
                    }
                } else {
                    System.out.println("Storing new certificate: " + hashedCert);
                    CoreConnection.this.settings.edit().putString("certificate", hashedCert).commit();
                }
            }
        }

        /**
         * SHA-1 hex fingerprint of the given bytes.
         * NOTE(review): Integer.toHexString drops leading zeros, so bytes
         * below 0x10 produce a single hex digit -- fingerprints are not
         * canonical; pinned values only match their own formatting.
         */
        private String hash(byte[] s) {
            try {
                MessageDigest digest = java.security.MessageDigest.getInstance("SHA1");
                digest.update(s);
                byte messageDigest[] = digest.digest();

                StringBuffer hexString = new StringBuffer();
                for (int i=0; i<messageDigest.length; i++)
                    hexString.append(Integer.toHexString(0xFF & messageDigest[i]));
                return hexString.toString();
            } catch (NoSuchAlgorithmException e) {
                e.printStackTrace();
            }
            return "";
        }

        /*
         * Merely pass this through.
         */
        public X509Certificate[] getAcceptedIssuers() {
            return defaultTrustManager.getAcceptedIssuers();
        }
    }
}
|
/* Open Source Software - may be modified and shared by FRC teams. The code  */
/* must be accompanied by the FIRST BSD license file in the root directory of*/
/* the project.                                                              */
package com.milkenknights.burgundyballista;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Watchdog;
import java.util.Enumeration;
import java.util.Vector;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class Knight extends IterativeRobot {
    // Robot configuration loaded from a text file on the cRIO.
    ConfigFile config;
    // All active subsystems; each gets a teleopPeriodic() call per loop.
    // (Vector/Enumeration rather than ArrayList: FRC's Java ME VM predates
    // the collections framework generics.)
    Vector subsystems;
    Compressor compressor;
    DriveSubsystem driveSubsystem;
    CasterSubsystem casterSubsystem;

    /**
     * This function is run when the robot is first started up and should be
     * used for any initialization code.
     */
    public void robotInit() {
        config = new ConfigFile("robot-config.txt");
        config.loadFile();
        compressor = new Compressor(config.getAsInt("compressorPressureSwitch"),
        config.getAsInt("compressorRelayChannel"));
        subsystems = new Vector(10);
        subsystems.addElement(new DriveSubsystem(config));
        subsystems.addElement(new CasterSubsystem(config));
        // since no more subsystems will be added, we can free the remaining
        // memory
        subsystems.trimToSize();
        // Compressor runs automatically against its pressure switch once started.
        compressor.start();
    }

    /**
     * This function is called periodically during autonomous
     */
    public void autonomousPeriodic() {
    }

    /**
     * This function is called periodically during operator control.
     * Polls joysticks once, then ticks every subsystem.
     */
    public void teleopPeriodic() {
        JStickMultiton.updateAll();
        for (Enumeration e = subsystems.elements(); e.hasMoreElements();) {
            ((Subsystem) e.nextElement()).teleopPeriodic();
        }
        // Feed the Watchdog. Makes the motors not fail every 100ms
        Watchdog.getInstance().feed();
    }

    /**
     * This function is called periodically during test mode
     */
    public void testPeriodic() {
    }
}
|
package com.namelessmc.NamelessAPI;
import java.net.URL;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import com.google.gson.JsonObject;
import com.namelessmc.NamelessAPI.Notification.NotificationType;
import com.namelessmc.NamelessAPI.Request.Action;
/**
 * Read-only view of a website account looked up by Minecraft UUID through
 * the NamelessMC API.  All accessors except {@link #exists()} and
 * {@link #getUniqueId()} throw {@link UnsupportedOperationException} when
 * no account exists for the UUID.
 */
public final class NamelessPlayer {

    private String userName;
    private String displayName;
    private UUID uuid;
    private int groupID;
    private int reputation;
    private Date registeredDate;
    private boolean exists;
    private boolean validated;
    private boolean banned;
    private String groupName;
    private URL baseUrl;

    /**
     * Creates a new NamelessPlayer object. This constructor performs a blocking
     * HTTP request and should not be called in the main server thread.
     * @param uuid Minecraft UUID of the player to look up
     * @param baseUrl Base API URL: <i>http(s)://yoursite.com/api/v2/API_KEY</i>
     * @throws NamelessException if the request fails or the API reports an error
     */
    NamelessPlayer(UUID uuid, URL baseUrl) throws NamelessException {
        this.uuid = uuid;
        this.baseUrl = baseUrl;
        final Request request = new Request(baseUrl, Action.USER_INFO, new ParameterBuilder().add("uuid", uuid).build());
        init(request);
    }

    /**
     * Executes the user-info request and populates this object's fields.
     * When the website reports that no account exists, only {@code exists}
     * is set and every other field keeps its default value.
     * @throws NamelessException if the request fails or the API reports an error
     */
    private void init(Request request) throws NamelessException {
        request.connect();
        if (request.hasError()) throw new ApiError(request.getError());

        final JsonObject response = request.getResponse();

        exists = response.get("exists").getAsBoolean();
        if (!exists) {
            return;
        }

        // Convert UNIX timestamp (seconds; may arrive quoted) to a Date.
        Date registered = new Date(Long.parseLong(response.get("registered").toString().replaceAll("^\"|\"$", "")) * 1000);

        userName = response.get("username").getAsString();
        displayName = response.get("displayname").getAsString();
        //uuid = UUID.fromString(addDashesToUUID(response.get("uuid").getAsString()));
        groupName = response.get("group_name").getAsString();
        groupID = response.get("group_id").getAsInt();
        registeredDate = registered;
        validated = response.get("validated").getAsBoolean();
        //reputation = response.get("reputation").getAsInt();
        reputation = 0; // temp until reputation is added to API
        banned = response.get("banned").getAsBoolean();
    }

    /**
     * Guard shared by every accessor that requires an existing account.
     * Centralizes the check previously duplicated in each getter.
     * @throws UnsupportedOperationException if no account exists for the UUID
     */
    private void requireExists() {
        if (!exists) {
            throw new UnsupportedOperationException("This player does not exist.");
        }
    }

    /**
     * @return The Minecraft username associated with the provided UUID. This is not always the name displayed on the website.
     * @see #getDisplayName()
     */
    public String getUsername() {
        requireExists();
        return userName;
    }

    /**
     * @return The name this player uses on the website. This is not always the same as their Minecraft username.
     * @see #getUsername()
     */
    public String getDisplayName() {
        requireExists();
        return displayName;
    }

    /**
     * @return Minecraft UUID of this player.
     * @see #getUsername()
     */
    public UUID getUniqueId() {
        return uuid;
    }

    /**
     * @return A numerical group id.
     */
    public int getGroupID() {
        requireExists();
        return groupID;
    }

    /**
     * @return The user's primary group name
     */
    public String getGroupName() {
        requireExists();
        return groupName;
    }

    /**
     * @return The user's site reputation.
     */
    public int getReputation() {
        requireExists();
        return reputation;
    }

    /**
     * @return The date the user registered on the website.
     */
    public Date getRegisteredDate() {
        requireExists();
        return registeredDate;
    }

    /**
     * @return Whether an account associated with the UUID exists.
     * @see #getUniqueId()
     */
    public boolean exists() {
        return exists;
    }

    /**
     * @return Whether this account has been validated. An account is validated when a password is set.
     */
    public boolean isValidated() {
        requireExists();
        return validated;
    }

    /**
     * @return Whether this account is banned from the website.
     */
    public boolean isBanned() {
        requireExists();
        return banned;
    }

    /**
     * Attempts to validate this account with an email verification code.
     * @param code verification code the user received
     * @return True if the user could be validated successfully, false if the provided code is wrong
     * @throws NamelessException if the request fails or the API reports any other error
     */
    public boolean validate(String code) throws NamelessException {
        final String[] params = new ParameterBuilder()
                .add("uuid", uuid)
                .add("code", code).build();
        final Request request = new Request(baseUrl, Action.VALIDATE_USER, params);
        request.connect();
        if (request.hasError()) {
            // A wrong code is an expected outcome, not an exception.
            if (request.getError() == ApiError.INVALID_VALIDATE_CODE) {
                return false;
            } else {
                throw new ApiError(request.getError());
            }
        } else {
            return true;
        }
    }

    /**
     * Fetches the user's pending website notifications (messages, alerts, ...).
     * @return list of notifications; empty if there are none
     * @throws NamelessException if the request fails or the API reports an error
     */
    public List<Notification> getNotifications() throws NamelessException {
        final Request request = new Request(baseUrl, Action.GET_NOTIFICATIONS, new ParameterBuilder().add("uuid", uuid).build());
        request.connect();
        if (request.hasError()) throw new ApiError(request.getError());

        final List<Notification> notifications = new ArrayList<>();
        final JsonObject object = request.getResponse();
        object.getAsJsonArray("notifications").forEach((element) -> {
            final String message = element.getAsJsonObject().get("message").getAsString();
            final String url = element.getAsJsonObject().get("url").getAsString();
            final NotificationType type = NotificationType.fromString(element.getAsJsonObject().get("type").getAsString());
            notifications.add(new Notification(message, url, type));
        });
        return notifications;
    }

    /**
     * Sets the players group
     * @param groupId Numerical ID associated with a group
     * @throws NamelessException if the request fails or the API reports an error
     */
    public void setGroup(int groupId) throws NamelessException {
        final String[] parameters = new ParameterBuilder().add("uuid", uuid).add("group_id", groupId).build();
        final Request request = new Request(baseUrl, Action.SET_GROUP, parameters);
        request.connect();
        if (request.hasError()) throw new ApiError(request.getError());
    }

    /**
     * Registers a new account. The player will be sent an email to set a password.
     * @param minecraftName In-game name for this player
     * @param email Email address
     * @return Email verification disabled: A link which the user needs to click to complete registration
     * <br>Email verification enabled: An empty string (the user needs to check their email to complete registration)
     * @throws NamelessException if the request fails or the API reports an error
     */
    public String register(String minecraftName, String email) throws NamelessException {
        final String[] parameters = new ParameterBuilder().add("username", minecraftName).add("uuid", uuid).add("email", email).build();
        final Request request = new Request(baseUrl, Action.REGISTER, parameters);
        request.connect();
        if (request.hasError()) throw new ApiError(request.getError());

        final JsonObject response = request.getResponse();
        if (response.has("link")) {
            return response.get("link").getAsString();
        } else {
            return "";
        }
    }

    /**
     * Reports a player
     * @param reportedUuid UUID of the reported player
     * @param reportedUsername In-game name of the reported player
     * @param reason Reason why this player has been reported
     * @throws NamelessException if the request fails or the API reports an error
     */
    public void createReport(UUID reportedUuid, String reportedUsername, String reason) throws NamelessException {
        final String[] parameters = new ParameterBuilder()
                .add("reporter_uuid", uuid)
                .add("reported_uuid", reportedUuid)
                .add("reported_username", reportedUsername)
                .add("content", reason)
                .build();
        final Request request = new Request(baseUrl, Action.CREATE_REPORT, parameters);
        request.connect();
        if (request.hasError()) throw new ApiError(request.getError());
    }
}
|
package com.novell.spsample.client;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.Anchor;
import com.novell.spiffyui.client.JSUtil;
import com.novell.spiffyui.client.MainHeader;
import com.novell.spiffyui.client.MessageUtil;
import com.novell.spiffyui.client.rest.RESTException;
import com.novell.spiffyui.client.rest.RESTObjectCallBack;
import com.novell.spiffyui.client.rest.RESTility;
/**
* This is the header for SPSample.
*
*/
public class SPSampleHeader extends MainHeader
{
/**
* Creates a new SPSampleHeader panel
*/
public SPSampleHeader()
{
Anchor logout = new Anchor("Logout", "
logout.getElement().setId("header_logout");
setLogout(logout);
if (!Index.userLoggedIn()) {
JSUtil.hide("#header_logout", "fast");
setWelcomeString("");
} else {
String token = RESTility.getUserToken();
setWelcomeString("Welcome " + token.substring(0, token.indexOf("-")));
}
logout.addClickHandler(new ClickHandler() {
public void onClick(ClickEvent event)
{
event.preventDefault();
doLogout();
}
});
}
/**
* Logout of the application
*/
public static void doLogout()
{
RESTility.getAuthProvider().logout(new RESTObjectCallBack<String>()
{
public void success(String message)
{
Window.Location.reload();
}
public void error(String message)
{
Window.Location.reload();
}
public void error(RESTException e)
{
MessageUtil.showFatalError(e.getReason());
}
});
}
}
|
package com.salesforce.dataloader.ui;
import org.apache.log4j.Logger;
import org.eclipse.jface.wizard.IWizardPage;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.BusyIndicator;
import org.eclipse.swt.events.*;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.*;
import com.salesforce.dataloader.config.Config;
import com.salesforce.dataloader.controller.Controller;
import com.salesforce.dataloader.util.ExceptionUtil;
import com.sforce.soap.partner.fault.ApiFault;
import com.sforce.soap.partner.fault.LoginFault;
import com.sforce.ws.ConnectionException;
/**
* Describe your class here.
*
* @author Lexi Viripaeff
* @since 6.0
*/
/**
 * Wizard page that collects Salesforce credentials (username/password, or
 * session id + server URL when sfdc-internal mode is enabled), performs the
 * login through the {@link Controller}, and marks the page complete on
 * success so the wizard can advance to data selection.
 */
public class SettingsPage extends WizardPage {

    private final Controller controller;
    private Text textPassword;
    private Text textUsername;
    // The three widgets below exist only when Config.SFDC_INTERNAL is set.
    private Button isSessionIdLogin;
    private Text textSessionId;
    private Text textEndpoint;
    // Feedback line for login progress / error messages.
    private Label loginLabel;
    // Marker used to strip wrapper prefixes from nested exception messages.
    private final String nestedException = "nested exception is:";

    // logger
    private static Logger logger = Logger.getLogger(SettingsPage.class);

    public SettingsPage(Controller controller) {
        super(Labels.getString("SettingsPage.title"), Labels.getString("SettingsPage.titleMsg"), UIUtils.getImageRegistry().getDescriptor("splashscreens")); //$NON-NLS-1$ //$NON-NLS-2$

        this.controller = controller;
        // Page starts incomplete; only a successful login flips it.
        setPageComplete(false);

        // Set the description
        setDescription(Labels.getString("SettingsPage.enterUsernamePassword")); //$NON-NLS-1$
    }

    /**
     * Builds the SWT controls: username/password fields, the optional
     * sfdc-internal session-id/endpoint fields, the feedback label and the
     * login button (also the shell's default button so Enter triggers it).
     */
    public void createControl(Composite parent) {
        getShell().setImage(UIUtils.getImageRegistry().get("sfdc_icon")); //$NON-NLS-1$

        Config config = controller.getConfig();

        Composite comp = new Composite(parent, SWT.NONE);
        GridLayout gridLayout = new GridLayout();
        gridLayout.numColumns = 3;
        gridLayout.marginHeight = 30;
        comp.setLayout(gridLayout);

        Label labelUsername = new Label(comp, SWT.RIGHT);
        labelUsername.setText(Labels.getString("SettingsPage.username")); //$NON-NLS-1$

        textUsername = new Text(comp, SWT.BORDER);
        textUsername.setText(config.getString(Config.USERNAME));
        GridData data = new GridData(SWT.FILL, SWT.CENTER, true, false);
        data.widthHint = 150;
        textUsername.setLayoutData(data);

        // Spacer consuming the third grid column next to username/password.
        Composite composite2 = new Composite(comp, SWT.NONE);
        data = new GridData();
        data.verticalSpan = 2;
        composite2.setLayoutData(data);

        Label labelPassword = new Label(comp, SWT.RIGHT);
        labelPassword.setText(Labels.getString("SettingsPage.password")); //$NON-NLS-1$

        textPassword = new Text(comp, SWT.BORDER | SWT.PASSWORD);
        // don't want to cache the password
        config.setValue(Config.PASSWORD, ""); //$NON-NLS-1$
        textPassword.setText(config.getString(Config.PASSWORD));
        data = new GridData(SWT.FILL, SWT.CENTER, true, false);
        data.widthHint = 150;
        textPassword.setLayoutData(data);

        if(config.getBoolean(Config.SFDC_INTERNAL)) {

            //spacer
            Label spacer = new Label(comp, SWT.NONE);
            data = new GridData();
            data.horizontalSpan = 3;
            data.widthHint = 15;
            spacer.setLayoutData(data);

            //lIsSessionLogin checkbox
            Label labelIsSessionIdLogin = new Label(comp, SWT.RIGHT);
            labelIsSessionIdLogin.setText(Labels.getString("SettingsPage.isSessionIdLogin")); //$NON-NLS-1$
            isSessionIdLogin = new Button(comp, SWT.CHECK);
            isSessionIdLogin.setSelection(config.getBoolean(Config.SFDC_INTERNAL_IS_SESSION_ID_LOGIN));
            data = new GridData(GridData.HORIZONTAL_ALIGN_BEGINNING);
            data.horizontalSpan = 2;
            isSessionIdLogin.setLayoutData(data);
            // Toggling the checkbox flips which credential fields are enabled.
            isSessionIdLogin.addSelectionListener(new SelectionAdapter(){
                @Override
                public void widgetSelected(SelectionEvent event) {
                    reconcileLoginCredentialFieldsEnablement();
                }
            });

            //sessionId
            Label labelSessionId = new Label(comp, SWT.RIGHT);
            labelSessionId.setText(Labels.getString("SettingsPage.sessionId")); //$NON-NLS-1$
            textSessionId = new Text(comp, SWT.BORDER);
            textSessionId.setText(config.getString(Config.SFDC_INTERNAL_SESSION_ID));
            data = new GridData(SWT.FILL, SWT.CENTER, true, false);
            data.widthHint = 150;
            textSessionId.setLayoutData(data);

            // consume the 2 cells to the right of textSessionId & textEndpoint
            composite2 = new Composite(comp, SWT.NONE);
            data = new GridData();
            data.verticalSpan = 2;
            composite2.setLayoutData(data);

            //endpoint
            Label labelEndpoint = new Label(comp, SWT.RIGHT);
            labelEndpoint.setText(Labels.getString("SettingsPage.instServerUrl")); //$NON-NLS-1$
            textEndpoint = new Text(comp, SWT.BORDER);
            textEndpoint.setText(config.getString(Config.ENDPOINT));
            data = new GridData(SWT.FILL, SWT.CENTER, true, false);
            data.widthHint = 150;
            textEndpoint.setLayoutData(data);

            reconcileLoginCredentialFieldsEnablement();
        }

        loginLabel = new Label(comp, SWT.NONE);
        data = new GridData(GridData.FILL_HORIZONTAL);
        data.horizontalSpan = 3;
        data.widthHint = 220;
        loginLabel.setLayoutData(data);

        Button loginButton = new Button(comp, SWT.PUSH);
        loginButton.setText(Labels.getString("SettingsPage.login")); //$NON-NLS-1$
        data = new GridData(GridData.HORIZONTAL_ALIGN_END);
        data.horizontalSpan = 2;
        data.widthHint = 75;
        loginButton.setLayoutData(data);
        final LoginButtonSelectionListener loginListener = new LoginButtonSelectionListener();
        loginButton.addSelectionListener(loginListener);
        // Default button: pressing Enter anywhere in the shell triggers login.
        parent.getShell().setDefaultButton(loginButton);

        Composite composite5 = new Composite(comp, SWT.NONE);
        data = new GridData();
        data.horizontalSpan = 2;
        composite5.setLayoutData(data);

        setControl(comp);

        // respond to enter key on username and password box
        textUsername.addKeyListener(new UsernamePasswordKeyListener(loginListener));
        textPassword.addKeyListener(new UsernamePasswordKeyListener(loginListener));
    }

    /**
     * Key listener that forwards an Enter key release in the username or
     * password field to the login button's selection listener.
     */
    private static class UsernamePasswordKeyListener implements KeyListener {
        private final LoginButtonSelectionListener listener;

        private UsernamePasswordKeyListener(
                LoginButtonSelectionListener listener) {
            super();
            this.listener = listener;
        }

        public void keyReleased(KeyEvent arg0) {
            // keyCode 13 == carriage return (Enter).
            if(arg0.keyCode == 13) {
                listener.widgetSelected(null);
            }
        }

        public void keyPressed(KeyEvent arg0) {
            // do nothing on press, only on release.
        }
    }

    // non-static since it needs access to SettingsPage member variables
    /**
     * Saves the entered credentials into the Config, attempts the login, and
     * updates loginLabel / page-complete state with the outcome.
     */
    private class LoginButtonSelectionListener extends SelectionAdapter {
        @Override
        public void widgetSelected(SelectionEvent event) {
            Config config = controller.getConfig();
            config.setValue(Config.USERNAME, textUsername.getText());
            config.setValue(Config.PASSWORD, textPassword.getText());
            if(config.getBoolean(Config.SFDC_INTERNAL)) {
                config.setValue(Config.SFDC_INTERNAL_IS_SESSION_ID_LOGIN, isSessionIdLogin.getSelection());
                config.setValue(Config.SFDC_INTERNAL_SESSION_ID, textSessionId.getText());
                config.setValue(Config.ENDPOINT, textEndpoint.getText());
            }
            controller.saveConfig();

            loginLabel.setText(Labels.getString("SettingsPage.verifyingLogin")); //$NON-NLS-1$
            // NOTE(review): BusyIndicator.showWhile takes a Runnable and runs
            // it synchronously on the calling (UI) thread; wrapping the work
            // in a Thread subclass does not make it asynchronous here —
            // confirm this blocking-with-busy-cursor behavior is intended.
            BusyIndicator.showWhile(Display.getDefault(), new Thread() {
                @Override
                public void run() {
                    try {
                        if (controller.login() && controller.setEntityDescribes()) {
                            loginLabel.setText(Labels.getString("SettingsPage.loginSuccessful")); //$NON-NLS-1$
                            controller.saveConfig();
                            loadDataSelectionPage(controller);
                        } else {
                            loginLabel.setText(Labels.getString("SettingsPage.invalidLogin")); //$NON-NLS-1$
                            setPageComplete(false);
                        }
                    } catch (LoginFault lf ) {
                        loginLabel.setText(Labels.getString("SettingsPage.invalidLogin"));
                        setPageComplete(false);
                    } catch (ApiFault e) {
                        String msg = e.getExceptionMessage();
                        processException(msg);
                        logger.error(msg);
                    } catch (ConnectionException e) {
                        String msg = e.getMessage();
                        processException(msg);
                        logger.error(msg);
                    } catch (Throwable e) {
                        String msg = e.getMessage();
                        processException(msg);
                        logger.error(msg);
                        logger.error("\n" + ExceptionUtil.getStackTraceString(e));
                    }
                }

                /**
                 * Shows a cleaned-up error message (stripping any
                 * "nested exception is:" wrapper prefix) and keeps the page
                 * incomplete.
                 * @param msg raw exception message; may be null/empty
                 */
                private void processException(String msg) {
                    if (msg == null || msg.length() < 1) {
                        loginLabel.setText(Labels.getString("SettingsPage.invalidLogin"));
                    } else {
                        int x = msg.indexOf(nestedException);
                        if (x >= 0) {
                            x += nestedException.length();
                            msg = msg.substring(x);
                        }
                        loginLabel.setText(msg.replace('\n', ' ').trim());
                    }
                    setPageComplete(false);
                }
            });
        }
    }

    /**
     * Need to subclass this function to prevent the getNextPage() function being called before the button is clicked.
     */
    @Override
    public boolean canFlipToNextPage() {
        return isPageComplete();
    }

    /**
     * Returns the next page, login.
     *
     * @return IWizardPage
     */
    @Override
    public IWizardPage getNextPage() {
        return super.getNextPage();
    }

    /**
     * Loads DataSelectionPage. To be overridden by subclasses for special behavior.
     *
     * @param controller the active controller (already logged in)
     */
    protected void loadDataSelectionPage(Controller controller) {
        DataSelectionPage selection = (DataSelectionPage)getWizard().getPage(Labels.getString("DataSelectionPage.data")); //$NON-NLS-1$
        if(selection.setupPage()) {
            setPageComplete(true);
        } else {
            // this shouldn't relly happen since client is logged in and entity describes are set
            loginLabel.setText(Labels.getString("SettingsPage.invalidLogin")); //$NON-NLS-1$
            setPageComplete(false);
        }
    }

    /**
     * Enables or disables username/password or sessionId/serverUrl
     * text fields depending on if isSessionIdLogin is checked.
     */
    private void reconcileLoginCredentialFieldsEnablement() {
        textUsername.setEnabled(!isSessionIdLogin.getSelection());
        textPassword.setEnabled(!isSessionIdLogin.getSelection());
        textSessionId.setEnabled(isSessionIdLogin.getSelection());
        textEndpoint.setEnabled(isSessionIdLogin.getSelection());
    }

    /**
     * @return true when this page must be shown: no reusable session exists
     * or the entity describes are missing/empty.
     */
    public static boolean isNeeded(Controller controller) {
        return (!controller.loginIfSessionExists() || controller.getEntityDescribes() == null || controller
                .getEntityDescribes().isEmpty());
    }
}
|
package com.techcavern.wavetact.events;
import org.pircbotx.PircBotX;
import org.pircbotx.hooks.ListenerAdapter;
import org.pircbotx.hooks.events.KickEvent;
/**
*
* @author jztech101
*/
public class kickrejoin extends ListenerAdapter<PircBotX> {
public void onKick(KickEvent<PircBotX> event) throws Exception{
if (event.getRecipient() == event.getBot().getUserBot()){
event.getBot().sendIRC().joinChannel(event.getChannel().getName());
}
}
}
|
package com.twu.biblioteca.menuOperation;
import com.twu.biblioteca.libraryComponent.Book;
import com.twu.biblioteca.util.Inputer;
import com.twu.biblioteca.util.Printer;
import java.util.Optional;
/**
 * Menu operation that checks a book out of the library: prompts for a title,
 * shows the current book list, and on a matching available book removes it
 * and prints a confirmation.
 */
public class CheckOut implements Operation {

    public static final String SUCCESS_CHECKOUT_MESSAGE = "Thank you! Enjoy the book";

    private Printer printer;
    private Inputer inputer;
    private BookList bookList;

    public CheckOut(Printer printer, Inputer inputer, BookList bookList) {
        this.printer = printer;
        this.inputer = inputer;
        this.bookList = bookList;
    }

    /** Prompts the user and performs the checkout when the title matches. */
    @Override
    public void operate() {
        printer.print("Please Input Book's Name:");
        printer.print(bookList.getBookList());
        // Unknown titles are silently ignored, exactly as before.
        bookList.getBook(inputer.getOption()).ifPresent(book -> {
            bookList.checkOut(book);
            printer.print(SUCCESS_CHECKOUT_MESSAGE);
        });
    }

    /** Menu caption for this operation. */
    @Override
    public String toString() {
        return "Checkout a book";
    }
}
|
package com.wakatime.intellij.plugin;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import java.io.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
public class Dependencies {
private static final String cliVersion = "4.1.0";
private static String pythonLocation = null;
private static String resourcesLocation = null;
private static String cliLocation = null;
public static boolean isPythonInstalled() {
return Dependencies.getPythonLocation() != null;
}
public static String getResourcesLocation() {
if (Dependencies.resourcesLocation == null) {
String separator = "[\\\\/]";
Dependencies.resourcesLocation = WakaTime.class.getResource("WakaTime.class").getPath()
.replaceFirst("file:", "")
.replaceAll("%20", " ")
.replaceFirst("com" + separator + "wakatime" + separator + "intellij" + separator + "plugin" + separator + "WakaTime.class", "")
.replaceFirst("WakaTime.jar!" + separator, "") + "WakaTime-resources";
if (System.getProperty("os.name").startsWith("Windows") && Dependencies.resourcesLocation.startsWith("/")) {
Dependencies.resourcesLocation = Dependencies.resourcesLocation.substring(1);
}
}
return Dependencies.resourcesLocation;
}
public static String getPythonLocation() {
if (Dependencies.pythonLocation != null)
return Dependencies.pythonLocation;
String []paths = new String[] {
"pythonw",
"python",
"/usr/local/bin/python",
"/usr/bin/python",
"\\python37\\pythonw",
"\\Python37\\pythonw",
"\\python36\\pythonw",
"\\Python36\\pythonw",
"\\python35\\pythonw",
"\\Python35\\pythonw",
"\\python34\\pythonw",
"\\Python34\\pythonw",
"\\python33\\pythonw",
"\\Python33\\pythonw",
"\\python32\\pythonw",
"\\Python32\\pythonw",
"\\python31\\pythonw",
"\\Python31\\pythonw",
"\\python30\\pythonw",
"\\Python30\\pythonw",
"\\python27\\pythonw",
"\\Python27\\pythonw",
"\\python26\\pythonw",
"\\Python26\\pythonw",
"\\python37\\python",
"\\Python37\\python",
"\\python36\\python",
"\\Python36\\python",
"\\python35\\python",
"\\Python35\\python",
"\\python34\\python",
"\\Python34\\python",
"\\python33\\python",
"\\Python33\\python",
"\\python32\\python",
"\\Python32\\python",
"\\python31\\python",
"\\Python31\\python",
"\\python30\\python",
"\\Python30\\python",
"\\python27\\python",
"\\Python27\\python",
"\\python26\\python",
"\\Python26\\python",
};
for (int i=0; i<paths.length; i++) {
try {
Runtime.getRuntime().exec(paths[i]);
Dependencies.pythonLocation = paths[i];
break;
} catch (Exception e) { }
}
return Dependencies.pythonLocation;
}
public static boolean isCLIInstalled() {
File cli = new File(Dependencies.getCLILocation());
return (cli.exists() && !cli.isDirectory());
}
public static boolean isCLIOld() {
if (!Dependencies.isCLIInstalled()) {
return false;
}
ArrayList<String> cmds = new ArrayList<String>();
cmds.add(Dependencies.getPythonLocation());
cmds.add(Dependencies.getCLILocation());
cmds.add("--version");
try {
Process p = Runtime.getRuntime().exec(cmds.toArray(new String[cmds.size()]));
BufferedReader stdOut = new BufferedReader(new InputStreamReader(p.getInputStream()));
BufferedReader stdErr = new BufferedReader(new InputStreamReader(p.getErrorStream()));
String usingVersion = stdErr.readLine();
WakaTime.log.debug("*** STDOUT ***");
WakaTime.log.debug("\"" + stdOut.readLine() + "\"");
WakaTime.log.debug("*** STDERR ***");
WakaTime.log.debug("\"" + usingVersion + "\"");
if (usingVersion.contains(cliVersion)) {
return false;
}
} catch (Exception e) { }
return true;
}
public static String getCLILocation() {
return Dependencies.getResourcesLocation()+File.separator+"wakatime-master"+File.separator+"wakatime"+File.separator+"cli.py";
}
public static void installCLI() {
File cli = new File(Dependencies.getCLILocation());
if (!cli.getParentFile().getParentFile().getParentFile().exists())
cli.getParentFile().getParentFile().getParentFile().mkdirs();
String url = "https://codeload.github.com/wakatime/wakatime/zip/master";
String zipFile = cli.getParentFile().getParentFile().getParentFile().getAbsolutePath() + File.separator + "wakatime-cli.zip";
File outputDir = cli.getParentFile().getParentFile().getParentFile();
// Delete old wakatime-master directory if it exists
File dir = cli.getParentFile().getParentFile();
if (dir.exists()) {
deleteDirectory(dir);
}
// download wakatime-master.zip file
if (downloadFile(url, zipFile)) {
try {
Dependencies.unzip(zipFile, outputDir);
File oldZipFile = new File(zipFile);
oldZipFile.delete();
} catch (IOException e) {
WakaTime.log.error(e);
}
}
}
public static void upgradeCLI() {
File cliDir = new File(new File(Dependencies.getCLILocation()).getParent());
cliDir.delete();
Dependencies.installCLI();
}
public static void installPython() {
if (System.getProperty("os.name").contains("Windows")) {
String url = "https:
if (System.getenv("ProgramFiles(x86)") != null) {
url = "https:
}
File cli = new File(Dependencies.getCLILocation());
String outFile = cli.getParentFile().getParentFile().getAbsolutePath()+File.separator+"python.msi";
if (downloadFile(url, outFile)) {
// execute python msi installer
ArrayList<String> cmds = new ArrayList<String>();
cmds.add("msiexec");
cmds.add("/i");
cmds.add(outFile);
cmds.add("/norestart");
cmds.add("/qb!");
try {
Runtime.getRuntime().exec(cmds.toArray(new String[cmds.size()]));
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
public static boolean downloadFile(String url, String saveAs) {
File outFile = new File(saveAs);
// create output directory if does not exist
File outDir = outFile.getParentFile();
if (!outDir.exists())
outDir.mkdirs();
URL downloadUrl = null;
try {
downloadUrl = new URL(url);
} catch (MalformedURLException e) { }
ReadableByteChannel rbc = null;
FileOutputStream fos = null;
try {
rbc = Channels.newChannel(downloadUrl.openStream());
fos = new FileOutputStream(saveAs);
fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
return true;
} catch (RuntimeException e) {
WakaTime.log.error(e);
try {
SSLContext SSL_CONTEXT = SSLContext.getInstance("SSL");
SSL_CONTEXT.init(null, new TrustManager[] { new LocalSSLTrustManager() }, null);
HttpsURLConnection.setDefaultSSLSocketFactory(SSL_CONTEXT.getSocketFactory());
HttpsURLConnection conn = (HttpsURLConnection)downloadUrl.openConnection();
InputStream inputStream = conn.getInputStream();
fos = new FileOutputStream(saveAs);
int bytesRead = -1;
byte[] buffer = new byte[4096];
while ((bytesRead = inputStream.read(buffer)) != -1) {
fos.write(buffer, 0, bytesRead);
}
inputStream.close();
return true;
} catch (NoSuchAlgorithmException e1) {
WakaTime.log.error(e1);
} catch (KeyManagementException e1) {
WakaTime.log.error(e1);
} catch (IOException e1) {
WakaTime.log.error(e1);
}
} catch (IOException e) {
WakaTime.log.error(e);
}
return false;
}
private static void unzip(String zipFile, File outputDir) throws IOException {
if(!outputDir.exists())
outputDir.mkdirs();
byte[] buffer = new byte[1024];
ZipInputStream zis = new ZipInputStream(new FileInputStream(zipFile));
ZipEntry ze = zis.getNextEntry();
while (ze != null) {
String fileName = ze.getName();
File newFile = new File(outputDir, fileName);
if (ze.isDirectory()) {
newFile.mkdirs();
} else {
FileOutputStream fos = new FileOutputStream(newFile.getAbsolutePath());
int len;
while ((len = zis.read(buffer)) > 0) {
fos.write(buffer, 0, len);
}
fos.close();
}
ze = zis.getNextEntry();
}
zis.closeEntry();
zis.close();
}
private static void deleteDirectory(File path) {
if( path.exists() ) {
File[] files = path.listFiles();
for(int i=0; i<files.length; i++) {
if(files[i].isDirectory()) {
deleteDirectory(files[i]);
}
else {
files[i].delete();
}
}
}
path.delete();
}
}
|
package communication.unicast;
import java.io.*;
import java.net.*;
import java.util.Map;
import communication.CommunicationException;
import communication.protocols.*;
import communication.messages.*;
import data.system.NodeInfo;
/**
* A client-to-server networking server implemented using sockets
* @author Balazs Pete
*
*/
public class UnicastSocketServer extends UnicastServer {
private int port;
/**
 * Create an instance of UnicastSocketNetworkingServer
 * @param port The TCP port the server socket will be bound to when
 *             {@link #acceptConnections()} is called (binding is deferred
 *             until then)
 */
public UnicastSocketServer(int port) {
    this.port = port;
}
/**
 * Binds a ServerSocket to the configured port and accepts connections
 * forever, spawning a {@link MessageHandler} thread per client.
 * Never returns normally; any socket failure is reported as
 * {@link CommunicationException#CANNOT_USE_PORT}.
 *
 * @throws CommunicationException if the port cannot be bound or accept fails
 */
@Override
public void acceptConnections() throws CommunicationException {
    String location = null;
    try {
        location = Inet4Address.getLocalHost().getHostAddress();
    } catch (UnknownHostException e1) {
        // We can't get our IP address, oh well displaying less info...
    }

    System.out.println("UnicastSocketServer: Listening to connections" +
        (location == null ? "" : " at " + location) +
        " on port " + port);

    ServerSocket serverSocket = null;
    try {
        serverSocket = new ServerSocket(port);
        // Accept loop runs until an exception occurs.  (The original kept a
        // `listening` flag that was never cleared, making its close()
        // unreachable; the finally below replaces it and also releases the
        // socket on failure, which the original leaked.)
        while (true) {
            new MessageHandler(protocolMap, serverSocket.accept()).start();
        }
    } catch (Exception e) {
        throw CommunicationException.CANNOT_USE_PORT;
    } finally {
        if (serverSocket != null) {
            try {
                serverSocket.close();
            } catch (IOException e) {
                // Nothing left to do; we are already shutting down.
            }
        }
    }
}
public void run() {
try {
acceptConnections();
} catch (CommunicationException e) {
e.printStackTrace();
}
}
/**
* A message handling wrapper to process messages in a separate thread
* @author Balazs Pete
*
*/
protected class MessageHandler extends Thread {
private Socket socket = null;
private Map<String, Protocol> protocolMap = null;
/**
* Create a new instance of MessageHandler
* @param protocol The protocol to use
* @param socket The socket to use to send reply
*/
public MessageHandler(Map<String, Protocol> protocolMap, Socket socket) {
this.socket = socket;
this.protocolMap = protocolMap;
}
public void run() {
tryGetSendMessage();
}
private void tryGetSendMessage() {
try {
// Pass the socket streams to writable object streams
ObjectOutputStream out = new ObjectOutputStream(socket.getOutputStream());
ObjectInputStream in = new ObjectInputStream(socket.getInputStream());
Object object = null;
while (true) {
try {
object = in.readObject();
} catch (ClassNotFoundException e) {
// Sent object does not exist
e.printStackTrace();
break;
} catch (SocketException e) {
// Connection has been closed
break;
} catch (EOFException e) {
// End of transferred content, just break
break;
}
// If stream has ended, end listening and close communication
if (object == null) break;
// Determine protocol to be used
Message input = (Message) object;
NodeInfo node = new NodeInfo(socket.getInetAddress().getHostName());
node.addLocation(socket.getInetAddress().getHostAddress());
input.setSender(node);
System.out.println("UnicastSocketServer: Received a " + input.getType() + " from " + input.getSender().getLocation() + " | " + input.getContents());
Protocol protocol = protocolMap.get(input.getType().intern());
// Process message and get response
Message message;
if(protocol != null) {
message = protocol.processMessage(input);
if (!protocol.hasReply() || message == null) {
break;
}
} else {
message = new ErrorMessage("Message type not supported");
}
// Send message
out.writeObject(message);
System.out.println("UnicastSocketServer: Sent a " + message.getType() + " to " + input.getSender().getLocation());// + " | " + message.getContents());
out.flush();
}
out.close();
in.close();
socket.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
public static void main(String[] args) {
UnicastSocketServer i = new UnicastSocketServer(8000);
i.putProtocol(new LoopbackProtocol());
try {
i.acceptConnections();
} catch (CommunicationException e) {
e.printStackTrace();
}
}
}
|
import java.util.*;
import javax.swing.JOptionPane;
/*
* The purpose of this class is to take in the raw user input text (the command)
* and interpret it; then calling the corresponding method with the command.
*/
public class CommandFetch
{
    Controller ctrl;

    // Raw user input with line breaks collapsed to spaces; kept so handlers can
    // re-parse parenthesised argument lists from the original text.
    public String fullCommand;

    // Words that may not be used as table or column names.
    List<String> reservedWords = Arrays.asList("SELECT", "FROM", "INNER", "JOIN", "CREATE", "TABLE", "NOJOIN", "UPDATE", "IN", "SET", "TO", "DELETE", "ALL", "INSERT", "ORDERBY", "STRING", "INT", "DOUBLE", "DATE");

    // Column data types accepted by CREATE TABLE.
    List<String> supportedDataTypes = Arrays.asList("String", "int", "double", "Date");

    public CommandFetch(Controller _ctrl)
    {
        ctrl = _ctrl;
    }

    /**
     * loader: takes the input from the GUI text area, removes the line breaks,
     * remembers the normalised text in fullCommand, splits it into words and
     * passes the word array to interpret().
     */
    public void loader(String text)
    {
        text = text.replace("\n", " ");
        fullCommand = text;
        String[] command = text.split(" ");
        interpret(command);
    }

    /**
     * interpret: inspects the leading keyword(s) of the command, maps it to a
     * control value, and dispatches to the matching call* method. Unknown
     * commands and handler errors are reported via dialog boxes.
     */
    public void interpret(String[] commArr)
    {
        int control = 0;
        /*
         * LIST OF CONTROL VALUES:
         * 0 = default, return syntax error
         * 1 = create table
         * 2 = delete
         * 3 = insert
         * 4 = update
         * 5 = select
         */
        // BUG FIX: guard the array length before reading commArr[1]; a bare
        // "CREATE" previously threw an uncaught ArrayIndexOutOfBoundsException
        // because these checks run before the try block below.
        if (commArr[0].equals("CREATE") && commArr.length > 1 && commArr[1].equals("TABLE"))
        {
            control = 1;
        }
        else if (commArr[0].equals("DELETE"))
        {
            control = 2;
        }
        else if (commArr[0].equals("INSERT"))
        {
            control = 3;
        }
        else if (commArr[0].equals("UPDATE"))
        {
            control = 4;
        }
        else if (commArr[0].equals("SELECT"))
        {
            control = 5;
        }
        try
        {
            switch (control)
            {
                case 0:
                    JOptionPane.showMessageDialog(null, "ERROR: INVALID COMMAND", "whoops", JOptionPane.ERROR_MESSAGE);
                    break;
                case 1:
                    callCreateTable(commArr);
                    break;
                case 2:
                    callDelete(commArr);
                    break;
                case 3:
                    callInsert(commArr);
                    break;
                case 4:
                    callUpdate(commArr);
                    break;
                case 5:
                    List<String> commList = Arrays.asList(commArr);
                    callSelect(commList);
                    break;
                default:
                    JOptionPane.showMessageDialog(null, "ERROR: INVALID COMMAND", "whoops", JOptionPane.ERROR_MESSAGE);
                    break;
            }
        }
        catch (ReservedWordException | DataTypeException | GeneralSyntaxException | ArrayIndexOutOfBoundsException | NumberFormatException ex)
        {
            if (ex instanceof NumberFormatException)
            {
                JOptionPane.showMessageDialog(null, "ERROR: The input data does not match the expected data type!", "whoops", JOptionPane.ERROR_MESSAGE);
            }
            else
            {
                JOptionPane.showMessageDialog(null, ex.getMessage(), "whoops", JOptionPane.ERROR_MESSAGE);
            }
        }
    }

    /**
     * callCreateTable: extracts the table name and the parenthesised
     * "name type" column list, validates names and types, then calls
     * ctrl.createTable(tableName, colNames, dataTypes).
     *
     * @throws ReservedWordException  if the table or a column name is a keyword
     * @throws DataTypeException      if a column type is unsupported
     * @throws GeneralSyntaxException for structural problems (one column only,
     *                                duplicate column, badly declared primary key)
     */
    public void callCreateTable(String[] command) throws ReservedWordException, DataTypeException, GeneralSyntaxException
    {
        String tableName = command[2];
        // Everything between the first '(' and the first ')' is the column list.
        String data = fullCommand.split("\\(")[1];
        data = data.split("\\)")[0];
        data = data.trim();
        if (tableName.contains("("))
        {
            // The user wrote "name(..." with no space; strip the column list.
            String[] s = tableName.split("\\(");
            tableName = s[0];
        }
        if (reservedWords.contains(tableName.toUpperCase()))
        {
            throw new ReservedWordException("ERROR: [" + tableName + "] is a reserved key word!");
        }
        //DEBUG MESSAGE
        System.out.println("CREATE TABLE DATA: " + data);
        System.out.println("TABLENAME: " + tableName);
        ArrayList<String> colNames = new ArrayList<String>();
        ArrayList<String> dataTypes = new ArrayList<String>();
        String dataArr[] = data.split(",");
        if (dataArr.length < 2)
        {
            throw new GeneralSyntaxException("ERROR: Cannot have a table with only one column!");
        }
        //DEBUG MESSAGE
        // BUG FIX: dataArr.toString() printed the array's identity hash, not
        // its contents; Arrays.toString shows the actual elements.
        System.out.println("ARRAY: " + Arrays.toString(dataArr));
        System.out.println("A0: " + dataArr[0]);
        System.out.println("A1: " + dataArr[1]);
        for (int i = 0; i < dataArr.length; i++)
        {
            dataArr[i] = dataArr[i].trim();
            String tempArr[] = dataArr[i].split(" ");
            if (i > 0)
            {
                if (colNames.contains(tempArr[0]))
                {
                    throw new GeneralSyntaxException("ERROR: Duplicate column name detected!");
                }
            }
            if (reservedWords.contains(tempArr[0].toUpperCase()))
            {
                throw new ReservedWordException("ERROR: [" + tempArr[0] + "] is a reserved key word!");
            }
            if (!supportedDataTypes.contains(tempArr[1]))
            {
                throw new DataTypeException("ERROR: [" + tempArr[1] + "] is an invalid data type!");
            }
            colNames.add(tempArr[0]);
            dataTypes.add(tempArr[1]);
        }
        // BUG FIX: startsWith("PK") replaces charAt(0)/charAt(1), which threw an
        // uncaught StringIndexOutOfBoundsException for a one-character first
        // column name (that exception type is not caught by interpret()).
        if (!colNames.get(0).startsWith("PK") || dataTypes.get(0).compareTo("int") != 0)
        {
            throw new GeneralSyntaxException("ERROR: Primary Key is not properly declared!");
        }
        //AND THEN I JUST PASS THE TWO ARRAYLISTS TO THE CREATE TABLE METHOD
        ctrl.createTable(tableName, colNames, dataTypes);
    }

    /**
     * callDelete: supports three forms -
     * "DELETE ALL ROWS [table]", "DELETE TABLE [table]" and
     * "DELETE [pk] FROM [table]".
     *
     * @throws GeneralSyntaxException if none of the forms match
     */
    public void callDelete(String[] command) throws GeneralSyntaxException
    {
        String tableName;
        //All Rows
        if (command[1].equalsIgnoreCase("ALL") && command[2].equalsIgnoreCase("ROWS"))
        {
            tableName = command[3];
            Controller.deleteAllRows(tableName);
        }
        //Table
        else if (command[1].equalsIgnoreCase("TABLE"))
        {
            tableName = command[2];
            //DEBUG
            System.out.println("CF: " + tableName);
            ctrl.deleteTable(tableName);
        }
        //Single Row
        else if (command[2].equalsIgnoreCase("FROM"))
        {
            tableName = command[3];
            int PK = Integer.parseInt(command[1]);
            Controller.deleteRow(tableName, PK);
        }
        else
        {
            throw new GeneralSyntaxException("ERROR: Re-check DELETE command syntax");
        }
    }

    /**
     * callInsert: operates much like callCreateTable. Grabs the table name,
     * fills two string lists with the field names and the values (as text) from
     * the parenthesised "field value" pairs, then passes all three to
     * Controller.insertTable.
     */
    public void callInsert(String[] command)
    {
        String tableName = command[1];
        if (tableName.contains("("))
        {
            String[] s = tableName.split("\\(");
            tableName = s[0];
        }
        ArrayList<String> values = new ArrayList<String>();
        ArrayList<String> fNames = new ArrayList<String>();
        String data = fullCommand.split("\\(")[1];
        data = data.split("\\)")[0];
        data = data.trim();
        String dataArr[] = data.split(",");
        for (int i = 0; i < dataArr.length; i++)
        {
            dataArr[i] = dataArr[i].trim();
            String tempArr[] = dataArr[i].split(" ");
            fNames.add(tempArr[0]);
            values.add(tempArr[1]);
        }
        Controller.insertTable(tableName, fNames, values);
    }

    /**
     * callUpdate: updates a single field of a single row. Token positions imply
     * a fixed shape (pk at index 1, table at 3, field at 5, value at 7), e.g.
     * "UPDATE [pk] IN [table] SET [field] TO [value]" - TODO confirm keywords.
     */
    public void callUpdate(String[] command)
    {
        String tableName = command[3];
        int PK = Integer.parseInt(command[1]);
        String fName = command[5];
        String value = command[7];
        Controller.updateField(tableName, PK, fName, value);
    }

    /**
     * callSelect: parses the column list, FROM table, optional INNER JOIN
     * table, WHERE clause and ORDERBY clause into a CommandSet, then hands it
     * to ctrl.doSelect().
     *
     * @throws GeneralSyntaxException if INNER is not followed by JOIN
     */
    public void callSelect(List<String> command) throws GeneralSyntaxException
    {
        ArrayList<String> colNames = new ArrayList<String>();
        String joinTableName = "";
        boolean whereControl = false;
        boolean orderControl = false;
        //DEBUG MESSAGE
        System.out.println(command);
        int tNameIDX = command.indexOf("FROM") + 1;
        //DEBUG MESSAGE
        System.out.println("INDEX IS " + tNameIDX);
        String tableName = command.get(tNameIDX);
        if (command.get(1).equalsIgnoreCase("*"))
        {
            colNames.add(command.get(1));
            //DEBUG MESSAGE
            System.out.println("SELECT ALL DETECTED");
        }
        else
        {
            for (int i = 1; i < tNameIDX - 1; i++)
            {
                String temp = command.get(i);
                String temp2 = temp.split(",")[0];
                // BUG FIX: String.trim() returns a new string; the original
                // discarded the result, leaving whitespace in column names.
                temp2 = temp2.trim();
                colNames.add(temp2);
            }
        }
        if (command.contains("INNER"))
        {
            int idx = command.indexOf("INNER");
            if (!command.get(idx + 1).equalsIgnoreCase("JOIN"))
            {
                throw new GeneralSyntaxException("ERROR: Check syntax on Inner Join!");
            }
            joinTableName = command.get(idx + 2);
        }
        if (command.contains("WHERE"))
        {
            whereControl = true;
        }
        if (command.contains("ORDERBY"))
        {
            orderControl = true;
        }
        //DEBUG MESSAGE
        System.out.println("FULL COMMAND: " + command);
        System.out.println("TABLE NAME: " + tableName);
        System.out.println("JOIN TABLE NAME: " + joinTableName);
        System.out.println("FIELDS: " + colNames);
        String testcon = fetchWhere(command);
        System.out.println("WHERE CONDITION: " + testcon);
        String testfield = fetchField(command);
        System.out.println("ORDERBY FIELD: " + testfield);
        boolean testDir = fetchDir(command);
        System.out.println("ORDERBY BOOL: " + testDir);
        // Bundle everything the SELECT needs and hand it to the controller.
        CommandSet selectC = new CommandSet();
        selectC.fullCommand = command;
        selectC.tableName = tableName;
        selectC.joinTableName = joinTableName;
        selectC.colNames = colNames;
        if (whereControl)
            selectC.whereC = fetchWhere(command);
        else
            selectC.whereC = "";
        if (orderControl)
            selectC.orderC = fetchField(command);
        else
            selectC.orderC = "";
        selectC.orderDir = fetchDir(command);
        ctrl.doSelect(selectC);
    }

    //FOLLOWING SECTION FOR SELECT FILTERING

    /**
     * fetchWhere: returns the raw text between WHERE and ORDERBY (or the end
     * of the command) with a trailing space per token.
     */
    public String fetchWhere(List<String> command)
    {
        String condition = "";
        int start = command.indexOf("WHERE") + 1;
        int end;
        if (command.contains("ORDERBY"))
        {
            end = command.indexOf("ORDERBY");
            // BUG FIX: was '||', which warned whenever EITHER keyword was
            // missing - i.e. always, since a query supplies at most one of
            // ASC/DESC. Warn only when neither direction is given.
            if (!command.contains("DESC") && !command.contains("ASC")) {
                JOptionPane.showMessageDialog(null,"Please add \"ASC\" for ascending order or \"DESC\" for descending order");
            }
        }
        else
        {
            end = command.size();
        }
        for (int i = start; i < end; i++)
        {
            condition += command.get(i) + " ";
        }
        return condition;
    }

    /**
     * fetchField: returns the token immediately after ORDERBY. If ORDERBY is
     * absent this degrades to command.get(0) (indexOf returns -1) - callers
     * only use the result when ORDERBY is present.
     */
    public String fetchField(List<String> command)
    {
        int idx = command.indexOf("ORDERBY") + 1;
        String fieldName = command.get(idx);
        return fieldName;
    }

    /**
     * fetchDir: returns true for DESC (descending), false otherwise, stripping
     * a trailing ';' from the direction token.
     */
    public boolean fetchDir(List<String> command)
    {
        boolean sortDir;
        int idx = command.indexOf("ORDERBY") + 2;
        String direction = command.get(idx);
        if (direction.contains(";"))
        {
            String s[] = direction.split(";");
            direction = s[0];
        }
        if (direction.equalsIgnoreCase("DESC"))
        {
            sortDir = true;
        }
        else
        {
            sortDir = false;
        }
        return sortDir;
    }

    //FOLLOWING SECTION FOR ERROR HANDLING

    //This exception for if the user inputs a table or field name that is a reserved word
    class ReservedWordException extends Exception
    {
        public ReservedWordException(String message)
        {
            super(message);
        }
    }

    //This exception for if the user inputs a non-supported datatype
    class DataTypeException extends Exception
    {
        public DataTypeException(String message)
        {
            super(message);
        }
    }

    //This exception for general errors
    class GeneralSyntaxException extends Exception
    {
        public GeneralSyntaxException(String message)
        {
            super(message);
        }
    }
}
// Plain value holder bundling everything a SELECT needs; filled by
// CommandFetch.callSelect and consumed by Controller.doSelect.
class CommandSet{
    List<String> fullCommand;   // the tokenized SELECT command, as received
    String tableName ="";       // table named after FROM
    String joinTableName ="";   // table named after INNER JOIN; "" when no join
    ArrayList<String> colNames; // requested columns; a single "*" means select-all
    String whereC ="";          // raw WHERE condition text; "" when absent
    String orderC ="";          // ORDERBY field name; "" when absent
    boolean orderDir = false;   // true = descending (DESC), false = ascending
}
|
package net.simonvt.widget;
import net.simonvt.menudrawer.R;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.VelocityTracker;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.Interpolator;
public abstract class MenuDrawer extends ViewGroup {
/**
* Callback interface for changing state of the drawer.
*/
public interface OnDrawerStateChangeListener {
/**
* Called when the drawer state changes.
*
* @param oldState The old drawer state.
* @param newState The new drawer state.
*/
void onDrawerStateChange(int oldState, int newState);
}
/**
* Tag used when logging.
*/
private static final String TAG = "MenuDrawer";
/**
* Indicates whether debug code should be enabled.
*/
private static final boolean DEBUG = false;
/**
* Key used when saving menu visibility state.
*/
private static final String STATE_MENU_VISIBLE = "net.simonvt.menudrawer.view.menu.menuVisible";
/**
* The time between each frame when animating the drawer.
*/
protected static final int ANIMATION_DELAY = 1000 / 60;
/**
* Interpolator used for stretching/retracting the arrow indicator.
*/
protected static final Interpolator ARROW_INTERPOLATOR = new AccelerateInterpolator();
/**
* Interpolator used for peeking at the drawer.
*/
private static final Interpolator PEEK_INTERPOLATOR = new PeekInterpolator();
/**
* Interpolator used when animating the drawer open/closed.
*/
private static final Interpolator SMOOTH_INTERPOLATOR = new SmoothInterpolator();
/**
* Default delay from {@link #peekDrawer()} is called until first animation is run.
*/
private static final long DEFAULT_PEEK_START_DELAY = 5000;
/**
* Default delay between each subsequent animation, after {@link #peekDrawer()} has been called.
*/
private static final long DEFAULT_PEEK_DELAY = 10000;
/**
* The duration of the peek animation.
*/
private static final int PEEK_DURATION = 5000;
/**
* The maximum touch area width of the drawer in dp.
*/
private static final int MAX_DRAG_BEZEL_DP = 24;
/**
* The maximum animation duration.
*/
private static final int DURATION_MAX = 600;
/**
* The maximum alpha of the dark menu overlay used for dimming the menu.
*/
protected static final int MAX_MENU_OVERLAY_ALPHA = 185;
/**
* Drag mode for sliding only the content view.
*/
public static final int MENU_DRAG_CONTENT = 0;
/**
* Drag mode for sliding the entire window.
*/
public static final int MENU_DRAG_WINDOW = 1;
/**
* Position the menu to the left of the content.
*/
public static final int MENU_POSITION_LEFT = 0;
/**
* Position the menu to the right of the content.
*/
public static final int MENU_POSITION_RIGHT = 1;
/**
* Indicates that the drawer is currently closed.
*/
public static final int STATE_CLOSED = 0;
/**
* Indicates that the drawer is currently closing.
*/
public static final int STATE_CLOSING = 1;
/**
* Indicates that the drawer is currently being dragged by the user.
*/
public static final int STATE_DRAGGING = 2;
/**
* Indicates that the drawer is currently opening.
*/
public static final int STATE_OPENING = 4;
/**
* Indicates that the drawer is currently open.
*/
public static final int STATE_OPEN = 8;
/**
* Distance in dp from closed position from where the drawer is considered closed with regards to touch events.
*/
private static final int CLOSE_ENOUGH = 3;
/**
* Indicates whether to use {@link View#setTranslationX(float)} when positioning views.
*/
static final boolean USE_TRANSLATIONS = Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB;
/**
* Drawable used as menu overlay.
*/
protected Drawable mMenuOverlay;
/**
* Defines whether the drop shadow is enabled.
*/
private boolean mDropShadowEnabled;
/**
* Drawable used as content drop shadow onto the menu.
*/
protected Drawable mDropShadowDrawable;
/**
* The width of the content drop shadow.
*/
protected int mDropShadowWidth;
/**
* Arrow bitmap used to indicate the active view.
*/
protected Bitmap mArrowBitmap;
/**
* The currently active view.
*/
protected View mActiveView;
/**
* Position of the active view. This is compared to View#getTag(R.id.mdActiveViewPosition) when drawing the arrow.
*/
protected int mActivePosition;
/**
* Used when reading the position of the active view.
*/
protected Rect mActiveRect = new Rect();
/**
* The parent of the menu view.
*/
protected BuildLayerFrameLayout mMenuContainer;
/**
* The parent of the content view.
*/
protected BuildLayerFrameLayout mContentView;
/**
* The width of the menu.
*/
protected int mMenuWidth;
/**
* Indicates whether the menu width has been set in the theme.
*/
private boolean mMenuWidthFromTheme;
/**
* Current left position of the content.
*/
protected int mOffsetPixels;
/**
* Indicates whether the menu is currently visible.
*/
protected boolean mMenuVisible;
/**
* The drag mode of the drawer. Can be either {@link #MENU_DRAG_CONTENT} or {@link #MENU_DRAG_WINDOW}.
*/
private int mDragMode;
/**
* The current drawer state.
*
* @see #STATE_CLOSED
* @see #STATE_CLOSING
* @see #STATE_DRAGGING
* @see #STATE_OPENING
* @see #STATE_OPEN
*/
private int mDrawerState = STATE_CLOSED;
/**
* The maximum touch area width of the drawer in px.
*/
protected int mMaxDragBezelSize;
/**
* The touch area width of the drawer in px.
*/
protected int mDragBezelSize;
/**
* Indicates whether the drawer is currently being dragged.
*/
protected boolean mIsDragging;
/**
* Slop before starting a drag.
*/
protected final int mTouchSlop;
/**
* The initial X position of a drag.
*/
protected float mInitialMotionX;
/**
* The last X position of a drag.
*/
protected float mLastMotionX = -1;
/**
* The last Y position of a drag.
*/
protected float mLastMotionY = -1;
/**
* Runnable used when animating the drawer open/closed.
*/
private final Runnable mDragRunnable = new Runnable() {
public void run() {
postAnimationInvalidate();
}
};
/**
* Runnable used when the peek animation is running.
*/
protected final Runnable mPeekRunnable = new Runnable() {
@Override
public void run() {
peekDrawerInvalidate();
}
};
/**
* Runnable used for first call to {@link #startPeek()} after {@link #peekDrawer()} has been called.
*/
private Runnable mPeekStartRunnable;
/**
* Default delay between each subsequent animation, after {@link #peekDrawer()} has been called.
*/
protected long mPeekDelay;
/**
* Scroller used when animating the drawer open/closed.
*/
private Scroller mScroller;
/**
* Scroller used for the peek drawer animation.
*/
protected Scroller mPeekScroller;
/**
* Velocity tracker used when animating the drawer open/closed after a drag.
*/
protected VelocityTracker mVelocityTracker;
/**
* Maximum velocity allowed when animating the drawer open/closed.
*/
protected int mMaxVelocity;
/**
* Listener used to dispatch state change events.
*/
private OnDrawerStateChangeListener mOnDrawerStateChangeListener;
/**
* Indicates whether the menu should be offset when dragging the drawer.
*/
protected boolean mOffsetMenu = true;
/**
* Distance in px from closed position from where the drawer is considered closed with regards to touch events.
*/
protected int mCloseEnough;
/**
* Indicates whether the current layer type is {@link View#LAYER_TYPE_HARDWARE}.
*/
private boolean mLayerTypeHardware;
/**
* Indicates whether to use {@link View#LAYER_TYPE_HARDWARE} when animating the drawer.
*/
private boolean mHardwareLayersEnabled = true;
/** Programmatic construction; uses a null attribute set. */
public MenuDrawer(Context context) {
    this(context, null);
}

/** XML inflation; delegates with the default menuDrawerStyle attribute. */
public MenuDrawer(Context context, AttributeSet attrs) {
    this(context, attrs, R.attr.menuDrawerStyle);
}

/**
 * Full constructor: reads the styled attributes (backgrounds, menu width,
 * arrow drawable, drop shadow), builds the menu and content containers, and
 * initialises scrollers and touch configuration.
 */
public MenuDrawer(Context context, AttributeSet attrs, int defStyle) {
    super(context, attrs, defStyle);
    setWillNotDraw(false); // this ViewGroup draws overlay/shadow/arrow itself
    setFocusable(false);
    TypedArray a =
            context.obtainStyledAttributes(attrs, R.styleable.MenuDrawer, defStyle, R.style.Widget_MenuDrawer);
    final Drawable contentBackground = a.getDrawable(R.styleable.MenuDrawer_mdContentBackground);
    final Drawable menuBackground = a.getDrawable(R.styleable.MenuDrawer_mdMenuBackground);
    mMenuWidth = a.getDimensionPixelSize(R.styleable.MenuDrawer_mdMenuWidth, -1);
    mMenuWidthFromTheme = mMenuWidth != -1; // -1 sentinel: width is computed in onMeasure instead
    final int arrowResId = a.getResourceId(R.styleable.MenuDrawer_mdArrowDrawable, 0);
    if (arrowResId != 0) {
        mArrowBitmap = BitmapFactory.decodeResource(getResources(), arrowResId);
    }
    mDropShadowEnabled = a.getBoolean(R.styleable.MenuDrawer_mdDropShadowEnabled, true);
    final int dropShadowColor = a.getColor(R.styleable.MenuDrawer_mdDropShadowColor, 0xFF000000);
    // NOTE(review): setDropShadowColor is abstract and thus an overridable
    // call from a constructor - subclasses run before their own fields exist.
    setDropShadowColor(dropShadowColor);
    mDropShadowWidth = a.getDimensionPixelSize(R.styleable.MenuDrawer_mdDropShadowWidth, dpToPx(6));
    a.recycle();
    mMenuContainer = new BuildLayerFrameLayout(context);
    mMenuContainer.setId(R.id.md__menu);
    mMenuContainer.setBackgroundDrawable(menuBackground);
    addView(mMenuContainer);
    mContentView = new NoClickThroughFrameLayout(context);
    mContentView.setId(R.id.md__content);
    mContentView.setBackgroundDrawable(contentBackground);
    addView(mContentView);
    mMenuOverlay = new ColorDrawable(0xFF000000);
    final ViewConfiguration configuration = ViewConfiguration.get(context);
    mTouchSlop = configuration.getScaledTouchSlop();
    mMaxVelocity = configuration.getScaledMaximumFlingVelocity();
    mScroller = new Scroller(context, SMOOTH_INTERPOLATOR);
    mPeekScroller = new Scroller(context, PEEK_INTERPOLATOR);
    mMaxDragBezelSize = dpToPx(MAX_DRAG_BEZEL_DP);
    mCloseEnough = dpToPx(CLOSE_ENOUGH);
}
/** Converts a dp value into raw pixels, rounded to the nearest whole pixel. */
private int dpToPx(int dp) {
    final float density = getResources().getDisplayMetrics().density;
    return (int) (density * dp + 0.5f);
}
/**
 * Toggles the menu open and close with animation.
 */
public void toggleMenu() {
    toggleMenu(true); // animated variant is the default
}
/**
 * Toggles the menu open and close.
 *
 * @param animate Whether open/close should be animated.
 */
public void toggleMenu(boolean animate) {
    final int state = mDrawerState;
    if (state == STATE_OPEN || state == STATE_OPENING) {
        closeMenu(animate);
        return;
    }
    if (state == STATE_CLOSED || state == STATE_CLOSING) {
        openMenu(animate);
    }
    // STATE_DRAGGING is deliberately ignored, as before.
}
/**
 * Animates the menu open.
 */
public void openMenu() {
    openMenu(true);
}

/**
 * Opens the menu.
 *
 * @param animate Whether open/close should be animated.
 */
public void openMenu(boolean animate) {
    animateOffsetTo(mMenuWidth, 0, animate); // fully open = offset by the menu width
}

/**
 * Animates the menu closed.
 */
public void closeMenu() {
    closeMenu(true);
}

/**
 * Closes the menu.
 *
 * @param animate Whether open/close should be animated.
 */
public void closeMenu(boolean animate) {
    animateOffsetTo(0, 0, animate); // fully closed = zero offset
}

/**
 * Indicates whether the menu is currently visible.
 *
 * @return True if the menu is open, false otherwise.
 */
public boolean isMenuVisible() {
    return mMenuVisible;
}
/**
 * Set the active view. If the mdArrowDrawable attribute is set, this View will have an arrow drawn next to it.
 *
 * @param v The active view.
 * @param position Optional position, usually used with ListView. v.setTag(R.id.mdActiveViewPosition, position)
 * must be called first.
 */
public void setActiveView(View v, int position) {
    mActiveView = v;
    mActivePosition = position;
    invalidate(); // redraw so the arrow tracks the new active view
}
/**
 * Enables or disables offsetting the menu when dragging the drawer.
 *
 * @param offsetMenu True to offset the menu, false otherwise.
 */
public void setOffsetMenuEnabled(boolean offsetMenu) {
    if (mOffsetMenu == offsetMenu) {
        return; // unchanged - avoid a needless layout pass
    }
    mOffsetMenu = offsetMenu;
    requestLayout();
    invalidate();
}
/**
 * Indicates whether the menu is being offset when dragging the drawer.
 *
 * @return True if the menu is being offset, false otherwise.
 */
public boolean getOffsetMenuEnabled() {
    return mOffsetMenu;
}

/**
 * Returns the state of the drawer. Can be one of {@link #STATE_CLOSED}, {@link #STATE_CLOSING},
 * {@link #STATE_DRAGGING}, {@link #STATE_OPENING} or {@link #STATE_OPEN}.
 *
 * @return The drawers state.
 */
public int getDrawerState() {
    return mDrawerState;
}

/**
 * Register a callback to be invoked when the drawer state changes.
 *
 * @param listener The callback that will run.
 */
public void setOnDrawerStateChangeListener(OnDrawerStateChangeListener listener) {
    mOnDrawerStateChangeListener = listener;
}

/**
 * Defines whether the drop shadow is enabled.
 *
 * @param enabled Whether the drop shadow is enabled.
 */
public void setDropShadowEnabled(boolean enabled) {
    mDropShadowEnabled = enabled;
    invalidate(); // redraw with/without the shadow
}

/**
 * Sets the color of the drop shadow.
 *
 * @param color The color of the drop shadow.
 */
public abstract void setDropShadowColor(int color);

/**
 * Sets the width of the drop shadow.
 *
 * @param width The width of the drop shadow in px.
 */
public void setDropShadowWidth(int width) {
    mDropShadowWidth = width;
    invalidate();
}

/**
 * Animates the drawer slightly open until the user opens the drawer.
 */
public void peekDrawer() {
    peekDrawer(DEFAULT_PEEK_START_DELAY, DEFAULT_PEEK_DELAY);
}

/**
 * Animates the drawer slightly open. If delay is larger than 0, this happens until the user opens the drawer.
 *
 * @param delay The delay (in milliseconds) between each run of the animation. If 0, this animation is only run
 * once.
 */
public void peekDrawer(long delay) {
    peekDrawer(DEFAULT_PEEK_START_DELAY, delay);
}
/**
 * Animates the drawer slightly open. If delay is larger than 0, this happens until the user opens the drawer.
 *
 * @param startDelay The delay (in milliseconds) until the animation is first run.
 * @param delay The delay (in milliseconds) between each run of the animation. If 0, this animation is only run
 * once.
 */
public void peekDrawer(final long startDelay, final long delay) {
    // BUG FIX: exception messages read "lager" instead of "larger".
    if (startDelay < 0) {
        throw new IllegalArgumentException("startDelay must be zero or larger.");
    }
    if (delay < 0) {
        throw new IllegalArgumentException("delay must be zero or larger");
    }
    // Cancel any pending peek so only the most recent request runs.
    removeCallbacks(mPeekRunnable);
    removeCallbacks(mPeekStartRunnable);
    mPeekDelay = delay;
    mPeekStartRunnable = new Runnable() {
        @Override
        public void run() {
            startPeek();
        }
    };
    postDelayed(mPeekStartRunnable, startDelay);
}
/**
 * Enables or disables the use of {@link View#LAYER_TYPE_HARDWARE} when animating views.
 *
 * @param enabled Whether hardware layers are enabled.
 */
public void setHardwareLayerEnabled(boolean enabled) {
    if (enabled == mHardwareLayersEnabled) {
        return; // no change
    }
    mHardwareLayersEnabled = enabled;
    mMenuContainer.setHardwareLayersEnabled(enabled);
    mContentView.setHardwareLayersEnabled(enabled);
    stopLayerTranslation(); // drop any hardware layer currently in use
}
/**
 * Sets the drawer state.
 *
 * @param state The drawer state. Must be one of {@link #STATE_CLOSED}, {@link #STATE_CLOSING},
 *              {@link #STATE_DRAGGING}, {@link #STATE_OPENING} or {@link #STATE_OPEN}.
 */
protected void setDrawerState(int state) {
    if (state == mDrawerState) {
        return; // no transition, nothing to report
    }
    final int previousState = mDrawerState;
    mDrawerState = state;
    if (mOnDrawerStateChangeListener != null) {
        mOnDrawerStateChangeListener.onDrawerStateChange(previousState, state);
    }
    if (DEBUG) logDrawerState(state);
}
/** Debug helper: writes a readable name for the given state to logcat. */
private void logDrawerState(int state) {
    final String label;
    if (state == STATE_CLOSED) {
        label = "[DrawerState] STATE_CLOSED";
    } else if (state == STATE_CLOSING) {
        label = "[DrawerState] STATE_CLOSING";
    } else if (state == STATE_DRAGGING) {
        label = "[DrawerState] STATE_DRAGGING";
    } else if (state == STATE_OPENING) {
        label = "[DrawerState] STATE_OPENING";
    } else if (state == STATE_OPEN) {
        label = "[DrawerState] STATE_OPEN";
    } else {
        label = "[DrawerState] Unknown: " + state;
    }
    Log.d(TAG, label);
}
/**
 * Sets the drawer drag mode. Can be either {@link #MENU_DRAG_CONTENT} or {@link #MENU_DRAG_WINDOW}.
 *
 * @param dragMode The drag mode.
 */
public void setDragMode(int dragMode) {
    mDragMode = dragMode;
}

/**
 * Draws the children (menu + content) first, then layers the menu overlay,
 * the optional drop shadow and the optional active-view arrow on top.
 */
@Override
protected void dispatchDraw(Canvas canvas) {
    super.dispatchDraw(canvas);
    final int offsetPixels = mOffsetPixels; // snapshot; may change during animation
    drawMenuOverlay(canvas, offsetPixels);
    if (mDropShadowEnabled) drawDropShadow(canvas, offsetPixels);
    if (mArrowBitmap != null) drawArrow(canvas, offsetPixels);
}
/**
 * Called when the content drop shadow should be drawn.
 *
 * @param canvas The canvas on which to draw.
 * @param offsetPixels Value in pixels indicating the offset.
 */
protected abstract void drawDropShadow(Canvas canvas, int offsetPixels);

/**
 * Called when the menu overlay should be drawn.
 *
 * @param canvas The canvas on which to draw.
 * @param offsetPixels Value in pixels indicating the offset.
 */
protected abstract void drawMenuOverlay(Canvas canvas, int offsetPixels);

/**
 * Called when the arrow indicator should be drawn.
 *
 * @param canvas The canvas on which to draw.
 * @param offsetPixels Value in pixels indicating the offset.
 */
protected abstract void drawArrow(Canvas canvas, int offsetPixels);

/**
 * Sets the number of pixels the content should be offset.
 *
 * @param offsetPixels The number of pixels to offset the content by.
 */
protected void setOffsetPixels(int offsetPixels) {
    if (offsetPixels != mOffsetPixels) {
        // NOTE(review): subclasses are notified BEFORE mOffsetPixels is
        // updated, so onOffsetPixelsChanged still sees the old value through
        // the field - confirm this ordering is intentional.
        onOffsetPixelsChanged(offsetPixels);
        mOffsetPixels = offsetPixels;
        mMenuVisible = offsetPixels != 0; // any non-zero offset exposes the menu
    }
}

/**
 * Called when the number of pixels the content should be offset by has changed.
 *
 * @param offsetPixels The number of pixels to offset the content by.
 */
protected abstract void onOffsetPixelsChanged(int offsetPixels);
/**
 * If possible, set the layer type to {@link View#LAYER_TYPE_HARDWARE}.
 * Only applies on Honeycomb+ (USE_TRANSLATIONS) with hardware layers enabled,
 * and is idempotent via the mLayerTypeHardware flag.
 */
protected void startLayerTranslation() {
    if (USE_TRANSLATIONS && mHardwareLayersEnabled && !mLayerTypeHardware) {
        mLayerTypeHardware = true;
        mContentView.setLayerType(View.LAYER_TYPE_HARDWARE, null);
        mMenuContainer.setLayerType(View.LAYER_TYPE_HARDWARE, null);
    }
}

/**
 * If the current layer type is {@link View#LAYER_TYPE_HARDWARE}, this will set it to
 * {@link View#LAYER_TYPE_NONE}, releasing the hardware layers after an animation.
 */
private void stopLayerTranslation() {
    if (mLayerTypeHardware) {
        mLayerTypeHardware = false;
        mContentView.setLayerType(View.LAYER_TYPE_NONE, null);
        mMenuContainer.setLayerType(View.LAYER_TYPE_NONE, null);
    }
}
/**
 * Measures the menu container (menu width x full height) and the content view
 * (full width x full height). Both parent specs must be EXACTLY - this drawer
 * is meant to fill the window.
 *
 * BUG FIX: the child *height* specs were previously derived from
 * widthMeasureSpec. That happened to work only because EXACTLY mode is
 * enforced above (the explicit child dimension wins); heightMeasureSpec is
 * the correct spec to pass.
 */
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    final int widthMode = MeasureSpec.getMode(widthMeasureSpec);
    final int heightMode = MeasureSpec.getMode(heightMeasureSpec);
    if (widthMode != MeasureSpec.EXACTLY || heightMode != MeasureSpec.EXACTLY) {
        throw new IllegalStateException("Must measure with an exact size");
    }
    final int width = MeasureSpec.getSize(widthMeasureSpec);
    final int height = MeasureSpec.getSize(heightMeasureSpec);
    // No themed width: default the menu to 80% of the available width.
    if (!mMenuWidthFromTheme) mMenuWidth = (int) (width * 0.8f);
    // -1 sentinel set before first measure: start fully open.
    if (mOffsetPixels == -1) setOffsetPixels(mMenuWidth);
    final int menuWidthMeasureSpec = getChildMeasureSpec(widthMeasureSpec, 0, mMenuWidth);
    final int menuHeightMeasureSpec = getChildMeasureSpec(heightMeasureSpec, 0, height);
    mMenuContainer.measure(menuWidthMeasureSpec, menuHeightMeasureSpec);
    final int contentWidthMeasureSpec = getChildMeasureSpec(widthMeasureSpec, 0, width);
    final int contentHeightMeasureSpec = getChildMeasureSpec(heightMeasureSpec, 0, height);
    mContentView.measure(contentWidthMeasureSpec, contentHeightMeasureSpec);
    setMeasuredDimension(width, height);
    final int measuredWidth = getMeasuredWidth();
    // Touch bezel: a tenth of the width, capped at the themed maximum.
    mDragBezelSize = Math.min(measuredWidth / 10, mMaxDragBezelSize);
}
@Override
protected boolean fitSystemWindows(Rect insets) {
    // When the entire window is dragged, keep the menu clear of the status bar.
    final boolean menuDragsWindow = mDragMode == MENU_DRAG_WINDOW;
    if (menuDragsWindow) {
        mMenuContainer.setPadding(0, insets.top, 0, 0);
    }
    return super.fitSystemWindows(insets);
}
/**
 * Ends an in-progress drag: clears the dragging flag and releases the
 * velocity tracker back to its pool.
 */
private void endDrag() {
    mIsDragging = false;
    final VelocityTracker tracker = mVelocityTracker;
    if (tracker != null) {
        mVelocityTracker = null;
        tracker.recycle();
    }
}
/**
 * Stops ongoing animation of the drawer: cancels the pending animation
 * frame, aborts the scroller, and drops the hardware layer that was used
 * during the translation.
 */
protected void stopAnimation() {
removeCallbacks(mDragRunnable);
mScroller.abortAnimation();
stopLayerTranslation();
}
/**
 * Called when a drawer animation has successfully completed: snaps the
 * offset to the scroller's final position and settles the drawer state.
 */
private void completeAnimation() {
    mScroller.abortAnimation();
    final int restingOffset = mScroller.getFinalX();
    setOffsetPixels(restingOffset);
    setDrawerState(restingOffset == 0 ? STATE_CLOSED : STATE_OPEN);
    stopLayerTranslation();
}
/**
 * Moves the drawer to the position passed.
 *
 * @param position The position the content is moved to.
 * @param velocity Optional velocity if called by releasing a drag event.
 * @param animate  Whether the move is animated.
 */
protected void animateOffsetTo(int position, int velocity, boolean animate) {
    endDrag();
    endPeek();

    final int startX = mOffsetPixels;
    final int dx = position - startX;

    // Nothing to animate: jump straight to the target and settle the state.
    if (dx == 0 || !animate) {
        setOffsetPixels(position);
        setDrawerState(position == 0 ? STATE_CLOSED : STATE_OPEN);
        stopLayerTranslation();
        return;
    }

    // Derive the duration from the fling velocity when available, otherwise
    // from the fraction of the menu width that remains to be travelled.
    final int speed = Math.abs(velocity);
    int duration;
    if (speed > 0) {
        duration = 4 * Math.round(1000.f * Math.abs((float) dx / speed));
    } else {
        duration = (int) (600.f * Math.abs((float) dx / mMenuWidth));
    }
    duration = Math.min(duration, DURATION_MAX);

    // The scroll itself is identical in both directions; only the reported
    // drawer state differs.
    setDrawerState(dx > 0 ? STATE_OPENING : STATE_CLOSING);
    mScroller.startScroll(startX, 0, dx, 0, duration);
    startLayerTranslation();
    postAnimationInvalidate();
}
/**
 * Callback when each frame in the drawer animation should be drawn:
 * advances the scroller one step, schedules the next frame if the target
 * has not been reached, and finalizes the animation otherwise.
 */
private void postAnimationInvalidate() {
    if (!mScroller.computeScrollOffset()) {
        completeAnimation();
        return;
    }
    final int newOffset = mScroller.getCurrX();
    if (newOffset != mOffsetPixels) {
        setOffsetPixels(newOffset);
    }
    if (newOffset == mScroller.getFinalX()) {
        completeAnimation();
        return;
    }
    postOnAnimation(mDragRunnable);
}
/**
 * Starts the peek animation, which briefly reveals a third of the menu
 * (see {@code peekDrawerInvalidate()} for the per-frame handling).
 */
protected void startPeek() {
    mPeekScroller.startScroll(0, 0, mMenuWidth / 3, 0, PEEK_DURATION);
    startLayerTranslation();
    peekDrawerInvalidate();
}
/**
 * Callback when each frame in the peek drawer animation should be drawn.
 *
 * Advances the peek scroller one frame. While the scroller is still running,
 * the next frame is scheduled and this method returns early. Once finished,
 * a repeat peek is optionally scheduled after {@code mPeekDelay}, and the
 * drawer is reset to closed via {@code completePeek()}.
 * NOTE(review): the fall-through to completePeek() right after scheduling
 * the delayed restart looks intentional (the drawer closes between peeks),
 * but confirm against the intended peek behavior.
 */
private void peekDrawerInvalidate() {
if (mPeekScroller.computeScrollOffset()) {
final int oldX = mOffsetPixels;
final int x = mPeekScroller.getCurrX();
if (x != oldX) setOffsetPixels(x);
if (!mPeekScroller.isFinished()) {
postOnAnimation(mPeekRunnable);
return;
} else if (mPeekDelay > 0) {
// Schedule the next peek cycle after the configured delay.
mPeekStartRunnable = new Runnable() {
@Override
public void run() {
startPeek();
}
};
postDelayed(mPeekStartRunnable, mPeekDelay);
}
}
completePeek();
}
/**
 * Called when the peek drawer animation has successfully completed:
 * aborts the scroller and restores the fully-closed resting state.
 */
private void completePeek() {
mPeekScroller.abortAnimation();
setOffsetPixels(0);
setDrawerState(STATE_CLOSED);
stopLayerTranslation();
}
/**
 * Stops ongoing peek drawer animation: cancels both the delayed restart
 * and the per-frame callback, and drops the hardware layer used while
 * animating.
 */
protected void endPeek() {
removeCallbacks(mPeekStartRunnable);
removeCallbacks(mPeekRunnable);
stopLayerTranslation();
}
/**
 * Posts {@code action} to run on the next animation frame. Native
 * frame-callback support arrived in Jelly Bean; older releases fall back
 * to a fixed-delay post.
 */
@Override
public void postOnAnimation(Runnable action) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
        postDelayed(action, ANIMATION_DELAY);
    } else {
        super.postOnAnimation(action);
    }
}
/**
 * @return true if the drawer's current offset is within the "close enough"
 *         threshold of the fully-closed position.
 */
protected boolean isCloseEnough() {
return mOffsetPixels <= mCloseEnough;
}
/**
 * Returns true if the touch event occurs over the content.
 *
 * @param ev The motion event.
 * @return True if the touch event occurred over the content, false otherwise.
 */
protected abstract boolean isContentTouch(MotionEvent ev);
/**
 * Returns true if dragging the content should be allowed for an
 * {@link MotionEvent#ACTION_DOWN} event.
 *
 * @param ev The motion event.
 * @return True if dragging the content should be allowed, false otherwise.
 */
protected abstract boolean onDownAllowDrag(MotionEvent ev);
/**
 * Returns true if dragging the content should be allowed for an
 * {@link MotionEvent#ACTION_MOVE} event.
 *
 * @param ev The motion event.
 * @return True if dragging the content should be allowed, false otherwise.
 */
protected abstract boolean onMoveAllowDrag(MotionEvent ev);
/**
 * Called for each move event while dragging the content is in progress.
 *
 * @param dx The X difference between the last motion event and the current motion event.
 */
protected abstract void onMoveEvent(float dx);
/**
 * Called when {@link MotionEvent#ACTION_UP} or {@link MotionEvent#ACTION_CANCEL} is delivered to
 * {@link MenuDrawer#onTouchEvent(android.view.MotionEvent)}.
 *
 * @param ev The motion event.
 */
protected abstract void onUpEvent(MotionEvent ev);
/**
 * Decides whether the drawer should steal touch events from its children:
 * intercepts everything over the content while the menu is visible, and
 * begins intercepting once a horizontal drag past the touch slop is detected.
 */
@Override
public boolean onInterceptTouchEvent(MotionEvent ev) {
final int action = ev.getAction() & MotionEvent.ACTION_MASK;
// A down press while the drawer is almost closed snaps it fully shut so the
// content receives the gesture cleanly.
if (action == MotionEvent.ACTION_DOWN && mMenuVisible && isCloseEnough()) {
setOffsetPixels(0);
stopAnimation();
endPeek();
setDrawerState(STATE_CLOSED);
}
// Always intercept events over the content while menu is visible.
if (mMenuVisible && isContentTouch(ev)) return true;
// Once a drag has started, steal all subsequent events.
if (action != MotionEvent.ACTION_DOWN) {
if (mIsDragging) return true;
}
switch (action) {
case MotionEvent.ACTION_DOWN: {
mLastMotionX = mInitialMotionX = ev.getX();
mLastMotionY = ev.getY();
final boolean allowDrag = onDownAllowDrag(ev);
if (allowDrag) {
setDrawerState(mMenuVisible ? STATE_OPEN : STATE_CLOSED);
stopAnimation();
endPeek();
mIsDragging = false;
}
break;
}
case MotionEvent.ACTION_MOVE: {
final float x = ev.getX();
final float dx = x - mLastMotionX;
final float xDiff = Math.abs(dx);
final float y = ev.getY();
final float yDiff = Math.abs(y - mLastMotionY);
// Start dragging only for a predominantly horizontal move past the touch slop.
if (xDiff > mTouchSlop && xDiff > yDiff) {
final boolean allowDrag = onMoveAllowDrag(ev);
if (allowDrag) {
setDrawerState(STATE_DRAGGING);
mIsDragging = true;
mLastMotionX = x;
mLastMotionY = y;
}
}
break;
}
/**
 * If the gesture ends this quickly (an up or cancel delivered during
 * intercept), just snap the drawer to whichever resting position is closest.
 * */
case MotionEvent.ACTION_CANCEL:
case MotionEvent.ACTION_UP: {
final int offsetPixels = mOffsetPixels;
animateOffsetTo(offsetPixels > mMenuWidth / 2 ? mMenuWidth : 0, 0, true);
break;
}
}
// Track velocity from the very first event so a later fling has full history.
if (mVelocityTracker == null) mVelocityTracker = VelocityTracker.obtain();
mVelocityTracker.addMovement(ev);
return mIsDragging;
}
/**
 * Handles touch events once the drawer owns the gesture: detects the start
 * of a horizontal drag, forwards per-frame deltas to {@code onMoveEvent},
 * and delegates release handling to {@code onUpEvent}.
 */
@Override
public boolean onTouchEvent(MotionEvent ev) {
final int action = ev.getAction() & MotionEvent.ACTION_MASK;
if (mVelocityTracker == null) mVelocityTracker = VelocityTracker.obtain();
mVelocityTracker.addMovement(ev);
switch (action) {
case MotionEvent.ACTION_DOWN: {
mLastMotionX = mInitialMotionX = ev.getX();
mLastMotionY = ev.getY();
final boolean allowDrag = onDownAllowDrag(ev);
if (allowDrag) {
stopAnimation();
endPeek();
startLayerTranslation();
}
break;
}
case MotionEvent.ACTION_MOVE: {
if (!mIsDragging) {
final float x = ev.getX();
final float xDiff = Math.abs(x - mLastMotionX);
final float y = ev.getY();
final float yDiff = Math.abs(y - mLastMotionY);
// Start a drag on a mostly-horizontal move beyond the touch slop.
if (xDiff > mTouchSlop && xDiff > yDiff) {
final boolean allowDrag = onMoveAllowDrag(ev);
if (allowDrag) {
setDrawerState(STATE_DRAGGING);
mIsDragging = true;
// Re-anchor one slop inside the gesture so the content does not
// jump by the slop distance on the first dragged frame.
mLastMotionX = x - mInitialMotionX > 0
? mInitialMotionX + mTouchSlop
: mInitialMotionX - mTouchSlop;
}
}
}
if (mIsDragging) {
startLayerTranslation();
final float x = ev.getX();
final float dx = x - mLastMotionX;
mLastMotionX = x;
// Subclasses translate the horizontal delta into a drawer offset.
onMoveEvent(dx);
}
break;
}
case MotionEvent.ACTION_CANCEL:
case MotionEvent.ACTION_UP: {
onUpEvent(ev);
break;
}
}
return true;
}
/**
 * Saves the state of the drawer.
 *
 * @return Returns a Parcelable containing the drawer state.
 */
public Parcelable saveState() {
    final Bundle state = new Bundle();
    state.putBoolean(STATE_MENU_VISIBLE,
            mDrawerState == STATE_OPEN || mDrawerState == STATE_OPENING);
    return state;
}
/**
 * Restores the state of the drawer.
 *
 * @param in A parcelable containing the drawer state.
 */
public void restoreState(Parcelable in) {
    final boolean menuOpen = ((Bundle) in).getBoolean(STATE_MENU_VISIBLE);
    if (menuOpen) {
        setOffsetPixels(mMenuWidth);
        mDrawerState = STATE_OPEN;
    } else {
        setOffsetPixels(0);
        mDrawerState = STATE_CLOSED;
    }
}
}
|
package de.lmu.ifi.dbs.algorithm.clustering;
import de.lmu.ifi.dbs.algorithm.AbstractAlgorithm;
import de.lmu.ifi.dbs.algorithm.Algorithm;
import de.lmu.ifi.dbs.algorithm.result.PartitionResults;
import de.lmu.ifi.dbs.algorithm.result.Result;
import de.lmu.ifi.dbs.data.RealVector;
import de.lmu.ifi.dbs.database.AssociationID;
import de.lmu.ifi.dbs.database.Database;
import de.lmu.ifi.dbs.preprocessing.HiCOPreprocessor;
import de.lmu.ifi.dbs.properties.Properties;
import de.lmu.ifi.dbs.utilities.Description;
import de.lmu.ifi.dbs.utilities.Progress;
import de.lmu.ifi.dbs.utilities.UnableToComplyException;
import de.lmu.ifi.dbs.utilities.Util;
import de.lmu.ifi.dbs.utilities.optionhandling.AttributeSettings;
import de.lmu.ifi.dbs.utilities.optionhandling.ClassParameter;
import de.lmu.ifi.dbs.utilities.optionhandling.ParameterException;
import de.lmu.ifi.dbs.utilities.optionhandling.WrongParameterValueException;
import de.lmu.ifi.dbs.varianceanalysis.LocalPCA;
import java.util.*;
/**
* Algorithm to partition a database according to the correlation dimension of
* its objects and to then perform an arbitrary algorithm over the partitions.
*
* @author Elke Achtert (<a href="mailto:achtert@dbs.ifi.lmu.de">achtert@dbs.ifi.lmu.de</a>)
*/
public class COPAA extends AbstractAlgorithm<RealVector> {
    /**
     * Parameter for preprocessor.
     */
    public static final String PREPROCESSOR_P = "preprocessor";

    /**
     * Description for parameter preprocessor.
     */
    public static final String PREPROCESSOR_D = "preprocessor to derive partition criterion " +
                                               Properties.KDD_FRAMEWORK_PROPERTIES.restrictionString(HiCOPreprocessor.class) +
                                               ".";

    /**
     * Parameter for partition algorithm.
     */
    public static final String PARTITION_ALGORITHM_P = "partAlg";

    /**
     * Description for parameter partition algorithm.
     */
    public static final String PARTITION_ALGORITHM_D = "algorithm to apply to each partition" +
                                                      Properties.KDD_FRAMEWORK_PROPERTIES.restrictionString(Algorithm.class) +
                                                      ".";

    /**
     * Parameter for class of partition database.
     */
    public static final String PARTITION_DATABASE_CLASS_P = "partDB";

    /**
     * Description for parameter partition database.
     */
    public static final String PARTITION_DATABASE_CLASS_D = "database class for each partition " +
                                                            Properties.KDD_FRAMEWORK_PROPERTIES.restrictionString(Database.class) +
                                                            ". If this parameter is not set, the databases of the partitions have the same class as the original database.";

    /**
     * Holds the preprocessor.
     */
    protected HiCOPreprocessor preprocessor;

    /**
     * Holds the partitioning algorithm. Set in {@link #setParameters(String[])}.
     */
    protected Algorithm<RealVector> partitionAlgorithm;

    /**
     * Holds the class of the partition databases (null: same class as input database).
     */
    protected Class<Database> partitionDatabase;

    /**
     * Holds the parameters of the partition databases.
     */
    protected String[] partitionDatabaseParameters;

    /**
     * Holds the result.
     */
    private PartitionResults<RealVector> result;

    /**
     * Sets the specific parameters additionally to the parameters set by the
     * super-class.
     */
    public COPAA() {
        super();
        // parameter preprocessor
        optionHandler.put(PREPROCESSOR_P, new ClassParameter(PREPROCESSOR_P, PREPROCESSOR_D, HiCOPreprocessor.class));
        // parameter partition algorithm
        optionHandler.put(PARTITION_ALGORITHM_P, new ClassParameter(PARTITION_ALGORITHM_P, PARTITION_ALGORITHM_D, Algorithm.class));
        // parameter partition database class (optional: defaults to the input database's class)
        ClassParameter pdc = new ClassParameter(PARTITION_DATABASE_CLASS_P, PARTITION_DATABASE_CLASS_D, Database.class);
        pdc.setOptional(true);
        optionHandler.put(PARTITION_DATABASE_CLASS_P, pdc);
    }

    /**
     * Runs the preprocessor, partitions the database by the correlation
     * dimension of its objects, and applies the partition algorithm to each
     * partition.
     *
     * @see de.lmu.ifi.dbs.algorithm.Algorithm#run(de.lmu.ifi.dbs.database.Database)
     */
    protected void runInTime(Database<RealVector> database) throws IllegalStateException {
        // preprocessing: attaches the LOCAL_PCA association to each object
        if (isVerbose()) {
            verbose("\ndb size = " + database.size());
            verbose("dimensionality = " + database.dimensionality());
        }
        preprocessor.run(database, isVerbose(), isTime());
        // partitioning: group object ids by their correlation dimension
        if (isVerbose()) {
            verbose("\npartitioning...");
        }
        Map<Integer, List<Integer>> partitionMap = new Hashtable<Integer, List<Integer>>();
        Progress partitionProgress = new Progress("Partitioning", database.size());
        int processed = 1;
        for (Iterator<Integer> dbiter = database.iterator(); dbiter.hasNext();) {
            Integer id = dbiter.next();
            Integer corrdim = ((LocalPCA) database.getAssociation(AssociationID.LOCAL_PCA, id)).getCorrelationDimension();
            if (!partitionMap.containsKey(corrdim)) {
                partitionMap.put(corrdim, new ArrayList<Integer>());
            }
            partitionMap.get(corrdim).add(id);
            if (isVerbose()) {
                partitionProgress.setProcessed(processed++);
                progress(partitionProgress);
            }
        }
        if (isVerbose()) {
            partitionProgress.setProcessed(database.size());
            progress(partitionProgress);
            for (Integer corrDim : partitionMap.keySet()) {
                List<Integer> list = partitionMap.get(corrDim);
                verbose("Partition " + corrDim + " = " + list.size() + " objects.");
            }
        }
        // running partition algorithm
        result = runPartitionAlgorithm(database, partitionMap);
    }

    /**
     * @see de.lmu.ifi.dbs.algorithm.Algorithm#getResult()
     */
    public Result<RealVector> getResult() {
        return result;
    }

    /**
     * @see de.lmu.ifi.dbs.algorithm.Algorithm#getDescription()
     */
    public Description getDescription() {
        return new Description("COPAA",
                               "COrrelation PArtitioning Algorithm",
                               "Partitions a database according to the correlation dimension of its objects and performs an arbitrary algorithm over the partitions.",
                               "unpublished");
    }

    /**
     * Returns the partitioning algorithm.
     *
     * @return the partitioning algorithm
     */
    public Algorithm<RealVector> getPartitionAlgorithm() {
        return partitionAlgorithm;
    }

    /**
     * @see de.lmu.ifi.dbs.utilities.optionhandling.Parameterizable#description()
     */
    @Override
    public String description() {
        // StringBuilder suffices: the buffer is method-local, no synchronization needed.
        StringBuilder description = new StringBuilder();
        description.append(optionHandler.usage("", false));
        description.append('\n');
        description.append("Remaining parameters are firstly given to the partition algorithm, then to the preprocessor.");
        description.append('\n');
        description.append('\n');
        return description.toString();
    }

    /**
     * Passes remaining parameters first to the partition algorithm, then to the
     * preprocessor.
     *
     * @see de.lmu.ifi.dbs.utilities.optionhandling.Parameterizable#setParameters(String[])
     */
    @Override
    public String[] setParameters(String[] args) throws ParameterException {
        String[] remainingParameters = super.setParameters(args);
        // partition algorithm
        String partAlgString = (String) optionHandler.getOptionValue(PARTITION_ALGORITHM_P);
        try {
            // noinspection unchecked
            partitionAlgorithm = Util.instantiate(Algorithm.class, partAlgString);
        }
        catch (UnableToComplyException e) {
            // BUGFIX: propagate the cause, consistent with the other catch blocks below.
            throw new WrongParameterValueException(PARTITION_ALGORITHM_P, partAlgString, PARTITION_ALGORITHM_D, e);
        }
        // partition db
        if (optionHandler.isSet(PARTITION_DATABASE_CLASS_P)) {
            String partDBString = (String) optionHandler.getOptionValue(PARTITION_DATABASE_CLASS_P);
            try {
                Database tmpDB = Util.instantiate(Database.class, partDBString);
                remainingParameters = tmpDB.setParameters(remainingParameters);
                partitionDatabaseParameters = tmpDB.getParameters();
                // noinspection unchecked
                partitionDatabase = (Class<Database>) tmpDB.getClass();
            }
            catch (UnableToComplyException e) {
                throw new WrongParameterValueException(PARTITION_DATABASE_CLASS_P, partDBString, PARTITION_DATABASE_CLASS_D, e);
            }
        }
        // preprocessor
        String preprocessorString = (String) optionHandler.getOptionValue(PREPROCESSOR_P);
        try {
            preprocessor = Util.instantiate(HiCOPreprocessor.class, preprocessorString);
        }
        catch (UnableToComplyException e) {
            throw new WrongParameterValueException(PREPROCESSOR_P, preprocessorString, PREPROCESSOR_D, e);
        }
        remainingParameters = preprocessor.setParameters(remainingParameters);
        remainingParameters = partitionAlgorithm.setParameters(remainingParameters);
        partitionAlgorithm.setTime(isTime());
        partitionAlgorithm.setVerbose(isVerbose());
        setParameters(args, remainingParameters);
        return remainingParameters;
    }

    /**
     * Sets whether the time should be assessed. The flag is forwarded to the
     * partition algorithm, so this must not be called before
     * {@link #setParameters(String[])} has instantiated it.
     *
     * @param time whether the time should be assessed
     */
    @Override
    public void setTime(boolean time) {
        super.setTime(time);
        partitionAlgorithm.setTime(time);
    }

    /**
     * Sets whether verbose messages should be printed while executing the
     * algorithm. The flag is forwarded to the partition algorithm, so this
     * must not be called before {@link #setParameters(String[])} has
     * instantiated it.
     *
     * @param verbose whether verbose messages should be printed while executing the
     *                algorithm
     */
    @Override
    public void setVerbose(boolean verbose) {
        super.setVerbose(verbose);
        partitionAlgorithm.setVerbose(verbose);
    }

    /**
     * Returns the parameter setting of the attributes.
     *
     * @return the parameter setting of the attributes
     */
    public List<AttributeSettings> getAttributeSettings() {
        List<AttributeSettings> result = super.getAttributeSettings();
        AttributeSettings settings = result.get(0);
        settings.addSetting(PREPROCESSOR_P, preprocessor.getClass().getName());
        settings.addSetting(PARTITION_ALGORITHM_P, partitionAlgorithm.getClass().getName());
        if (optionHandler.isSet(PARTITION_DATABASE_CLASS_P)) {
            settings.addSetting(PARTITION_DATABASE_CLASS_P, partitionDatabase.getName());
        }
        result.addAll(preprocessor.getAttributeSettings());
        result.addAll(partitionAlgorithm.getAttributeSettings());
        if (optionHandler.isSet(PARTITION_DATABASE_CLASS_P)) {
            try {
                Database tmpDB = Util.instantiate(Database.class, partitionDatabase.getName());
                result.addAll(tmpDB.getAttributeSettings());
            }
            catch (UnableToComplyException e) {
                // instantiation was already verified in setParameters
                throw new RuntimeException("This should never happen!");
            }
        }
        return result;
    }

    /**
     * Runs the partition algorithm and creates the result.
     *
     * @param database     the database to run this algorithm on
     * @param partitionMap the map of partition IDs to object ids
     * @return the combined results of all partitions
     */
    protected PartitionResults<RealVector> runPartitionAlgorithm(Database<RealVector> database, Map<Integer, List<Integer>> partitionMap) {
        try {
            Map<Integer, Database<RealVector>> databasePartitions = database.partition(partitionMap, partitionDatabase, partitionDatabaseParameters);
            Map<Integer, Result<RealVector>> results = new Hashtable<Integer, Result<RealVector>>();
            for (Integer partitionID : databasePartitions.keySet()) {
                if (isVerbose()) {
                    verbose("\nRunning " + partitionAlgorithm.getDescription().getShortTitle() + " on partition " + partitionID);
                }
                partitionAlgorithm.run(databasePartitions.get(partitionID));
                results.put(partitionID, partitionAlgorithm.getResult());
            }
            return new PartitionResults<RealVector>(database, results);
        }
        catch (UnableToComplyException e) {
            throw new IllegalStateException(e);
        }
    }
}
|
package de.lmu.ifi.dbs.algorithm.clustering;
import de.lmu.ifi.dbs.algorithm.Algorithm;
import de.lmu.ifi.dbs.algorithm.result.PartitionResults;
import de.lmu.ifi.dbs.algorithm.result.clustering.ClusteringResult;
import de.lmu.ifi.dbs.algorithm.result.clustering.PartitionClusteringResults;
import de.lmu.ifi.dbs.data.RealVector;
import de.lmu.ifi.dbs.database.Database;
import de.lmu.ifi.dbs.utilities.Description;
import de.lmu.ifi.dbs.utilities.UnableToComplyException;
import de.lmu.ifi.dbs.utilities.optionhandling.OptionHandler;
import de.lmu.ifi.dbs.utilities.optionhandling.WrongParameterValueException;
import de.lmu.ifi.dbs.utilities.optionhandling.ParameterException;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
/**
* Algorithm to partition a database according to the correlation dimension of
* its objects and to then perform an arbitrary algorithm over the partitions.
*
* @author Arthur Zimek (<a
* href="mailto:zimek@dbs.ifi.lmu.de">zimek@dbs.ifi.lmu.de</a>)
*/
public class COPAC extends COPAA implements Clustering<RealVector> {
/**
* Description for parameter partitioning algorithm
*/
public static final String PARTITION_ALGORITHM_D = "<classname>algorithm to apply to each partition - must implement " + Clustering.class.getName() + ".";
/**
* Sets the specific parameters additionally to the parameters set by the
* super-class.
*/
public COPAC() {
super();
// put in the right description
parameterToDescription.remove(COPAA.PARTITION_ALGORITHM_P + OptionHandler.EXPECTS_VALUE);
parameterToDescription.put(COPAA.PARTITION_ALGORITHM_P + OptionHandler.EXPECTS_VALUE, PARTITION_ALGORITHM_D);
optionHandler = new OptionHandler(parameterToDescription, this.getClass().getName());
}
/**
* Passes remaining parameters first to the partition algorithm, then to the
* preprocessor.
*
* @see de.lmu.ifi.dbs.utilities.optionhandling.Parameterizable#setParameters(String[])
*/
@Override
public String[] setParameters(String[] args) throws ParameterException {
String[] remainingParameters = super.setParameters(args);
if (! (getPartitionAlgorithm() instanceof Clustering)) {
throw new WrongParameterValueException(PARTITION_ALGORITHM_P, optionHandler.getOptionValue(PARTITION_ALGORITHM_P), PARTITION_ALGORITHM_D);
}
return remainingParameters;
}
/**
* @see Clustering#getResult()
*/
public ClusteringResult<RealVector> getResult() {
return (ClusteringResult<RealVector>) super.getResult();
}
/**
* @see Algorithm#getDescription()
*/
public Description getDescription() {
return new Description("COPAC", "Correlation Partition Clustering", "Partitions a database according to the correlation dimension of its objects and performs a clustering algorithm over the partitions.", "unpublished");
}
/**
* Runs the partition algorithm and creates the result.
*
* @param database the database to run this algorithm on
* @param partitionMap the map of partition IDs to object ids
*/
protected PartitionResults<RealVector> runPartitionAlgorithm(Database<RealVector> database, Map<Integer, List<Integer>> partitionMap) {
try {
Map<Integer, Database<RealVector>> databasePartitions = database.partition(partitionMap);
Map<Integer, ClusteringResult<RealVector>> results = new Hashtable<Integer, ClusteringResult<RealVector>>();
Clustering<RealVector> partitionAlgorithm = (Clustering<RealVector>) getPartitionAlgorithm();
for (Integer partitionID : databasePartitions.keySet()) {
if (isVerbose()) {
System.out.println("\nRunning " + partitionAlgorithm.getDescription().getShortTitle() + " on partition " + partitionID);
}
partitionAlgorithm.run(databasePartitions.get(partitionID));
results.put(partitionID, partitionAlgorithm.getResult());
}
return new PartitionClusteringResults<RealVector>(database, results, database.dimensionality());
}
catch (UnableToComplyException e) {
throw new IllegalStateException(e);
}
}
}
|
package de.lmu.ifi.dbs.elki.parser;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import de.lmu.ifi.dbs.elki.data.DatabaseObject;
import de.lmu.ifi.dbs.elki.logging.Logging;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.OptionID;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.WrongParameterValueException;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameterization.Parameterization;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameters.PatternParameter;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameters.StringParameter;
/**
* Abstract superclass for all parsers providing the option handler for handling
* options.
*
* @author Arthur Zimek
* @param <O> object type
*/
public abstract class AbstractParser<O extends DatabaseObject> implements Parser<O> {
/**
* A pattern defining whitespace.
*/
public static final String WHITESPACE_PATTERN = "\\s+";
/**
* A quote pattern
*/
public static final String QUOTE_PATTERN = "\"";
/**
* A pattern catching most numbers that can be parsed using Double.valueOf:
*
* Some examples: <code>1</code> <code>1.</code> <code>1.2</code>
* <code>.2</code> <code>-.2e-03</code>
*/
public static final String NUMBER_PATTERN = "[+-]?(?:\\d+\\.?|\\d*\\.\\d+)?(?:[eE][-]?\\d+)?";
/**
* OptionID for the column separator parameter (defaults to whitespace as in
* {@link #WHITESPACE_PATTERN}.
*/
private static final OptionID COLUMN_SEPARATOR_ID = OptionID.getOrCreateOptionID("parser.colsep", "Column separator pattern. The default assumes whitespace separated data.");
/**
* OptionID for the quote character parameter (defaults to a double quotation
* mark as in {@link #QUOTE_PATTERN}.
*/
private static final OptionID QUOTE_ID = OptionID.getOrCreateOptionID("parser.quote", "Quotation character. The default is to use a double quote.");
/**
* Stores the column separator pattern
*/
private Pattern colSep = null;
/**
* Stores the quotation character
*/
protected char quoteChar = 0;
/**
* The comment character.
*/
public static final String COMMENT = "
/**
* A sign to separate attributes.
*/
public static final String ATTRIBUTE_CONCATENATION = " ";
/**
* AbstractParser already provides the option handler.
*
* @param config Parameterization
*/
protected AbstractParser(Parameterization config) {
super();
config = config.descend(this);
PatternParameter colParam = new PatternParameter(COLUMN_SEPARATOR_ID, WHITESPACE_PATTERN);
if(config.grab(colParam)) {
colSep = colParam.getValue();
}
// TODO: length restriction!
StringParameter quoteParam = new StringParameter(QUOTE_ID, QUOTE_PATTERN);
if(config.grab(quoteParam)) {
if(quoteParam.getValue().length() > 1) {
config.reportError(new WrongParameterValueException(quoteParam, quoteParam.getValue(), "Quote charater may only be a single character."));
}
else if(quoteParam.getValue().length() == 1) {
quoteChar = quoteParam.getValue().charAt(0);
}
else {
quoteChar = 0;
}
}
}
/**
* Tokenize a string. Works much like colSep.split() except it honors
* quotation characters.
*
* @param s
* @return Tokenized string
*/
protected List<String> tokenize(String input) {
ArrayList<String> matchList = new ArrayList<String>();
Matcher m = colSep.matcher(input);
int index = 0;
boolean inquote = (input.length() > 0) && (input.charAt(0) == quoteChar);
while(m.find()) {
// Quoted code path vs. regular code path
if(inquote && m.start() > 0) {
// Closing quote found?
if(m.start() > index + 1 && input.charAt(m.start() - 1) == quoteChar) {
// Strip quote characters
matchList.add(input.subSequence(index + 1, m.start() - 1).toString());
// Seek past
index = m.end();
// new quote?
inquote = (index < input.length()) && (input.charAt(index) == quoteChar);
}
}
else {
// Add match before separator
matchList.add(input.subSequence(index, m.start()).toString());
// Seek past separator
index = m.end();
// new quote?
inquote = (index < input.length()) && (input.charAt(index) == quoteChar);
}
}
// Nothing found - return original string.
if(index == 0) {
matchList.add(input);
return matchList;
}
// Add tail after last separator.
if(inquote) {
if(input.charAt(input.length() - 1) == quoteChar) {
matchList.add(input.subSequence(index + 1, input.length() - 1).toString());
}
else {
getLogger().warning("Invalid quoted line in input.");
matchList.add(input.subSequence(index, input.length()).toString());
}
}
else {
matchList.add(input.subSequence(index, input.length()).toString());
}
// Return
return matchList;
}
/**
* Get the logger for this class.
*
* @return Logger.
*/
protected abstract Logging getLogger();
/**
* Returns a string representation of the object.
*
* @return a string representation of the object.
*/
@Override
public String toString() {
return getClass().getName();
}
}
|
package controllers;
import org.omg.CORBA.Current;
import models.Business;
import models.Collaborator;
import models.User;
import views.html.register;
import play.data.Form;
import play.mvc.Controller;
import play.mvc.Result;
/**
 * Play Framework controller for the two-step registration flow: the visitor
 * first selects an account type (TypeRegister), then submits credentials
 * (UserRegister). The chosen type is tracked in the HTTP session under the
 * key "typeRegister".
 */
public class Register extends Controller {
// Form bound to the credential fields (username/password/name/email).
static Form<UserRegister> regUser = Form.form(UserRegister.class);
// Form bound to the account-type selection.
static Form<TypeRegister> regType = Form.form(TypeRegister.class);
public String type;
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
// Validates the account-type form.
// NOTE(review): both branches currently return badRequest(...) — the redirect
// on success is commented out, so valid and invalid submissions behave
// identically. Presumably work in progress; confirm before release.
public static Result isValid() {
regUser = Form.form(UserRegister.class).bindFromRequest();
regType = Form.form(TypeRegister.class).bindFromRequest();
if (regType.hasErrors()) {
return badRequest(register.render(regType, regUser));
} else {
return badRequest(register.render(regType, regUser));
// return redirect(routes.Application.index());
}
}
// Validates the credential form.
// NOTE(review): same as isValid() — the success branch also returns
// badRequest while the redirect remains commented out.
public static Result userRegister() {
regUser = Form.form(UserRegister.class).bindFromRequest();
regType = Form.form(TypeRegister.class).bindFromRequest();
if (regUser.hasErrors()) {
return badRequest(register.render(regType, regUser));
} else {
return badRequest(register.render(regType, regUser));
// return redirect(routes.Application.index());
}
}
// Renders the registration page with both forms bound from the request.
public static Result register() {
// return ok(register.render(Form.form(Register.class)));
regUser = Form.form(UserRegister.class).bindFromRequest();
regType = Form.form(TypeRegister.class).bindFromRequest();
return ok(register.render(regType, regUser));
}
/**
 * Backing bean for the credential form. Play calls validate() on binding;
 * a null return means valid, any non-null string marks the form invalid.
 */
public static class UserRegister {
public String username;
public String password;
public String name;
public String email;
// Valid only when a non-empty username was supplied; as a side effect,
// records "user" as the registration type in the session.
// NOTE(review): only the username is checked here — password/name/email
// are accepted empty; confirm this is intended.
public String validate() {
String validado = "Error";
if (username != null && username.compareTo("") != 0) {
validado = null;
session().put("typeRegister", "user");
}
else {
// Empty string still signals a validation failure (non-null), just
// without an error message.
validado = "";
}
return validado;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
}
/**
 * Backing bean for the account-type form. Stores the selected type in the
 * session; null return means valid.
 */
public static class TypeRegister {
public String type;
// NOTE(review): when no type was submitted but the session already holds
// "user", this returns "Error" (the initial value) while leaving the
// session value intact — confirm that marking the form invalid in that
// branch is intended.
public String validate() {
String validado = "Error";
if (type != null && type.compareTo("") != 0) {
session().put("typeRegister", type);
validado = null;
} else if (session().get("typeRegister") != null
&& session().get("typeRegister").compareTo("") != 0
&& session().get("typeRegister").compareTo("user") == 0) {
session().put("typeRegister", "");
} else {
session().put("typeRegister", "");
validado="";
}
return validado;
}
}
}
|
package dr.app.beauti.treespanel;
import dr.app.beauti.BeautiFrame;
import dr.app.tools.TemporalRooting;
import dr.evolution.tree.Tree;
import dr.gui.chart.*;
import dr.gui.tree.JTreeDisplay;
import dr.gui.tree.SquareTreePainter;
import dr.stats.Regression;
import javax.swing.*;
import java.awt.*;
/**
 * Panel presenting three tabbed views of a tree: the tree itself, a
 * root-to-tip divergence regression, and the tree re-scaled to temporal
 * constraints.
 *
 * @author Andrew Rambaut
 * @author Alexei Drummond
 * @version $Id: PriorsPanel.java,v 1.9 2006/09/05 13:29:34 rambaut Exp $
 */
public class TreeDisplayPanel extends JPanel {
    private Tree tree = null;
    BeautiFrame frame = null;
    JTabbedPane tabbedPane = new JTabbedPane();
    JTreeDisplay treePanel;
    JTreeDisplay scaledTreePanel;
    JChartPanel rootToTipPanel;
    JChart rootToTipChart;

    public TreeDisplayPanel(BeautiFrame parent) {
        super(new BorderLayout());
        this.frame = parent;
        // Tab 1: the raw tree.
        treePanel = new JTreeDisplay(new SquareTreePainter());
        tabbedPane.add("Tree", treePanel);
        // Tab 2: root-to-tip divergence scatter plus fitted regression line.
        rootToTipChart = new JChart(new LinearAxis(), new LinearAxis(Axis.AT_ZERO, Axis.AT_MINOR_TICK));
        rootToTipPanel = new JChartPanel(rootToTipChart, "", "time", "divergence");
        rootToTipPanel.setOpaque(false);
        tabbedPane.add("Root-to-tip", rootToTipPanel);
        // Tab 3: the tree re-scaled to the temporal constraints.
        scaledTreePanel = new JTreeDisplay(new SquareTreePainter());
        tabbedPane.add("Re-scaled tree", scaledTreePanel);
        setOpaque(false);
        add(tabbedPane, BorderLayout.CENTER);
    }

    /** Replaces the displayed tree (null clears all tabs) and refreshes the views. */
    public void setTree(Tree tree) {
        this.tree = tree;
        setupPanel();
    }

    private void setupPanel() {
        if (tree == null) {
            // No tree: clear every tab.
            treePanel.setTree(null);
            rootToTipChart.removeAllPlots();
            scaledTreePanel.setTree(null);
            repaint();
            return;
        }
        treePanel.setTree(tree);
        final TemporalRooting rooting = new TemporalRooting(tree);
        final Regression regression = rooting.getRootToTipRegression(tree);
        rootToTipChart.removeAllPlots();
        rootToTipChart.addPlot(new ScatterPlot(regression.getXData(), regression.getYData()));
        rootToTipChart.addPlot(new RegressionPlot(regression));
        rootToTipChart.getXAxis().addRange(regression.getXIntercept(), regression.getXData().getMax());
        scaledTreePanel.setTree(rooting.adjustTreeToConstraints(tree, null));
        repaint();
    }
}
|
package dr.evomodel.antigenic;
import dr.evolution.util.*;
import dr.inference.model.*;
import dr.math.MathUtils;
import dr.math.distributions.NormalDistribution;
import dr.util.*;
import dr.xml.*;
import java.io.*;
import java.util.*;
import java.util.logging.Logger;
/**
* @author Andrew Rambaut
* @author Trevor Bedford
* @author Marc Suchard
* @version $Id$
*/
public class AntigenicLikelihood extends AbstractModelLikelihood implements Citable {
    /** XML element name used by the parser below. */
    public final static String ANTIGENIC_LIKELIHOOD = "antigenicLikelihood";

    // column indices in table
    private static final int COLUMN_LABEL = 0;   // serum (column) identifier
    private static final int SERUM_STRAIN = 2;   // serum strain name
    private static final int ROW_LABEL = 1;      // virus (row) identifier
    private static final int VIRUS_STRAIN = 3;   // virus strain name
    private static final int SERUM_DATE = 4;     // NOTE(review): not referenced in the visible constructor — confirm whether serum dates should be read from this column
    private static final int VIRUS_DATE = 5;     // virus sampling date
    private static final int RAW_TITRE = 6;      // titre as reported (may carry '<' / '>' thresholds)
    private static final int MIN_TITRE = 7;      // optional explicit lower bound
    private static final int MAX_TITRE = 8;      // optional explicit upper bound

    /** How a single assay measurement constrains the titre. */
    public enum MeasurementType {
        INTERVAL,    // titre known to lie between min and max
        POINT,       // titre known exactly (min == max)
        UPPER_BOUND, // only an upper bound is known (e.g. "<40")
        LOWER_BOUND, // only a lower bound is known (e.g. ">1280")
        MISSING      // no usable measurement
    }
    /**
     * Builds the likelihood from an HI assay data table.
     * <p>
     * Each table row contributes one {@link Measurement}; unique column (serum)
     * and row (virus) labels are collected, strain names are either taken from
     * {@code strainTaxa} or accumulated from the table, and the column/row
     * effect parameters are initialised to the maximum titre seen in their
     * column/row.
     *
     * @param mdsDimension          dimensionality of the antigenic map
     * @param mdsPrecisionParameter precision of the measurement noise
     * @param strainTaxa            optional fixed strain list; when null, strains are read from the table
     * @param locationsParameter    matrix of per-strain map locations (resized here)
     * @param datesParameter        optional parameter to be filled with per-strain dates
     * @param columnParameter       optional column (serum) effects; created if null
     * @param rowParameter          optional row (virus) effects; created if null
     * @param dataTable             the parsed assay table
     * @param virusLocationStatisticList NOTE(review): unused in this constructor — confirm intent
     */
    public AntigenicLikelihood(
            int mdsDimension,
            Parameter mdsPrecisionParameter,
            TaxonList strainTaxa,
            MatrixParameter locationsParameter,
            Parameter datesParameter,
            Parameter columnParameter,
            Parameter rowParameter,
            DataTable<String[]> dataTable,
            List<String> virusLocationStatisticList) {
        super(ANTIGENIC_LIKELIHOOD);

        List<String> strainNames = new ArrayList<String>();
        Map<String, Double> strainDateMap = new HashMap<String, Double>();

        for (int i = 0; i < dataTable.getRowCount(); i++) {
            String[] values = dataTable.getRow(i);

            // Register the serum (column) label on first sight.
            int column = columnLabels.indexOf(values[COLUMN_LABEL]);
            if (column == -1) {
                // NOTE(review): values[0] is values[COLUMN_LABEL]; use the constant for consistency.
                columnLabels.add(values[0]);
                column = columnLabels.size() - 1;
            }

            // Resolve the serum strain, either against the fixed taxon list or
            // by accumulating names from the table.
            int columnStrain = -1;
            if (strainTaxa != null) {
                columnStrain = strainTaxa.getTaxonIndex(values[SERUM_STRAIN]);
            } else {
                columnStrain = strainNames.indexOf(values[SERUM_STRAIN]);
                if (columnStrain == -1) {
                    strainNames.add(values[SERUM_STRAIN]);
                    // NOTE(review): this reads VIRUS_DATE for a SERUM strain — should this be SERUM_DATE? Confirm.
                    Double date = Double.parseDouble(values[VIRUS_DATE]);
                    strainDateMap.put(values[SERUM_STRAIN], date);
                    columnStrain = strainNames.size() - 1;
                }
            }
            if (columnStrain == -1) {
                throw new IllegalArgumentException("Error reading data table: Unrecognized serum strain name, " + values[SERUM_STRAIN] + ", in row " + (i+1));
            }

            // Register the virus (row) label on first sight.
            int row = rowLabels.indexOf(values[ROW_LABEL]);
            if (row == -1) {
                rowLabels.add(values[ROW_LABEL]);
                row = rowLabels.size() - 1;
            }

            // Resolve the virus strain, same two-path logic as above.
            int rowStrain = -1;
            if (strainTaxa != null) {
                rowStrain = strainTaxa.getTaxonIndex(values[VIRUS_STRAIN]);
            } else {
                rowStrain = strainNames.indexOf(values[VIRUS_STRAIN]);
                if (rowStrain == -1) {
                    strainNames.add(values[VIRUS_STRAIN]);
                    Double date = Double.parseDouble(values[VIRUS_DATE]);
                    strainDateMap.put(values[VIRUS_STRAIN], date);
                    rowStrain = strainNames.size() - 1;
                }
            }
            if (rowStrain == -1) {
                throw new IllegalArgumentException("Error reading data table: Unrecognized virus strain name, " + values[VIRUS_STRAIN] + ", in row " + (i+1));
            }

            // Optional explicit lower bound; short rows and unparseable cells
            // are deliberately tolerated and leave the value NaN.
            double minTitre = Double.NaN;
            try {
                if (values[MIN_TITRE].length() > 0) {
                    try {
                        minTitre = Double.parseDouble(values[MIN_TITRE]);
                    } catch (NumberFormatException nfe) {
                        // do nothing
                    }
                }
            } catch (ArrayIndexOutOfBoundsException e) {
                // do nothing
            }

            // Optional explicit upper bound, same tolerance.
            double maxTitre = Double.NaN;
            try {
                if (values[MAX_TITRE].length() > 0) {
                    try {
                        maxTitre = Double.parseDouble(values[MAX_TITRE]);
                    } catch (NumberFormatException nfe) {
                        // do nothing
                    }
                }
            } catch (ArrayIndexOutOfBoundsException e) {
                // do nothing
            }

            // use this if minTitre and maxTitre are not defined in HI file
            double rawTitre = Double.NaN;
            if (values[RAW_TITRE].length() > 0) {
                try {
                    rawTitre = Double.parseDouble(values[RAW_TITRE]);
                    maxTitre = rawTitre;
                    minTitre = rawTitre;
                } catch (NumberFormatException nfe) {
                    // check if threshold below
                    if (values[RAW_TITRE].contains("<")) {
                        rawTitre = Double.parseDouble(values[RAW_TITRE].replace("<",""));
                        maxTitre = rawTitre;
                        minTitre = 0.0;
                    }
                    // check if threshold above
                    if (values[RAW_TITRE].contains(">")) {
                        rawTitre = Double.parseDouble(values[RAW_TITRE].replace(">",""));
                        minTitre = rawTitre;
                        maxTitre = Double.NaN;
                    }
                }
            }

            // Classify the measurement. Note NaN == NaN is false, so two
            // missing bounds do NOT become a POINT here; they are caught below.
            MeasurementType type = MeasurementType.INTERVAL;
            if (minTitre == maxTitre) {
                type = MeasurementType.POINT;
            }
            if (Double.isNaN(minTitre) || minTitre == 0.0) {
                if (Double.isNaN(maxTitre)) {
                    throw new IllegalArgumentException("Error in measurement: both min and max titre are at bounds in row " + (i+1));
                }
                type = MeasurementType.UPPER_BOUND;
            } else if (Double.isNaN(maxTitre)) {
                type = MeasurementType.LOWER_BOUND;
            }

            // Measurement converts titres to log2 scale internally.
            Measurement measurement = new Measurement(column, columnStrain, row, rowStrain, type, minTitre, maxTitre);
            measurements.add(measurement);
        }

        // Per-column and per-row maxima (on the log2 scale stored in
        // Measurement) used to initialise the effect parameters below.
        double[] maxColumnTitre = new double[columnLabels.size()];
        double[] maxRowTitre = new double[rowLabels.size()];
        for (Measurement measurement : measurements) {
            double titre = measurement.maxTitre;
            if (Double.isNaN(titre)) {
                titre = measurement.minTitre;
            }
            if (titre > maxColumnTitre[measurement.column]) {
                maxColumnTitre[measurement.column] = titre;
            }
            if (titre > maxRowTitre[measurement.row]) {
                maxRowTitre[measurement.row] = titre;
            }
        }

        if (strainTaxa != null) {
            this.strains = strainTaxa;
            // fill in the strain name array for local use
            for (int i = 0; i < strains.getTaxonCount(); i++) {
                strainNames.add(strains.getTaxon(i).getId());
            }
        } else {
            Taxa taxa = new Taxa();
            for (String strain : strainNames) {
                taxa.addTaxon(new Taxon(strain));
            }
            this.strains = taxa;
        }

        this.mdsDimension = mdsDimension;
        this.mdsPrecisionParameter = mdsPrecisionParameter;
        addVariable(mdsPrecisionParameter);

        this.locationsParameter = locationsParameter;
        setupLocationsParameter(this.locationsParameter, strainNames);
        addVariable(this.locationsParameter);

        if (datesParameter != null) {
            // this parameter is not used in this class but is setup to be used in other classes
            datesParameter.setDimension(strainNames.size());
            String[] labelArray = new String[strainNames.size()];
            strainNames.toArray(labelArray);
            datesParameter.setDimensionNames(labelArray);
            for (int i = 0; i < strainNames.size(); i++) {
                Double date = strainDateMap.get(strainNames.get(i));
                if (date == null) {
                    throw new IllegalArgumentException("Date missing for strain: " + strainNames.get(i));
                }
                datesParameter.setParameterValue(i, date);
            }
        }

        // Column (serum) effects, initialised to each column's maximum titre.
        if (columnParameter == null) {
            this.columnEffectsParameter = new Parameter.Default("columnEffects");
        } else {
            this.columnEffectsParameter = columnParameter;
        }
        this.columnEffectsParameter.setDimension(columnLabels.size());
        addVariable(this.columnEffectsParameter);
        String[] labelArray = new String[columnLabels.size()];
        columnLabels.toArray(labelArray);
        this.columnEffectsParameter.setDimensionNames(labelArray);
        for (int i = 0; i < maxColumnTitre.length; i++) {
            this.columnEffectsParameter.setParameterValueQuietly(i, maxColumnTitre[i]);
        }

        // Row (virus) effects, initialised to each row's maximum titre.
        if (rowParameter == null) {
            this.rowEffectsParameter = new Parameter.Default("rowEffects");
        } else {
            this.rowEffectsParameter = rowParameter;
        }
        this.rowEffectsParameter.setDimension(rowLabels.size());
        addVariable(this.rowEffectsParameter);
        labelArray = new String[rowLabels.size()];
        rowLabels.toArray(labelArray);
        this.rowEffectsParameter.setDimensionNames(labelArray);
        for (int i = 0; i < maxRowTitre.length; i++) {
            this.rowEffectsParameter.setParameterValueQuietly(i, maxRowTitre[i]);
        }

        StringBuilder sb = new StringBuilder();
        sb.append("\tAntigenicLikelihood:\n");
        sb.append("\t\t" + this.strains.getTaxonCount() + " strains\n");
        sb.append("\t\t" + columnLabels.size() + " unique columns\n");
        sb.append("\t\t" + rowLabels.size() + " unique rows\n");
        sb.append("\t\t" + measurements.size() + " assay measurements\n");
        Logger.getLogger("dr.evomodel").info(sb.toString());

        // initial locations
        // NOTE(review): datesParameter is dereferenced here without the null
        // guard used above — the XML parser treats DATES as optional, so this
        // can NPE. Confirm whether DATES is effectively required.
        double earliestDate = datesParameter.getParameterValue(0);
        for (int i=0; i<datesParameter.getDimension(); i++) {
            double date = datesParameter.getParameterValue(i);
            if (earliestDate > date) {
                earliestDate = date;
            }
        }
        // Seed each location: dimension 0 gets (date offset + noise), the
        // remaining dimensions get standard-normal noise.
        // NOTE(review): when strainTaxa was supplied, strainDateMap was never
        // populated, so the get() below would return null and NPE on unboxing
        // — confirm that path is exercised.
        for (int i = 0; i < locationsParameter.getParameterCount(); i++) {
            String name = strainNames.get(i); // NOTE(review): unused local
            double date = (double) strainDateMap.get(strainNames.get(i));
            double diff = (date-earliestDate);
            locationsParameter.getParameter(i).setParameterValueQuietly(0, diff + MathUtils.nextGaussian());
            for (int j = 1; j < mdsDimension; j++) {
                double r = MathUtils.nextGaussian();
                locationsParameter.getParameter(i).setParameterValueQuietly(j, r);
            }
        }

        locationChanged = new boolean[this.locationsParameter.getRowDimension()];
        logLikelihoods = new double[measurements.size()];
        storedLogLikelihoods = new double[measurements.size()];

        makeDirty();
    }
protected void setupLocationsParameter(MatrixParameter locationsParameter, List<String> strains) {
locationsParameter.setColumnDimension(mdsDimension);
locationsParameter.setRowDimension(strains.size());
for (int i = 0; i < strains.size(); i++) {
locationsParameter.getParameter(i).setId(strains.get(i));
}
}
    @Override
    protected void handleModelChangedEvent(Model model, Object object, int index) {
        // No sub-models are registered by this class, so there is nothing to do.
    }
@Override
protected void handleVariableChangedEvent(Variable variable, int index, Variable.ChangeType type) {
if (variable == locationsParameter) {
locationChanged[index / mdsDimension] = true;
} else if (variable == mdsPrecisionParameter) {
setLocationChangedFlags(true);
} else if (variable == columnEffectsParameter) {
setLocationChangedFlags(true);
} else if (variable == rowEffectsParameter) {
setLocationChangedFlags(true);
} else {
// could be a derived class's parameter
}
likelihoodKnown = false;
}
    @Override
    protected void storeState() {
        // Snapshot the per-measurement log likelihoods so restoreState() can
        // recover them by swapping the arrays back.
        System.arraycopy(logLikelihoods, 0, storedLogLikelihoods, 0, logLikelihoods.length);
    }
@Override
protected void restoreState() {
double[] tmp = logLikelihoods;
logLikelihoods = storedLogLikelihoods;
storedLogLikelihoods = tmp;
likelihoodKnown = false;
}
    @Override
    protected void acceptState() {
        // Nothing to do: the working state is already the accepted state.
    }
    @Override
    public Model getModel() {
        // This likelihood acts as its own model.
        return this;
    }
@Override
public double getLogLikelihood() {
if (!likelihoodKnown) {
logLikelihood = computeLogLikelihood();
}
return logLikelihood;
}
    // This function can be overwritten to implement other sampling densities, i.e. discrete ranks
    /**
     * Recomputes the total log likelihood. Per-measurement terms are cached in
     * {@code logLikelihoods} and only re-evaluated when either endpoint
     * strain's location is flagged dirty; the total is always re-summed over
     * all entries. MISSING measurements keep whatever is in their cache slot
     * (initially 0.0).
     */
    private double computeLogLikelihood() {
        double precision = mdsPrecisionParameter.getParameterValue(0);
        double sd = 1.0 / Math.sqrt(precision);

        logLikelihood = 0.0;
        int i = 0;
        for (Measurement measurement : measurements) {
            // Only recompute terms whose map distance may have changed.
            if (locationChanged[measurement.rowStrain] || locationChanged[measurement.columnStrain]) {
                double distance = computeDistance(measurement.rowStrain, measurement.columnStrain);
                double logNormalization = calculateTruncationNormalization(distance, sd);
                switch (measurement.type) {
                    case INTERVAL: {
                        double minTitre = transformTitre(measurement.minTitre, measurement.column, measurement.row, distance, sd);
                        double maxTitre = transformTitre(measurement.maxTitre, measurement.column, measurement.row, distance, sd);
                        logLikelihoods[i] = computeMeasurementIntervalLikelihood(minTitre, maxTitre) - logNormalization;
                    } break;
                    case POINT: {
                        double titre = transformTitre(measurement.minTitre, measurement.column, measurement.row, distance, sd);
                        logLikelihoods[i] = computeMeasurementLikelihood(titre) - logNormalization;
                    } break;
                    case LOWER_BOUND: {
                        double minTitre = transformTitre(measurement.minTitre, measurement.column, measurement.row, distance, sd);
                        logLikelihoods[i] = computeMeasurementLowerBoundLikelihood(minTitre) - logNormalization;
                    } break;
                    case UPPER_BOUND: {
                        double maxTitre = transformTitre(measurement.maxTitre, measurement.column, measurement.row, distance, sd);
                        logLikelihoods[i] = computeMeasurementUpperBoundLikelihood(maxTitre) - logNormalization;
                    } break;
                    case MISSING:
                        break;
                }
            }
            logLikelihood += logLikelihoods[i];
            i++;
        }

        likelihoodKnown = true;
        setLocationChangedFlags(false);
        return logLikelihood;
    }
private void setLocationChangedFlags(boolean flag) {
for (int i = 0; i < locationChanged.length; i++) {
locationChanged[i] = flag;
}
}
protected double computeDistance(int rowStrain, int columnStrain) {
if (rowStrain == columnStrain) {
return 0.0;
}
Parameter X = locationsParameter.getParameter(rowStrain);
Parameter Y = locationsParameter.getParameter(columnStrain);
double sum = 0.0;
for (int i = 0; i < mdsDimension; i++) {
double difference = X.getParameterValue(i) - Y.getParameterValue(i);
sum += difference * difference;
}
return Math.sqrt(sum);
}
/**
* Transforms a titre into log2 space and normalizes it with respect to a unit normal
* @param titre
* @param column
* @param row
* @param mean
* @param sd
* @return
*/
private double transformTitre(double titre, int column, int row, double mean, double sd) {
double rowEffect = rowEffectsParameter.getParameterValue(row);
double columnEffect = columnEffectsParameter.getParameterValue(column);
double t = ((rowEffect + columnEffect) * 0.5) - titre;
return (t - mean) / sd;
}
    /**
     * Log probability mass of a standard normal between the two transformed
     * titres. Because transformTitre() negates the raw titre, the transformed
     * minTitre is the larger bound, so {@code cdf1 - cdf2} is non-negative.
     */
    private double computeMeasurementIntervalLikelihood(double minTitre, double maxTitre) {
        double cdf1 = NormalDistribution.standardCDF(minTitre, false);
        double cdf2 = NormalDistribution.standardCDF(maxTitre, false);
        double lnL = Math.log(cdf1 - cdf2);
        // When the two CDFs coincide to machine precision, log(0) would be
        // -Infinity; fall back to the CDF value itself.
        // NOTE(review): this fallback changes the term's meaning from an
        // interval mass to a tail mass — confirm it is intentional.
        if (cdf1 == cdf2) {
            lnL = Math.log(cdf1);
        }
        if (Double.isNaN(lnL) || Double.isInfinite(lnL)) {
            throw new RuntimeException("infinite");
        }
        return lnL;
    }
private double computeMeasurementLikelihood(double titre) {
double lnL = Math.log(NormalDistribution.pdf(titre, 0.0, 1.0));
if (Double.isNaN(lnL) || Double.isInfinite(lnL)) {
throw new RuntimeException("infinite");
}
return lnL;
}
private double computeMeasurementLowerBoundLikelihood(double transformedMinTitre) {
// a lower bound in non-transformed titre so the bottom tail of the distribution
double cdf = NormalDistribution.standardTail(transformedMinTitre, true);
double lnL = Math.log(cdf);
if (Double.isNaN(lnL) || Double.isInfinite(lnL)) {
throw new RuntimeException("infinite");
}
return lnL;
}
private double computeMeasurementUpperBoundLikelihood(double transformedMaxTitre) {
// a upper bound in non-transformed titre so the upper tail of the distribution
// using special tail function of NormalDistribution (see main() in NormalDistribution for test)
double tail = NormalDistribution.standardTail(transformedMaxTitre, false);
double lnL = Math.log(tail);
if (Double.isNaN(lnL) || Double.isInfinite(lnL)) {
throw new RuntimeException("infinite");
}
return lnL;
}
    /**
     * Normalization constant for truncating at the expected distance.
     * NOTE(review): assumes the final {@code true} argument requests the log
     * CDF from NormalDistribution — confirm against its signature.
     */
    private double calculateTruncationNormalization(double distance, double sd) {
        return NormalDistribution.cdf(distance, 0.0, sd, true);
    }
@Override
public void makeDirty() {
likelihoodKnown = false;
setLocationChangedFlags(true);
}
private class Measurement {
private Measurement(final int column, final int columnStrain, final int row, final int rowStrain, final MeasurementType type, final double minTitre, final double maxTitre) {
this.column = column;
this.columnStrain = columnStrain;
this.row = row;
this.rowStrain = rowStrain;
this.type = type;
this.minTitre = Math.log(minTitre) / Math.log(2);
this.maxTitre = Math.log(maxTitre) / Math.log(2);
}
final int column;
final int row;
final int columnStrain;
final int rowStrain;
final MeasurementType type;
final double minTitre;
final double maxTitre;
};
    // --- immutable data gathered at construction ---
    private final List<Measurement> measurements = new ArrayList<Measurement>(); // one per assay-table row
    private final List<String> columnLabels = new ArrayList<String>(); // unique serum (column) labels, first-seen order
    private final List<String> rowLabels = new ArrayList<String>(); // unique virus (row) labels, first-seen order
    private final int mdsDimension; // dimensionality of the antigenic map
    private final Parameter mdsPrecisionParameter; // precision of the measurement noise
    private final MatrixParameter locationsParameter; // one location vector per strain
    private final TaxonList strains; // supplied taxon list, or one built from the table
    //    private final CompoundParameter tipTraitParameter;
    private final Parameter columnEffectsParameter; // per-column effect (initialised to the column's max titre)
    private final Parameter rowEffectsParameter; // per-row effect (initialised to the row's max titre)
    // --- mutable likelihood state ---
    private double logLikelihood = 0.0; // cached total
    private boolean likelihoodKnown = false; // whether logLikelihood is current
    private final boolean[] locationChanged; // per-strain dirty flags
    private double[] logLikelihoods; // cached per-measurement terms
    private double[] storedLogLikelihoods; // snapshot taken by storeState()
// XMLObjectParser
public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
public final static String FILE_NAME = "fileName";
public final static String TIP_TRAIT = "tipTrait";
public final static String LOCATIONS = "locations";
public final static String DATES = "dates";
public static final String MDS_DIMENSION = "mdsDimension";
public static final String MDS_PRECISION = "mdsPrecision";
public static final String COLUMN_EFFECTS = "columnEffects";
public static final String ROW_EFFECTS = "rowEffects";
public static final String STRAINS = "strains";
public String getParserName() {
return ANTIGENIC_LIKELIHOOD;
}
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
String fileName = xo.getStringAttribute(FILE_NAME);
DataTable<String[]> assayTable;
try {
assayTable = DataTable.Text.parse(new FileReader(fileName), true, false);
} catch (IOException e) {
throw new XMLParseException("Unable to read assay data from file: " + e.getMessage());
}
int mdsDimension = xo.getIntegerAttribute(MDS_DIMENSION);
// CompoundParameter tipTraitParameter = null;
// if (xo.hasChildNamed(TIP_TRAIT)) {
// tipTraitParameter = (CompoundParameter) xo.getElementFirstChild(TIP_TRAIT);
TaxonList strains = null;
if (xo.hasChildNamed(STRAINS)) {
strains = (TaxonList) xo.getElementFirstChild(STRAINS);
}
MatrixParameter locationsParameter = (MatrixParameter) xo.getElementFirstChild(LOCATIONS);
Parameter datesParameter = null;
if (xo.hasChildNamed(DATES)) {
datesParameter = (Parameter) xo.getElementFirstChild(DATES);
}
Parameter mdsPrecision = (Parameter) xo.getElementFirstChild(MDS_PRECISION);
Parameter columnEffectsParameter = (Parameter) xo.getElementFirstChild(COLUMN_EFFECTS);
Parameter rowEffectsParameter = (Parameter) xo.getElementFirstChild(ROW_EFFECTS);
AntigenicLikelihood AGL = new AntigenicLikelihood(
mdsDimension,
mdsPrecision,
strains,
locationsParameter,
datesParameter,
columnEffectsParameter,
rowEffectsParameter,
assayTable,
null);
Logger.getLogger("dr.evomodel").info("Using EvolutionaryCartography model. Please cite:\n" + Utils.getCitationString(AGL));
return AGL;
}
|
package dr.inference.operators.hmc;
import dr.evolution.alignment.PatternList;
import dr.inference.hmc.GradientWrtParameterProvider;
import dr.inference.hmc.PrecisionColumnProvider;
import dr.inference.hmc.PrecisionMatrixVectorProductProvider;
import dr.inference.model.Parameter;
import dr.math.MathUtils;
import dr.math.matrixAlgebra.ReadableVector;
import dr.math.matrixAlgebra.WrappedVector;
/**
 * A piecewise-deterministic particle operator that moves the parameter vector
 * along a zig-zag trajectory: coordinates travel at constant (unit-magnitude,
 * mass-scaled) velocity until either a coordinate reaches the zero boundary
 * (where its momentum is reflected) or the gradient reverses a coordinate's
 * momentum (a "gradient bounce").
 *
 * @author Aki Nishimura
 * @author Zhenyu Zhang
 * @author Marc A. Suchard
 */
public class ZigZagOperator extends AbstractParticleOperator {

    public ZigZagOperator(GradientWrtParameterProvider gradientProvider,
                          PrecisionMatrixVectorProductProvider multiplicationProvider,
                          PrecisionColumnProvider columnProvider,
                          double weight, Options runtimeOptions, Parameter mask, PatternList patternList) {
        super(gradientProvider, multiplicationProvider, weight, runtimeOptions, mask, patternList);
        this.columnProvider = columnProvider;
    }

    @Override
    public String getOperatorName() {
        return "Zig-zag particle operator";
    }

    /**
     * Simulates one trajectory of total duration {@code drawTotalTravelTime()},
     * handling boundary and gradient bounces as they occur, and mutates
     * {@code position} in place. Always returns 0.0 (no Hastings correction).
     */
    @Override
    double integrateTrajectory(WrappedVector position) {
        // FIX: initialize to null. Previously signString was only assigned
        // under DEBUG_SIGN but read under DEBUG (below); the code compiled
        // solely because both flags are compile-time false — enabling DEBUG
        // without DEBUG_SIGN would have been a definite-assignment error.
        String signString = null;
        if (DEBUG_SIGN) {
            signString = printSign(position);
            System.err.println(signString);
        }

        WrappedVector momentum = drawInitialMomentum();
        WrappedVector velocity = drawInitialVelocity(momentum);
        WrappedVector gradient = getInitialGradient();
        WrappedVector action = getPrecisionProduct(velocity);

        BounceState bounceState = new BounceState(drawTotalTravelTime());

        int count = 0;
        while (bounceState.isTimeRemaining()) {
            if (DEBUG) {
                debugBefore(position, count);
                ++count;
            }
            // The next event is whichever happens first: a coordinate hitting
            // zero, or the gradient flipping a coordinate's momentum.
            MinimumTravelInformation boundaryBounce = getNextBoundaryBounce(position, velocity);
            MinimumTravelInformation gradientBounce = getNextGradientBounce(action, gradient, momentum);
            if (DEBUG) {
                System.err.println("boundary: " + boundaryBounce);
                System.err.println("gradient: " + gradientBounce);
            }
            bounceState = doBounce(bounceState, boundaryBounce, gradientBounce,
                    position, velocity, momentum, gradient, action);
            if (DEBUG) {
                debugAfter(bounceState, position);
                String newSignString = printSign(position);
                System.err.println(newSignString);
                // Signs should only change at a boundary bounce.
                if (bounceState.type != Type.BOUNDARY && signString != null
                        && signString.compareTo(newSignString) != 0) {
                    System.err.println("Sign error");
                }
            }
        }

        if (DEBUG_SIGN) {
            printSign(position);
        }

        return 0.0;
    }

    /** Renders each coordinate's sign as "+", "-" or "0" for debugging. */
    private String printSign(ReadableVector position) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < position.getDim(); ++i) {
            double p = position.get(i);
            if (p < 0.0) sb.append("- ");
            else if (p > 0.0) sb.append("+ ");
            else sb.append("0 ");
        }
        return sb.toString();
    }

    private void debugAfter(BounceState bounceState, ReadableVector position) {
        System.err.println("post position: " + position);
        System.err.println(bounceState);
        System.err.println();
    }

    private void debugBefore(ReadableVector position, int count) {
        System.err.println("before number: " + count);
        System.err.println("init position: " + position);
    }

    /** Kind of event that ended an integration step. */
    enum Type {
        NONE,
        BOUNDARY,
        GRADIENT
    }

    /** Immutable record of the last bounce and the trajectory time left. */
    class BounceState {
        final Type type;
        final int index;
        final double remainingTime;

        BounceState(Type type, int index, double remainingTime) {
            this.type = type;
            this.index = index;
            this.remainingTime = remainingTime;
        }

        BounceState(double remainingTime) {
            this.type = Type.NONE;
            this.index = -1;
            this.remainingTime = remainingTime;
        }

        boolean isTimeRemaining() {
            return remainingTime > 0.0;
        }

        public String toString() {
            return "remainingTime : " + remainingTime + "\n" +
                    "lastBounceType: " + type + " in dim: " + index;
        }
    }

    /**
     * Earliest time at which the gradient reverses some coordinate's momentum,
     * found as the minimum positive root of the per-coordinate quadratic
     * (action/2) t^2 - gradient t - momentum = 0.
     */
    private MinimumTravelInformation getNextGradientBounce(ReadableVector action,
                                                           ReadableVector gradient,
                                                           ReadableVector momentum) {
        double minimumRoot = Double.POSITIVE_INFINITY;
        int index = -1;
        for (int i = 0, len = action.getDim(); i < len; ++i) {
            double root = minimumPositiveRoot(action.get(i) / 2, -gradient.get(i), -momentum.get(i));
            if (root < minimumRoot) {
                minimumRoot = root;
                index = i;
            }
        }
        return new MinimumTravelInformation(minimumRoot, index);
    }

    /**
     * Earliest time at which a coordinate moving at constant velocity reaches
     * zero; only coordinates heading towards the boundary are considered.
     */
    private MinimumTravelInformation getNextBoundaryBounce(ReadableVector position,
                                                           ReadableVector velocity) {
        double minimumTime = Double.POSITIVE_INFINITY;
        int index = -1;
        for (int i = 0, len = position.getDim(); i < len; ++i) {
            double x = position.get(i);
            double v = velocity.get(i);
            if (headingTowardsBoundary(x, v)) { // Also ensures x != 0.0
                double time = Math.abs(x / v);
                if (time < minimumTime) {
                    minimumTime = time;
                    index = i;
                }
            }
        }
        return new MinimumTravelInformation(minimumTime, index);
    }

    /**
     * Smallest strictly-positive root of a t^2 + b t + c = 0, or
     * +Infinity when no such root exists.
     */
    private static double minimumPositiveRoot(double a,
                                              double b,
                                              double c) {
        double discriminant = b * b - 4 * a * c;
        if (discriminant < 0.0) {
            return Double.POSITIVE_INFINITY;
        }
        double sqrtDiscriminant = Math.sqrt(discriminant);
        double root = (-b - sqrtDiscriminant) / (2 * a);
        if (root <= 0.0) {
            root = (-b + sqrtDiscriminant) / (2 * a);
        }
        if (root <= 0.0) {
            root = Double.POSITIVE_INFINITY;
        }
        return root;
    }

    /**
     * Draws each momentum component with a random sign and an exponential
     * magnitude scaled by sqrt(mass), then applies the mask if present.
     */
    private WrappedVector drawInitialMomentum() {
        ReadableVector mass = preconditioning.mass;
        double[] momentum = new double[mass.getDim()];
        for (int i = 0, len = momentum.length; i < len; i++) {
            int sign = (MathUtils.nextDouble() > 0.5) ? 1 : -1;
            momentum[i] = sign * MathUtils.nextExponential(1) * Math.sqrt(mass.get(i));
        }
        if (mask != null) {
            applyMask(momentum);
        }
        return new WrappedVector.Raw(momentum);
    }

    /** Sign of x: 1, -1, or 0. */
    private static int sign(double x) {
        int sign = 0;
        if (x > 0.0) {
            sign = 1;
        } else if (x < 0.0) {
            sign = -1;
        }
        return sign;
    }

    /**
     * Velocity has unit magnitude (scaled by 1/sqrt(mass)) in the direction of
     * each momentum component; masked components (momentum 0) get velocity 0.
     */
    private WrappedVector drawInitialVelocity(WrappedVector momentum) {
        ReadableVector mass = preconditioning.mass;
        double[] velocity = new double[momentum.getDim()];
        for (int i = 0, len = momentum.getDim(); i < len; ++i) {
            velocity[i] = sign(momentum.get(i)) / Math.sqrt(mass.get(i));
        }
        return new WrappedVector.Raw(velocity);
    }

    /** One column of the precision matrix, with the mask applied if present. */
    private ReadableVector getPrecisionColumn(int index) {
        double[] precisionColumn = columnProvider.getColumn(index);
        if (mask != null) {
            applyMask(precisionColumn);
        }
        return new WrappedVector.Raw(precisionColumn);
    }

    /**
     * Advances the particle to the next event (or to the end of the remaining
     * time), updating position/momentum/velocity/gradient/action in place, and
     * returns the new bounce state.
     */
    private BounceState doBounce(BounceState initialBounceState,
                                 MinimumTravelInformation boundaryBounce,
                                 MinimumTravelInformation gradientBounce,
                                 WrappedVector position, WrappedVector velocity,
                                 WrappedVector momentum,
                                 WrappedVector gradient, WrappedVector action) {

        double remainingTime = initialBounceState.remainingTime;
        double eventTime = Math.min(boundaryBounce.time, gradientBounce.time);

        final BounceState finalBounceState;
        if (remainingTime < eventTime) { // No event during remaining time
            updatePosition(position, velocity, remainingTime);
            finalBounceState = new BounceState(Type.NONE, -1, 0.0);
        } else {
            updatePosition(position, velocity, eventTime);
            updateMomentum(momentum, gradient, action, eventTime);

            final Type eventType;
            final int eventIndex;
            if (boundaryBounce.time < gradientBounce.time) { // Reflect against the boundary
                eventType = Type.BOUNDARY;
                eventIndex = boundaryBounce.index;
                reflectMomentum(momentum, position, eventIndex);
            } else { // Bounce caused by the gradient
                eventType = Type.GRADIENT;
                eventIndex = gradientBounce.index;
            }
            reflectVelocity(velocity, eventIndex);
            updateGradient(gradient, eventTime, action);
            updateAction(action, velocity, eventIndex);
            finalBounceState = new BounceState(eventType, eventIndex, remainingTime - eventTime);
        }
        return finalBounceState;
    }

    /**
     * After the velocity in {@code eventIndex} flips, the action (precision
     * times velocity) changes by twice that component times the corresponding
     * precision column.
     */
    private void updateAction(WrappedVector action, ReadableVector velocity, int eventIndex) {
        ReadableVector column = getPrecisionColumn(eventIndex);
        double v = velocity.get(eventIndex);
        for (int i = 0, len = action.getDim(); i < len; ++i) {
            action.set(i,
                    action.get(i) + 2 * v * column.get(i)
            );
        }
        if (mask != null) {
            applyMask(action);
        }
    }

    private static void reflectMomentum(WrappedVector momentum,
                                        WrappedVector position,
                                        int eventIndex) {
        momentum.set(eventIndex, -momentum.get(eventIndex));
        position.set(eventIndex, 0.0); // Exactly on boundary to avoid potential round-off error
    }

    private static void reflectVelocity(WrappedVector velocity,
                                        int eventIndex) {
        velocity.set(eventIndex, -velocity.get(eventIndex));
    }

    /**
     * Advances momentum analytically along the straight segment:
     * p(t) = p + t * gradient - t^2/2 * action.
     */
    private void updateMomentum(WrappedVector momentum,
                                ReadableVector gradient,
                                ReadableVector action,
                                double eventTime) {
        for (int i = 0, len = momentum.getDim(); i < len; ++i) {
            momentum.set(i,
                    momentum.get(i) + eventTime * gradient.get(i) - eventTime * eventTime * action.get(i) / 2
            );
        }
        if (mask != null) {
            applyMask(momentum);
        }
    }

    private final PrecisionColumnProvider columnProvider;

    private final static boolean DEBUG = false;
    private final static boolean DEBUG_SIGN = false;
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package edacc.manageDB;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import javax.swing.JPopupMenu;
/**
 * Mouse listener for the table of instances in the manage-DB mode.
 * Shows the given popup menu at the mouse-cursor position whenever the
 * platform's popup trigger fires over the table.
 *
 * @author rretz
 */
public class InstanceTableMouseListener extends MouseAdapter {
    private final JPopupMenu jPM;

    /**
     * @param jPM the popup menu to display on a popup trigger
     */
    public InstanceTableMouseListener(JPopupMenu jPM) {
        this.jPM = jPM;
    }

    @Override
    public void mouseReleased(MouseEvent evt) {
        maybeShowPopup(evt);
    }

    @Override
    public void mousePressed(MouseEvent evt) {
        maybeShowPopup(evt);
    }

    // Some platforms fire the popup trigger on press, others on release, so
    // both events funnel through this single check (previously duplicated).
    private void maybeShowPopup(MouseEvent evt) {
        if (evt.isPopupTrigger()) {
            jPM.show(evt.getComponent(), evt.getX(), evt.getY());
        }
    }
}
|
package edu.jhu.hltcoe.gridsearch.dmv;
import ilog.concert.IloException;
import ilog.concert.IloNumExpr;
import ilog.concert.IloNumVar;
import ilog.cplex.CpxException;
import ilog.cplex.IloCplex;
import ilog.cplex.IloCplex.UnknownObjectException;
import java.io.File;
import java.util.Arrays;
import org.apache.log4j.Logger;
import depparsing.model.NonterminalMap;
import edu.jhu.hltcoe.data.DepTree;
import edu.jhu.hltcoe.data.Sentence;
import edu.jhu.hltcoe.parse.DmvCkyParser;
import edu.jhu.hltcoe.parse.pr.DepInstance;
import edu.jhu.hltcoe.parse.pr.DepSentenceDist;
import edu.jhu.hltcoe.util.Pair;
import edu.jhu.hltcoe.util.Utilities;
public class Projections {
private static Logger log = Logger.getLogger(Projections.class);
private IloCplex cplex;
private File tempDir;
public Projections() {
this(null);
}
public Projections(File tempDir) {
this.tempDir = tempDir;
try {
cplex = new IloCplex();
// Turn off stdout but not stderr
cplex.setOut(null);
} catch (IloException e) {
throw new RuntimeException(e);
}
}
public static double[] getProjectedParams(double[] params) {
double[] sortedParams = Arrays.copyOf(params, params.length);
Arrays.sort(sortedParams);
int n = params.length;
double that = Double.POSITIVE_INFINITY;
for (int i = n - 1; i >= 0; i
double ti = 0.0;
for (int j = i + 1; j <= n; j++) {
ti += sortedParams[j - 1];
}
ti -= 1.0;
ti /= n - i;
// System.out.printf("t_{%d} = %f\n", i, ti);
if (i == 0 || ti >= sortedParams[i - 1]) {
that = ti;
break;
}
}
// System.out.printf("t_hat = %f\n", that);
// Just re-use the sortedParams array instead of reallocating memory
double[] newParams = sortedParams;
for (int i = 0; i < newParams.length; i++) {
newParams[i] = params[i] - that;
if (newParams[i] < 0.0) {
newParams[i] = 0.0;
}
}
return newParams;
}
/**
* @param logBounds Bounds for log probabilities
* @param c Index of distribution which has bounds
* @param params Vector to project onto (param.length - 1)-simplex in probability space
* @return The projected parameters or null if infeasible
*/
public double[] getProjectedParams(DmvBounds logBounds, int c, double[] params) throws IloException {
double[] lbs = new double[params.length];
double[] ubs = new double[params.length];
for (int m = 0; m < params.length; m++) {
lbs[m] = Utilities.exp(logBounds.getLb(c, m));
ubs[m] = Utilities.exp(logBounds.getUb(c, m));
}
return getProjectedParams(params, lbs, ubs);
}
/**
* @param params Vector to project onto (param.length - 1)-simplex in probability space
* @param lbs Lower bounds in probability space
* @param ubs Upper bounds in probability space
* @return The projected parameters or null if infeasible
*/
public double[] getProjectedParams(double[] params, double[] lbs, double[] ubs) throws IloException,
UnknownObjectException {
cplex.clearModel();
IloNumVar[] newParamVars = new IloNumVar[params.length];
for (int m = 0; m < newParamVars.length; m++) {
newParamVars[m] = cplex.numVar(lbs[m], ubs[m], String.format("p_{%d}", m));
}
cplex.addEq(cplex.sum(newParamVars), 1.0, "sum-to-one");
IloNumExpr[] squaredDiffs = new IloNumExpr[params.length];
for (int m = 0; m < squaredDiffs.length; m++) {
squaredDiffs[m] = cplex
.prod(cplex.diff(params[m], newParamVars[m]), cplex.diff(params[m], newParamVars[m]));
}
cplex.addMinimize(cplex.sum(squaredDiffs), "obj");
if (tempDir != null) {
cplex.exportModel(new File(tempDir, "proj.lp").getAbsolutePath());
}
try {
if (!cplex.solve()) {
// throw new RuntimeException("projection infeasible");
return null;
}
} catch (CpxException e) {
log.error("params: " + Arrays.toString(params));
log.error("lbs: " + Arrays.toString(lbs));
log.error("ubs: " + Arrays.toString(ubs));
throw e;
}
double[] values = cplex.getValues(newParamVars);
for (int m=0; m<values.length; m++) {
if (values[m] < -1e-8) {
log.warn("Oddly low value after projection: values[m] = " + values[m]);
}
if (values[m] < 0.0) {
values[m] = 0.0;
}
}
return values;
}
public static DepTree getProjectiveParse(Sentence sentence, double[] fracRoot, double[][] fracChild) {
DmvCkyParser parser = new DmvCkyParser();
int[] tags = new int[sentence.size()];
DepInstance depInstance = new DepInstance(tags);
DepSentenceDist sd = new DepSentenceDist(depInstance, new NonterminalMap(2, 1), fracRoot, fracChild);
Pair<DepTree, Double> pair = parser.parse(sentence, sd);
DepTree tree = pair.get1();
return tree;
}
public void setTempDir(File tempDir) {
this.tempDir = tempDir;
}
}
|
package edu.mit.streamjit.impl.compiler;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.collect.Table;
import edu.mit.streamjit.api.RoundrobinSplitter;
import edu.mit.streamjit.api.StreamCompilationFailedException;
import edu.mit.streamjit.api.Worker;
import edu.mit.streamjit.impl.blob.Blob;
import edu.mit.streamjit.impl.blob.Blob.Token;
import edu.mit.streamjit.impl.blob.DrainData;
import edu.mit.streamjit.impl.common.Configuration;
import edu.mit.streamjit.impl.common.Configuration.SwitchParameter;
import edu.mit.streamjit.impl.common.IOInfo;
import static edu.mit.streamjit.util.Combinators.*;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.Set;
import java.util.TreeSet;
/**
*
* @author Jeffrey Bosboom <jeffreybosboom@gmail.com>
* @since 9/22/2013
*/
public class Compiler2 {
	/** One archetype per distinct Worker subclass in the graph. */
	private final ImmutableSet<ActorArchetype> archetypes;
	/** All actors in the graph, sorted; shrinks as actors are removed. */
	private final NavigableSet<Actor> actors;
	/** Actor groups produced by fusion; rebuilt when groups are removed. */
	private ImmutableSortedSet<ActorGroup> groups;
	private final Configuration config;
	private final int maxNumCores;
	private final DrainData initialState;
	/** All storage (edges) in the graph. */
	private final Set<Storage> storage;
	/** Steady-state executions of each group per overall iteration. */
	private ImmutableMap<ActorGroup, Integer> externalSchedule;
	//Index functions (logical index -> physical index) for Tokens at the
	//graph boundary; start as the identity and accumulate transformations.
	private final Map<Token, MethodHandle> tokenInputIndices = new HashMap<>(), tokenOutputIndices = new HashMap<>();
	/**
	 * Builds the compiler's view of the stream graph: one Actor per Worker,
	 * one ActorArchetype per Worker class, Storage for each edge, and
	 * identity index functions for boundary Tokens.
	 */
	public Compiler2(Set<Worker<?, ?>> workers, Configuration config, int maxNumCores, DrainData initialState) {
		Map<Class<?>, ActorArchetype> archetypesBuilder = new HashMap<>();
		Map<Worker<?, ?>, Actor> actorsBuilder = new HashMap<>();
		for (Worker<?, ?> w : workers) {
			@SuppressWarnings("unchecked")
			Class<? extends Worker<?, ?>> wClass = (Class<? extends Worker<?, ?>>)w.getClass();
			if (archetypesBuilder.get(wClass) == null)
				archetypesBuilder.put(wClass, new ActorArchetype(wClass));
			Actor actor = new Actor(w, archetypesBuilder.get(wClass));
			actorsBuilder.put(w, actor);
		}
		this.archetypes = ImmutableSet.copyOf(archetypesBuilder.values());
		this.actors = new TreeSet<>(actorsBuilder.values());
		Table<Object, Object, Storage> storageTable = HashBasedTable.create();
		for (Actor a : actors)
			a.connect(actorsBuilder, storageTable);
		this.storage = new HashSet<>(storageTable.values());
		//Row keys are upstream endpoints, column keys downstream endpoints;
		//Tokens among them mark the graph boundary.
		for (Object o : storageTable.rowKeySet())
			if (o instanceof Token)
				tokenInputIndices.put((Token)o, MethodHandles.identity(int.class));
		for (Object o : storageTable.columnKeySet())
			if (o instanceof Token)
				tokenOutputIndices.put((Token)o, MethodHandles.identity(int.class));
		this.config = config;
		this.maxNumCores = maxNumCores;
		this.initialState = initialState;
	}
	/**
	 * Runs the compilation pipeline.  Incomplete: later phases are still
	 * commented out and no Blob is produced yet.
	 *
	 * @return currently always null (TODO: produce the compiled Blob)
	 */
	public Blob compile() {
		fuse();
		schedule();
//		identityRemoval();
		splitterRemoval();
		//joinerRemoval();
		return null;
	}
	/**
	 * Fuses actors into groups as directed by the configuration.  Each group
	 * with exactly one predecessor, whose "fuseN" switch parameter is on and
	 * which does not peek, is merged into its predecessor; this repeats until
	 * a fixed point is reached.
	 */
	private void fuse() {
		List<ActorGroup> actorGroups = new ArrayList<>();
		for (Actor a : actors)
			actorGroups.add(ActorGroup.of(a));
		//Fuse as much as possible.
		outer: do {
			for (Iterator<ActorGroup> it = actorGroups.iterator(); it.hasNext();) {
				ActorGroup g = it.next();
				String paramName = String.format("fuse%d", g.id());
				SwitchParameter<Boolean> fuseParam = config.getParameter(paramName, SwitchParameter.class, Boolean.class);
				//Fixed: the guard was "size() > 1", which let source groups
				//(zero predecessors) fall through to iterator().next() below
				//and throw NoSuchElementException.
				if (g.isPeeking() || !fuseParam.getValue() || g.predecessorGroups().size() != 1)
					continue;
				//NOTE(review): the predecessor group stays in actorGroups
				//after fusion -- confirm ActorGroup.fuse accounts for this.
				ActorGroup fusedGroup = ActorGroup.fuse(g, g.predecessorGroups().iterator().next());
				it.remove();
				actorGroups.add(fusedGroup);
				continue outer;
			}
			break;
		} while (true);
		//Fixed: previously copied the still-null groups field
		//(ImmutableSortedSet.copyOf(groups)), which NPEs; the groups we just
		//built live in the local actorGroups list.
		this.groups = ImmutableSortedSet.copyOf(actorGroups);
	}
	/**
	 * Computes each group's internal schedule and the external schedule.
	 *
	 * @throws StreamCompilationFailedException if no external schedule exists
	 */
	private void schedule() {
		for (ActorGroup g : groups)
			internalSchedule(g);
		Schedule.Builder<ActorGroup> scheduleBuilder = Schedule.builder();
		for (ActorGroup g : groups) {
			for (Storage e : g.outputs()) {
				if (!e.hasDownstreamActor())
					continue;
				ActorGroup other = e.downstreamActor().group();
				//Scale rates by how often each endpoint fires within its
				//group's internal schedule.
				int upstreamAdjust = g.schedule().get(e.upstreamActor());
				int downstreamAdjust = other.schedule().get(e.downstreamActor());
				scheduleBuilder.connect(g, other)
						.push(e.push() * upstreamAdjust)
						.pop(e.pop() * downstreamAdjust)
						.peek(e.peek() * downstreamAdjust)
						.bufferExactly(0);
			}
		}
		try {
			externalSchedule = scheduleBuilder.build().getSchedule();
		} catch (Schedule.ScheduleException ex) {
			throw new StreamCompilationFailedException("couldn't find external schedule", ex);
		}
	}
	/**
	 * Computes the internal schedule for the given group.
	 *
	 * @throws StreamCompilationFailedException if no internal schedule exists
	 */
	private void internalSchedule(ActorGroup g) {
		Schedule.Builder<Actor> scheduleBuilder = Schedule.builder();
		scheduleBuilder.addAll(g.actors());
		Map<Worker<?, ?>, Actor> map = new HashMap<>();
		for (Actor a : g.actors())
			map.put(a.worker(), a);
		for (IOInfo info : IOInfo.internalEdges(map.keySet())) {
			scheduleBuilder.connect(map.get(info.upstream()), map.get(info.downstream()))
					.push(info.upstream().getPushRates().get(info.getUpstreamChannelIndex()).max())
					.pop(info.downstream().getPopRates().get(info.getDownstreamChannelIndex()).max())
					.peek(info.downstream().getPeekRates().get(info.getDownstreamChannelIndex()).max())
					.bufferExactly(0);
		}
		try {
			Schedule<Actor> schedule = scheduleBuilder.build();
			g.setSchedule(schedule.getSchedule());
		} catch (Schedule.ScheduleException ex) {
			throw new StreamCompilationFailedException("couldn't find internal schedule for group "+g.id(), ex);
		}
	}
	/**
	 * Removes splitters whose distribution pattern can be expressed as index
	 * transfer functions: each consumer of a splitter output is re-pointed at
	 * the splitter's input storage, with the transfer function composed onto
	 * its index function, then the splitter itself is removed.
	 */
	private void splitterRemoval() {
		//Iterate over a snapshot because removeActor mutates this.actors.
		for (Actor splitter : ImmutableSortedSet.copyOf(actors)) {
			List<MethodHandle> transfers = splitterTransferFunctions(splitter);
			if (transfers == null) continue;
			Storage survivor = Iterables.getOnlyElement(splitter.inputs());
			//Input index function of the splitter (logical -> physical).
			MethodHandle Sin = Iterables.getOnlyElement(splitter.inputIndexFunctions());
			for (int i = 0; i < splitter.outputs().size(); ++i) {
				Storage victim = splitter.outputs().get(i);
				MethodHandle t = transfers.get(i);
				//Compose: consumer logical index -> splitter input logical
				//index -> splitter input physical index.
				MethodHandle t2 = MethodHandles.filterReturnValue(t, Sin);
				for (Object o : victim.downstream())
					if (o instanceof Actor) {
						Actor q = (Actor)o;
						List<Storage> inputs = q.inputs();
						List<MethodHandle> inputIndices = q.inputIndexFunctions();
						for (int j = 0; j < inputs.size(); ++j)
							if (inputs.get(j).equals(victim)) {
								inputs.set(j, survivor);
								inputIndices.set(j, MethodHandles.filterReturnValue(t2, inputIndices.get(j)));
							}
					} else if (o instanceof Token) {
						Token q = (Token)o;
						tokenInputIndices.put(q, MethodHandles.filterReturnValue(t2, tokenInputIndices.get(q)));
					} else
						throw new AssertionError(o);
				//NOTE(review): victim is never removed from this.storage --
				//confirm whether dead Storage is pruned elsewhere.
			}
			removeActor(splitter);
		}
	}
	/**
	 * Returns transfer functions for the given splitter, or null if the actor
	 * isn't a splitter or isn't one of the built-in splitters or for some other
	 * reason we can't make transfer functions.
	 *
	 * A splitter has one transfer function for each output that maps logical
	 * output indices to logical input indices (representing the splitter's
	 * distribution pattern).
	 * @param a an actor
	 * @return transfer functions, or null
	 */
	private List<MethodHandle> splitterTransferFunctions(Actor a) {
		if (a.worker() instanceof RoundrobinSplitter) {
			//Output n receives input indices Nx + n: Nx, Nx + 1, ..., Nx+(N-1).
			int N = a.outputs().size();
			ImmutableList.Builder<MethodHandle> transfer = ImmutableList.builder();
			for (int n = 0; n < N; ++n)
				transfer.add(add(mul(MethodHandles.identity(int.class), N), n));
			return transfer.build();
		} else //TODO: WeightedRoundrobinSplitter, DuplicateSplitter
			return null;
	}
	/**
	 * Removes an Actor from this compiler's data structures. The Actor should
	 * already have been unlinked from the graph (no incoming edges); this takes
	 * care of removing it from the actors set, its actor group (possibly
	 * removing the group if it's now empty), and the schedule.
	 * @param a the actor to remove
	 */
	private void removeActor(Actor a) {
		assert actors.contains(a) : a;
		actors.remove(a);
		ActorGroup g = a.group();
		g.remove(a);
		if (g.actors().isEmpty()) {
			groups = ImmutableSortedSet.copyOf(Sets.difference(groups, ImmutableSet.of(g)));
			//Drops g from the schedule regardless of its value: entries for g
			//land in entriesDiffering or entriesInCommon, never onlyOnLeft.
			externalSchedule = ImmutableMap.copyOf(Maps.difference(externalSchedule, ImmutableMap.of(g, 0)).entriesOnlyOnLeft());
		}
	}
//	/**
//	 * Removes Identity instances from the graph, unless doing so would make the
//	 * graph empty.
//	 */
//	private void identityRemoval() {
//		//TODO: remove from group, possibly removing the group if it becomes empty
//		for (Iterator<Actor> iter = actors.iterator(); iter.hasNext();) {
//			if (actors.size() == 1)
//				break;
//			Actor actor = iter.next();
//			if (!actor.archetype().workerClass().equals(Identity.class))
//				continue;
//			iter.remove();
//			assert actor.predecessors().size() == 1 && actor.successors().size() == 1;
//			Object upstream = actor.predecessors().get(0), downstream = actor.successors().get(0);
//			if (upstream instanceof Actor)
//				replace(((Actor)upstream).successors(), actor, downstream);
//			if (downstream instanceof Actor)
//				replace(((Actor)downstream).predecessors(), actor, upstream);
//			//No index function changes required for Identity actors.
//	private static int replace(List<Object> list, Object target, Object replacement) {
//		int replacements = 0;
//		for (int i = 0; i < list.size(); ++i)
//			if (Objects.equals(list.get(0), target)) {
//				list.set(i, replacement);
//				++replacements;
//		return replacements;
}
|
package edu.ucla.cens.awserver.cache;
/**
* Provide data lookup functionality against a cache.
*
* @author selsky
*/
public interface CacheService {
    /**
     * Looks up the value associated with the given key in the underlying cache.
     *
     * @param key the cache key
     * @return the cached value for the key; behavior for a missing key is
     *         implementation-defined (presumably null -- confirm with
     *         implementations)
     */
    public Object lookup(Object key);
}
|
package edu.wpi.first.wpilibj.templates;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.command.Command;
import edu.wpi.first.wpilibj.command.CommandGroup;
import edu.wpi.first.wpilibj.command.Scheduler;
import edu.wpi.first.wpilibj.livewindow.LiveWindow;
import edu.wpi.first.wpilibj.templates.commands.AutoCommandGroup;
import edu.wpi.first.wpilibj.templates.commands.CommandBase;
import edu.wpi.first.wpilibj.templates.commands.TelopCommandGroup;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class Robot3331 extends IterativeRobot {
    // Top-level command groups run during the autonomous and teleoperated
    // periods, respectively.  Constructed once in robotInit().
    CommandGroup autonomousCommand, telopCommand;
    // Called once when the robot code starts.  Commands are constructed
    // before CommandBase.init() runs; the subsystems they require must
    // tolerate that ordering (standard WPILib command-based template).
    public void robotInit() {
        autonomousCommand = new AutoCommandGroup();
        telopCommand = new TelopCommandGroup();
        // Initialize all subsystems
        CommandBase.init();
    }
    // Called once at the start of the autonomous period; queues the
    // autonomous command group on the scheduler.
    public void autonomousInit() {
        // schedule the autonomous command
        autonomousCommand.start();
    }
    /**
     * This function is called periodically during autonomous
     */
    public void autonomousPeriodic() {
        Scheduler.getInstance().run();
    }
    public void teleopInit() {
        // This makes sure that the autonomous stops running when
        // teleop starts running. If you want the autonomous to
        // continue until interrupted by another command, remove
        // this line or comment it out.
        autonomousCommand.cancel();
        // schedule the telop command
        telopCommand.start();
    }
    /**
     * This function is called periodically during operator control
     */
    public void teleopPeriodic() {
        Scheduler.getInstance().run();
    }
    /**
     * This function is called periodically during test mode
     */
    public void testPeriodic() {
        LiveWindow.run();
    }
}
|
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.InetAddress;
import java.io.*;
import org.json.simple.JSONValue;
import org.json.simple.JSONObject;
import org.json.simple.JSONArray;
/** Interface to the Pings server.
* @todo Add methods to get addresses to ping, and to submit ping results.
* @todo Error handling when the http status returned is not 200.
*
* @author Christian Hudon <chrish@pianocktail.org>
*/
public class ServerProxy {
    /** Value object holding one batch of ping work handed out by the server. */
    public static class Pings {
        public InetAddress[] addresses;
        public JSONObject[] geoip_info;
        public String[] results;
        public String token;
    }
    /** Connects to the given host on the default HTTP port (80). */
    public ServerProxy(String server_hostname) {
        m_server_hostname = server_hostname;
        m_server_port = 80;
    }
    /** Connects to the given host and port. */
    public ServerProxy(String server_hostname, int server_port) {
        m_server_hostname = server_hostname;
        m_server_port = server_port;
    }
    /** Retrieves a list of addresses to ping. */
    public Pings getPings(ClientInfo client_info) throws IOException {
        // Send request to server. Returns a dict with the following keys
        // and values: "token" (a string), "pings" (a list of IP addresses),
        // "geoip" (a list of dicts, one per IP address).
        JSONObject json_result = (JSONObject)doJsonRequest("/get_pings", null);
        // Parse out json_result.
        Pings pings = new Pings();
        pings.token = (String)json_result.get("token");
        JSONArray addresses = (JSONArray)json_result.get("pings");
        int num_addresses = addresses.size();
        pings.addresses = new InetAddress[num_addresses];
        for (int i = 0; i < num_addresses; i++) {
            pings.addresses[i] = InetAddress.getByName((String)addresses.get(i));
        }
        // GeoIP data is optional; missing entries stay null.
        JSONArray all_geoip_data = (JSONArray)json_result.get("geoip");
        pings.geoip_info = new JSONObject[num_addresses];
        if (all_geoip_data != null) {
            for (int i = 0; i < num_addresses; i++) {
                Object o = all_geoip_data.get(i);
                if (o != null) {
                    pings.geoip_info[i] = (JSONObject)o;
                }
                else
                    pings.geoip_info[i] = null;
            }
        }
        // Slots for the caller to fill in before submitResults().
        pings.results = new String[num_addresses];
        return pings;
    }
    /** Submits the ping results back to the server. */
    public void submitResults(ClientInfo client_info, Pings pings) throws IOException {
        // Build JSON request, a dict with the following keys and values:
        // "token" (a string... the same as return by getPings), "results"
        // (a list of arbitrary JSON objects, one per ping), and optionally
        // "userid" (a string).
        JSONObject json_request = new JSONObject();
        json_request.put("token", pings.token);
        json_request.put("results", pings.results);
        String nick = client_info.getNickname();
        if (nick != null && nick.length() != 0)
            json_request.put("userid", nick);
        // Send request to server. The reply is a constant (at least for now),
        // so it is deliberately ignored.
        doJsonRequest("/submit_ping_results", json_request);
    }
    /// The hostname of the Pings server.
    private String m_server_hostname;
    /// The port of the Pings server.
    private int m_server_port;
    private static final String CHARSET = "UTF-8";
    /** Sends a JSON object via POST to the given request URL path. Returns the
        JSON response object, or null on a non-200 HTTP status. */
    private Object doJsonRequest(String request_path, Object content) throws IOException {
        // Serialize content to JSON.
        String json_request = JSONValue.toJSONString(content);
        // Open connection to URL.
        URL server_url = new URL("http", m_server_hostname, m_server_port,
                                 request_path);
        HttpURLConnection connection = (HttpURLConnection)server_url.openConnection();
        connection.setDoOutput(true);
        connection.setRequestMethod("POST");
        connection.setRequestProperty("Accept-Charset", CHARSET);
        connection.setRequestProperty("Content-Type", "application/json;charset=" + CHARSET);
        // Write request.
        OutputStream output = connection.getOutputStream();
        try {
            output.write(json_request.getBytes(CHARSET));
        }
        finally {
            output.close();
        }
        int status = connection.getResponseCode();
        if (status != HttpURLConnection.HTTP_OK) {
            // TODO Improve error handling.
            return null;
        }
        // Read back reply.  Fixed: the reader is now closed, and it decodes
        // with the declared CHARSET instead of the platform default charset.
        InputStream response = connection.getInputStream();
        BufferedReader br = new BufferedReader(new InputStreamReader(response, CHARSET));
        StringBuilder sb_response = new StringBuilder();
        try {
            String chunk = br.readLine();
            while (chunk != null) {
                sb_response.append(chunk);
                chunk = br.readLine();
            }
        }
        finally {
            br.close();
        }
        return JSONValue.parse(sb_response.toString());
    }
}
|
package org.moxie.maxml;
import static java.text.MessageFormat.format;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
/**
* Static utility methods for the Maxml parser.
*
* @author James Moger
*
*/
public class Maxml {
	/**
	 * Parse the Maxml content and reflectively build an object with the
	 * document's data.  Field matching is case-insensitive: public field
	 * names are indexed lowercased, and document keys are lowercased for
	 * lookup.
	 *
	 * @param content the Maxml document text
	 * @param clazz the class to instantiate and populate (must have a
	 *            no-argument constructor and public fields)
	 * @return an object of type clazz populated from the document
	 * @throws MaxmlException if parsing fails or the document contains a
	 *             key with no matching public field
	 */
	public static <X> X parse(String content, Class<X> clazz)
			throws MaxmlException {
		try {
			MaxmlParser parser = new MaxmlParser();
			Map<String, Object> map = parser.parse(new BufferedReader(
					new StringReader(content)));
			X x = clazz.newInstance();
			// Index public fields by lowercase name for case-insensitive binding.
			Map<String, Field> fields = new HashMap<String, Field>();
			for (Field field : clazz.getFields()) {
				fields.put(field.getName().toLowerCase(), field);
			}
			for (Map.Entry<String, Object> entry : map.entrySet()) {
				String key = entry.getKey();
				Object o = entry.getValue();
				// Fixed: the field map is keyed by lowercased names, but the
				// document key was previously used verbatim, so camelCase
				// fields could never be bound.
				String lookup = key.toLowerCase();
				if (fields.containsKey(lookup)) {
					Field field = fields.get(lookup);
					field.set(x, o);
				} else {
					throw new MaxmlException(format("Unbound property \"{0}\"",
							key));
				}
			}
			return x;
		} catch (MaxmlException e) {
			throw e;
		} catch (Exception t) {
			throw new MaxmlException(t);
		}
	}
	/**
	 * Parse the content of the Maxml document and return a object map of the
	 * content.
	 *
	 * @param content
	 * @return an object map
	 * @throws MaxmlException if the document cannot be parsed
	 */
	public static MaxmlMap parse(String content)
			throws MaxmlException {
		try {
			MaxmlParser parser = new MaxmlParser();
			return parser.parse(new BufferedReader(new StringReader(content)));
		} catch (MaxmlException e) {
			throw e;
		} catch (Exception e) {
			throw new MaxmlException(e);
		}
	}
	/**
	 * Parse the content of the Maxml document and return an object map of the
	 * content.  The stream is decoded as UTF-8 and is NOT closed by this
	 * method; the caller retains ownership.
	 *
	 * @param is
	 *            an input stream
	 * @return an object map
	 * @throws MaxmlException if the document cannot be parsed
	 */
	public static MaxmlMap parse(InputStream is)
			throws MaxmlException {
		try {
			MaxmlParser parser = new MaxmlParser();
			return parser.parse(new BufferedReader(new InputStreamReader(is,
					"UTF-8")));
		} catch (MaxmlException e) {
			throw e;
		} catch (Exception e) {
			throw new MaxmlException(e);
		}
	}
	/**
	 * Parse the content of the Maxml document and return an object map of the
	 * content.  The file is opened, parsed as UTF-8, and always closed.
	 *
	 * @param file
	 *            a file
	 * @return an object map
	 * @throws MaxmlException if the file cannot be read or parsed; the
	 *             message carries the file's absolute path
	 */
	public static MaxmlMap parse(File file)
			throws MaxmlException {
		InputStream is = null;
		try {
			is = new FileInputStream(file);
			return parse(is);
		} catch (MaxmlException e) {
			throw new MaxmlException(file.getAbsolutePath(), e);
		} catch (Exception e) {
			throw new MaxmlException(file.getAbsolutePath(), e);
		} finally {
			try {
				if (is != null) {
					is.close();
				}
			} catch (Exception e) {
				// Best-effort close; a failure here must not mask the result.
			}
		}
	}
}
|
package core.config;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JOptionPane;
import com.sun.glass.events.KeyEvent;
import argo.jdom.JsonNode;
import argo.jdom.JsonNodeFactories;
import argo.jdom.JsonRootNode;
import core.ipc.IPCServiceManager;
import core.keyChain.KeyChain;
import core.languageHandler.compiler.DynamicCompilerManager;
import core.userDefinedTask.TaskGroup;
import frontEnd.MainBackEndHolder;
import utilities.FileUtility;
import utilities.ILoggable;
import utilities.JSONUtility;
public class Config implements ILoggable {
	public static final String RELEASE_VERSION = "4.0";
	private static final String CONFIG_FILE_NAME = "config.json";
	public static final String EXPORTED_CONFIG_FILE_NAME = "exported_" + CONFIG_FILE_NAME;
	protected static final String CURRENT_CONFIG_VERSION = "2.3";
	private static final Level DEFAULT_NATIVE_HOOK_DEBUG_LEVEL = Level.WARNING;
	private static final boolean DEFAULT_TRAY_ICON_USE = true;
	//Parsers for every config version we know how to read, oldest first.
	private static final List<ConfigParser> knownParsers;
	private DynamicCompilerManager compilerFactory;
	private final MainBackEndHolder backEnd;
	public static final int HALT_TASK = KeyEvent.VK_ESCAPE; // This should be hardcoded, and must not be changed
	//Global hotkeys for recording/replaying tasks.
	private KeyChain RECORD;
	private KeyChain REPLAY;
	private KeyChain COMPILED_REPLAY;
	private int mouseGestureActivationKey;
	private boolean useTrayIcon;
	private boolean enabledHaltingKeyPressed;
	/**
	 * If enabled will consider executing task on key released event. Otherwise will consider executing
	 * task on key pressed event.
	 */
	private boolean executeOnKeyReleased;
	private Level nativeHookDebugLevel;
	static {
		knownParsers = Arrays.asList(new ConfigParser[]{
			new Parser1_0(),
			new Parser1_1(),
			new Parser1_2(),
			new Parser1_3(),
			new Parser1_4(),
			new Parser1_5(),
			new Parser1_6(),
			new Parser1_7(),
			new Parser1_8(),
			new Parser1_9(),
			new Parser2_0(),
			new Parser2_1(),
			new Parser2_2(),
			new Parser2_3(),
		});
	}
	/**
	 * Creates a config with built-in defaults; call loadConfig to read the
	 * persisted settings.
	 */
	public Config(MainBackEndHolder backEnd) {
		this.backEnd = backEnd;
		useTrayIcon = DEFAULT_TRAY_ICON_USE;
		this.enabledHaltingKeyPressed = true;
		this.executeOnKeyReleased = true;
		this.nativeHookDebugLevel = DEFAULT_NATIVE_HOOK_DEBUG_LEVEL;
		this.mouseGestureActivationKey = KeyEvent.VK_CAPS_LOCK;
		RECORD = new KeyChain(KeyEvent.VK_F9);
		REPLAY = new KeyChain(KeyEvent.VK_F11);
		COMPILED_REPLAY = new KeyChain(KeyEvent.VK_F12);
	}
	public DynamicCompilerManager getCompilerFactory() {
		return compilerFactory;
	}
	/**
	 * Returns the parser that reads exactly the given config version, or
	 * null if the version is unknown.
	 */
	protected static ConfigParser getConfigParser(String version) {
		for (ConfigParser parser : knownParsers) {
			if (parser.getVersion().equals(version)) {
				return parser;
			}
		}
		return null;
	}
	/**
	 * Get config parser whose previous version is this version
	 * @param version the version to consider
	 * @return the config parser whose previous version is this version, or null
	 */
	protected static ConfigParser getNextConfigParser(String version) {
		for (ConfigParser parser : knownParsers) {
			String previousVersion = parser.getPreviousVersion();
			if (previousVersion != null && previousVersion.equals(version)) {
				return parser;
			}
		}
		return null;
	}
	/**
	 * Loads configuration from the given file (or the default config.json if
	 * file is null).  On any failure a dialog is shown and the built-in
	 * defaults are used instead.
	 */
	public void loadConfig(File file) {
		compilerFactory = new DynamicCompilerManager();
		File configFile = file == null ? new File(CONFIG_FILE_NAME) : file;
		if (FileUtility.fileExists(configFile)) {
			JsonRootNode root = JSONUtility.readJSON(configFile);
			if (root == null) {
				JOptionPane.showMessageDialog(null, "Config file is not in json format");
				return;
			} else if (!root.isStringValue("version")) {
				JOptionPane.showMessageDialog(null, "Config file is in unknown version");
				return;
			}
			String version = root.getStringValue("version");
			ConfigParser parser = getConfigParser(version);
			//Fixed: these two failure branches were sequential ifs, so an
			//unknown version showed both dialogs and ran defaultExtract()
			//twice, adding a duplicate "default" task group.
			if (parser == null) {
				JOptionPane.showMessageDialog(null, "Config file is in unknown version " + version);
				defaultExtract();
			} else if (!parser.extractData(this, root)) {
				JOptionPane.showMessageDialog(null, "Cannot extract result with version " + version);
				defaultExtract();
			}
		} else {
			defaultExtract();
		}
	}
	/** Installs the default single "default" task group. */
	private void defaultExtract() {
		List<TaskGroup> taskGroups = backEnd.getTaskGroups();
		taskGroups.add(new TaskGroup("default"));
		backEnd.setCurrentTaskGroup(taskGroups.get(0));
	}
	/**
	 * Serializes the full configuration (settings, IPC, compilers, task
	 * groups) to config.json.
	 *
	 * @return true if the file was written successfully
	 */
	public boolean writeConfig() {
		List<JsonNode> taskNodes = new ArrayList<>();
		for (TaskGroup group : backEnd.getTaskGroups()) {
			taskNodes.add(group.jsonize());
		}
		JsonRootNode root = JsonNodeFactories.object(
				JsonNodeFactories.field("version", JsonNodeFactories.string(CURRENT_CONFIG_VERSION)),
				JsonNodeFactories.field("global_settings", JsonNodeFactories.object(
					JsonNodeFactories.field("debug", JsonNodeFactories.object(
						JsonNodeFactories.field("level", JsonNodeFactories.string(nativeHookDebugLevel.toString()))
						)),
					JsonNodeFactories.field("tray_icon_enabled", JsonNodeFactories.booleanNode(useTrayIcon)),
					JsonNodeFactories.field("enabled_halt_by_key", JsonNodeFactories.booleanNode(enabledHaltingKeyPressed)),
					JsonNodeFactories.field("execute_on_key_released", JsonNodeFactories.booleanNode(executeOnKeyReleased)),
					JsonNodeFactories.field("global_hotkey", JsonNodeFactories.object(
						JsonNodeFactories.field("mouse_gesture_activation", JsonNodeFactories.number(mouseGestureActivationKey)),
						JsonNodeFactories.field("record", RECORD.jsonize()),
						JsonNodeFactories.field("replay", REPLAY.jsonize()),
						JsonNodeFactories.field("replay_compiled", COMPILED_REPLAY.jsonize())
						))
					)),
				JsonNodeFactories.field("ipc_settings", IPCServiceManager.jsonize()),
				JsonNodeFactories.field("compilers", compilerFactory.jsonize()),
				JsonNodeFactories.field("task_groups", JsonNodeFactories.array(taskNodes))
				);
		return JSONUtility.writeJson(root, new File(CONFIG_FILE_NAME));
	}
	/**
	 * Exports only the task groups (plus version) to the exported config file
	 * inside the given destination directory.
	 *
	 * @return true if the file was written successfully
	 */
	public boolean exportTasksConfig(File destination) {
		List<JsonNode> taskNodes = new ArrayList<>();
		for (TaskGroup group : backEnd.getTaskGroups()) {
			taskNodes.add(group.jsonize());
		}
		JsonRootNode root = JsonNodeFactories.object(
				JsonNodeFactories.field("version", JsonNodeFactories.string(CURRENT_CONFIG_VERSION)),
				JsonNodeFactories.field("task_groups", JsonNodeFactories.array(taskNodes)));
		String fullPath = FileUtility.joinPath(destination.getAbsolutePath(), EXPORTED_CONFIG_FILE_NAME);
		return JSONUtility.writeJson(root, new File(fullPath));
	}
	/**
	 * Imports task groups from the exported config file in the working
	 * directory.
	 *
	 * @return true if the import succeeded
	 */
	public boolean importTaskConfig() {
		File configFile = new File(EXPORTED_CONFIG_FILE_NAME);
		if (!configFile.isFile()) {
			getLogger().warning("Config file does not exist " + configFile.getAbsolutePath());
			return false;
		}
		JsonRootNode root = JSONUtility.readJSON(configFile);
		if (root == null) {
			getLogger().warning("Unable to import config file " + configFile.getAbsolutePath());
			return false;
		}
		//Fixed: guard the version field before reading it, matching
		//loadConfig; previously a missing field could fail uncontrolled.
		if (!root.isStringValue("version")) {
			getLogger().warning("Config file has no version " + configFile.getAbsolutePath());
			return false;
		}
		String version = root.getStringValue("version");
		ConfigParser parser = getConfigParser(version);
		if (parser == null) {
			getLogger().warning("Unknown version " + version);
			return false;
		}
		boolean result = parser.importData(this, root);
		return result;
	}
	public int getMouseGestureActivationKey() {
		return mouseGestureActivationKey;
	}
	public void setMouseGestureActivationKey(int mouseGestureActivationKey) {
		this.mouseGestureActivationKey = mouseGestureActivationKey;
	}
	public KeyChain getRECORD() {
		return RECORD;
	}
	//Null key chains are ignored so a bad config can't clear a hotkey.
	public void setRECORD(KeyChain RECORD) {
		if (RECORD != null) {
			this.RECORD = RECORD;
		}
	}
	public void setRECORD(int RECORD) {
		setRECORD(new KeyChain(Arrays.asList(RECORD)));
	}
	public KeyChain getREPLAY() {
		return REPLAY;
	}
	public void setREPLAY(KeyChain REPLAY) {
		if (REPLAY != null) {
			this.REPLAY = REPLAY;
		}
	}
	public KeyChain getCOMPILED_REPLAY() {
		return COMPILED_REPLAY;
	}
	public void setCOMPILED_REPLAY(KeyChain COMPILED_REPLAY) {
		if (COMPILED_REPLAY != null) {
			this.COMPILED_REPLAY = COMPILED_REPLAY;
		}
	}
	public boolean isUseTrayIcon() {
		return useTrayIcon;
	}
	public void setUseTrayIcon(boolean useTrayIcon) {
		this.useTrayIcon = useTrayIcon;
	}
	public boolean isExecuteOnKeyReleased() {
		return executeOnKeyReleased;
	}
	public void setExecuteOnKeyReleased(boolean executeOnKeyReleased) {
		this.executeOnKeyReleased = executeOnKeyReleased;
	}
	public Level getNativeHookDebugLevel() {
		return nativeHookDebugLevel;
	}
	public void setNativeHookDebugLevel(Level nativeHookDebugLevel) {
		this.nativeHookDebugLevel = nativeHookDebugLevel;
	}
	protected MainBackEndHolder getBackEnd() {
		return backEnd;
	}
	public boolean isEnabledHaltingKeyPressed() {
		return enabledHaltingKeyPressed;
	}
	public void setEnabledHaltingKeyPressed(boolean enabledHaltingKeyPressed) {
		this.enabledHaltingKeyPressed = enabledHaltingKeyPressed;
	}
	@Override
	public Logger getLogger() {
		return Logger.getLogger(Config.class.getName());
	}
}
|
package fi.tnie.db.ent;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
//import org.apache.log4j.Logger;
import fi.tnie.db.rpc.ReferenceHolder;
import fi.tnie.db.types.ReferenceType;
import fi.tnie.db.ent.value.EntityKey;
import fi.tnie.db.expr.AbstractTableReference;
import fi.tnie.db.expr.DefaultTableExpression;
import fi.tnie.db.expr.ForeignKeyJoinCondition;
import fi.tnie.db.expr.From;
import fi.tnie.db.expr.OrderBy;
import fi.tnie.db.expr.Predicate;
import fi.tnie.db.expr.QueryExpression;
import fi.tnie.db.expr.Select;
import fi.tnie.db.expr.ColumnReference;
import fi.tnie.db.expr.SelectStatement;
import fi.tnie.db.expr.TableReference;
import fi.tnie.db.expr.OrderBy.Order;
import fi.tnie.db.log.DefaultLogger;
import fi.tnie.db.log.Logger;
import fi.tnie.db.meta.BaseTable;
import fi.tnie.db.meta.Column;
import fi.tnie.db.meta.ForeignKey;
public class DefaultEntityTemplateQuery<
A extends Attribute,
R extends Reference,
T extends ReferenceType<A, R, T, E, H, F, M>,
E extends Entity<A, R, T, E, H, F, M>,
H extends ReferenceHolder<A, R, T, E, H, M>,
F extends EntityFactory<E, H, M, F>,
M extends EntityMetaData<A, R, T, E, H, F, M>,
Q extends EntityQueryTemplate<A, R, T, E, H, F, M, Q>
>
implements EntityQuery<A, R, T, E, H, F, M, Q>
{
//	private static Logger logger = Logger.getLogger(DefaultEntityQuery.class);
	private static final long serialVersionUID = -5505364328412305185L;
	// Root entity type this query produces.
	private T type;
	// Lazily-built SQL structures; transient because they are rebuilt by
	// init() after deserialization.
	private transient DefaultTableExpression query;
	private transient QueryExpression queryExpression;
	private transient TableReference rootRef;
	private transient List<Predicate> predicateList;
	// Maps each table reference in the query to the metadata of the entity
	// it selects.
	private transient Map<TableReference, EntityMetaData<?, ?, ?, ?, ?, ?, ?>> metaDataMap;
	private transient LinkedHashMap<Integer, TableReference> originMap;
	// Maps (referencing table, foreign key) join pairs to the joined table.
	private transient Map<JoinKey, TableReference> referenceMap;
	private transient Map<EntityQueryTemplateAttribute, ColumnReference> columnMap;
	// Column references backing sort keys / predicates built from template
	// attributes (populated during fromTemplate).
	private transient Map<EntityQuerySortKey<?>, ColumnReference> sortKeyColumnMap = new HashMap<EntityQuerySortKey<?>, ColumnReference>();
	private transient Map<EntityQueryPredicate<?>, ColumnReference> predicateColumnMap = new HashMap<EntityQueryPredicate<?>, ColumnReference>();
	// Primary-key columns of the root table, used as a sort tie-breaker.
	private transient List<ColumnReference> rootPrimaryKey;
	// The root query template this query was built from.
	private Q template;
	/**
	 * No-argument constructor for GWT Serialization
	 */
	protected DefaultEntityTemplateQuery() {
		// Intentionally empty; fields are restored by the serialization layer.
	}
public DefaultEntityTemplateQuery(Q rootTemplate) {
if (rootTemplate == null) {
throw new NullPointerException();
}
this.template = rootTemplate;
this.type = rootTemplate.getMetaData().getType();
}
	/**
	 * Creates a query rooted at the given template, optionally building the
	 * SQL structures immediately.
	 *
	 * @param root the root query template; must not be null
	 * @param init if true, init() is run eagerly
	 * @throws CyclicTemplateException if the template graph contains a cycle
	 * @throws EntityRuntimeException on metadata/entity errors during init
	 */
	public DefaultEntityTemplateQuery(Q root, boolean init)
		throws CyclicTemplateException, EntityRuntimeException {
		this(root);
		if (init) {
			init();
		}
	}
	/**
	 * Builds the table expression and query expression from the root
	 * template.  Idempotent: returns immediately if already initialized.
	 * Populates the transient maps (metaDataMap, referenceMap, the sort-key
	 * and predicate column maps) as a side effect of fromTemplate.
	 */
	private void init() throws CyclicTemplateException, EntityRuntimeException {
		if (isInitialized()) {
			return;
		}
		logger().debug("init - enter");
		Q root = this.template;
		BaseTable table = getMetaData().getBaseTable();
		if (table == null) {
			throw new NullPointerException("EntityMetaData.getBaseTable()");
		}
		DefaultTableExpression q = new DefaultTableExpression();
		// visited guards against cycles in the template graph.
		HashSet<EntityQueryTemplate<?,?,?,?,?,?,?,?>> visited = new HashSet<EntityQueryTemplate<?,?,?,?,?,?,?,?>>();
		AbstractTableReference tref = fromTemplate(root, null, null, null, q, visited);
		logger().debug("ref: " + tref);
		q.setFrom(new From(tref));
		this.query = q;
		List<EntityQuerySortKey<?>> sortKeyList = root.allSortKeys();
		if (sortKeyList.isEmpty()) {
			// No ordering requested: the plain table expression is the query.
			this.queryExpression = this.query;
		}
		else {
			OrderBy ob = q.getOrderBy();
			// NOTE(review): if the expression already has an OrderBy, the
			// template's sort keys are skipped and only the primary-key
			// tie-breaker columns are appended -- confirm this is intended.
			if (ob == null) {
				ob = new OrderBy();
				for (EntityQuerySortKey<?> sk : sortKeyList) {
					ColumnReference cr = sortKeyColumnMap.get(sk);
					// Only template attributes which are used as sort keys have associated column reference.
					// Other sort keys just do without.
					ob.add(sk.sortKey(cr));
				}
			}
			// Append the root primary key for a deterministic total order.
			for (ColumnReference pkcol : getRootPrimaryKey()) {
				ob.add(pkcol, Order.ASC);
			}
			// Limit le = (this.limit == null) ? null : new Limit(limit.longValue());
			// Offset oe = (this.offset == null) ? null : new Offset(this.offset.longValue());
			SelectStatement sq = new SelectStatement(q, ob, null, null);
			this.queryExpression = sq;
		}
		logger().debug("init - exit");
	}
/**
 * Recursively builds the joined table-reference tree for <code>template</code>.
 * The root call (qref == null) establishes the root table; each recursive call
 * left-joins the referenced template's base table via its foreign key.
 * Side effects: populates the select list of <code>q</code>, the metadata map,
 * the reference map, the origin map, the predicate/sort-key column maps, and
 * (for the root) the root primary key list.
 *
 * @param template the template to process
 * @param qref table reference accumulated so far, or null on the root call
 * @param fk foreign key joining this template to its parent (null for the root)
 * @param referencing the parent's table reference (null for the root)
 * @param q the table expression whose select list is being built
 * @param visited templates already processed, for cycle detection
 * @return the accumulated table reference including this template's join
 * @throws CyclicTemplateException if this template was already visited
 * @throws EntityRuntimeException on metadata access failure
 */
private
<
    MA extends Attribute,
    MR extends Reference,
    MT extends ReferenceType<MA, MR, MT, ME, MH, MF, MM>,
    ME extends Entity<MA, MR, MT, ME, MH, MF, MM>,
    MH extends ReferenceHolder<MA, MR, MT, ME, MH, MM>,
    MF extends EntityFactory<ME, MH, MM, MF>,
    MM extends EntityMetaData<MA, MR, MT, ME, MH, MF, MM>,
    MQ extends EntityQueryTemplate<MA, MR, MT, ME, MH, MF, MM, MQ>
>
AbstractTableReference fromTemplate(
    MQ template,
    AbstractTableReference qref, ForeignKey fk, TableReference referencing,
    DefaultTableExpression q,
    Set<EntityQueryTemplate<?, ?, ?, ?, ?, ?, ?, ?>> visited)
    throws CyclicTemplateException, EntityRuntimeException {
    logger().debug("fromTemplate - enter: " + template);
    logger().debug("fromTemplate - fk: " + fk);
    // Cycle detection: a template may appear at most once in the join tree.
    if (visited.contains(template)) {
        throw new CyclicTemplateException(template);
    }
    else {
        visited.add(template);
    }
    Select s = getSelect(q);
    MM meta = template.getMetaData();
    final boolean root = (qref == null);
    // The root re-uses the cached root table reference; joined tables get fresh ones.
    final TableReference tref = (qref == null) ? getTableRef() : new TableReference(meta.getBaseTable());
    getMetaDataMap().put(tref, meta);
    if (referencing != null) {
        // Remember which table reference a (referencing, fk) pair resolves to.
        JoinKey j = new JoinKey(referencing, fk);
        getReferenceMap().put(j, tref);
    }
    if (qref == null) {
        qref = tref;
    }
    else {
        // Join the new table in via the foreign key from its parent.
        ForeignKeyJoinCondition jc = new ForeignKeyJoinCondition(fk, referencing, tref);
        qref = qref.leftJoin(tref, jc);
    }
    // Primary key columns are always selected; the root's are also recorded.
    Set<Column> pkcols = meta.getPKDefinition();
    for (Column c : pkcols) {
        ColumnReference cref = new ColumnReference(tref, c);
        s.add(cref);
        if (root) {
            getRootPrimaryKey().add(cref);
        }
    }
    addAttributes(template, s, tref);
    // Bind attribute-backed predicates to their column references.
    List<EntityQueryPredicate<MA>> ps = template.predicates();
    for (EntityQueryPredicate<MA> k : ps) {
        MA a = k.attribute();
        ColumnReference cref = null;
        if (a != null) {
            Column c = meta.getColumn(a);
            cref = new ColumnReference(tref, c);
            predicateColumnMap.put(k, cref);
        }
    }
    // Bind attribute-backed sort keys to their column references.
    List<EntityQuerySortKey<MA>> keys = template.sortKeys();
    for (EntityQuerySortKey<MA> k : keys) {
        MA a = k.attribute();
        ColumnReference cref = null;
        if (a != null) {
            Column c = meta.getColumn(a);
            cref = new ColumnReference(tref, c);
            sortKeyColumnMap.put(k, cref);
        }
    }
    // Record the running column count so getOrigin() can map columns back to tables.
    getOriginMap().put(Integer.valueOf(s.getColumnCount()), tref);
    qref = processReferences(template, qref, tref, q, visited);
    return qref;
}
/**
 * Joins in the sub-templates attached to <code>template</code> via its entity
 * relationships, recursing into fromTemplate() for each one.
 *
 * @param template the template whose relationships are processed
 * @param qref table reference accumulated so far
 * @param tref the table reference created for <code>template</code> itself
 * @param q the table expression being built
 * @param visited templates already processed, for cycle detection
 * @return the accumulated table reference including all referenced templates
 */
private <
    KA extends Attribute,
    KR extends Reference,
    KT extends ReferenceType<KA, KR, KT, KE, ?, ?, KM>,
    KE extends Entity<KA, KR, KT, KE, ?, ?, KM>,
    KM extends EntityMetaData<KA, KR, KT, KE, ?, ?, KM>,
    KQ extends EntityQueryTemplate<KA, KR, KT, KE, ?, ?, KM, KQ>
>
AbstractTableReference processReferences(KQ template, AbstractTableReference qref, TableReference tref, DefaultTableExpression q, Set<EntityQueryTemplate<?, ?, ?, ?, ?, ?, ?, ?>> visited) throws EntityRuntimeException, CyclicTemplateException {
    KM meta = template.getMetaData();
    Set<KR> rs = meta.relationships();
    for (KR kr : rs) {
        EntityKey<KR, KT, KE, KM, ?, ?, ?, ?, ?, ?, ?, ?> ek = meta.getEntityKey(kr);
        EntityQueryTemplate<?, ?, ?, ?, ?, ?, ?, ?> t = template.getTemplate(ek);
        // Only relationships with an attached sub-template contribute a join.
        if (t != null) {
            ForeignKey fk = meta.getForeignKey(kr);
            if (fk == null) {
                throw new NullPointerException("can not find fk by relationship: " + kr);
            }
            qref = fromTemplate(t.self(), qref, fk, tref, q, visited);
        }
    }
    return qref;
}
/**
 * Returns the SELECT list of the given table expression, creating and
 * attaching an empty one if the expression does not have one yet.
 */
private Select getSelect(DefaultTableExpression q) {
    Select current = q.getSelect();
    if (current != null) {
        return current;
    }
    Select created = new Select();
    q.setSelect(created);
    return created;
}
/**
 * Adds the selected attribute columns of <code>template</code> to the SELECT
 * list, records each attribute's column reference in the column map, and
 * registers any attribute-level predicate the template defines.
 *
 * @param template the template whose attributes are examined
 * @param s the SELECT list being built
 * @param tref the table reference the attribute columns belong to
 */
private <
    MA extends Attribute,
    D extends Entity<MA, ?, ?, D, ?, ?, DM>,
    DM extends EntityMetaData<MA, ?, ?, D, ?, ?, DM>,
    DQ extends EntityQueryTemplate<MA, ?, ?, D, ?, ?, DM, DQ>
>
void addAttributes(DQ template, Select s, TableReference tref) throws EntityRuntimeException {
    DM meta = template.getMetaData();
    for (MA attribute : meta.attributes()) {
        EntityQueryTemplateAttribute ta = template.get(attribute);
        // Attributes without a template binding contribute nothing.
        if (ta != null) {
            Column column = meta.getColumn(attribute);
            ColumnReference cref = new ColumnReference(tref, column);
            getColumnMap().put(ta, cref);
            if (ta.isSelected(cref)) {
                s.add(cref);
            }
            // addPredicate() ignores null, so an absent predicate is harmless.
            addPredicate(ta.createPredicate(cref));
        }
    }
}
/**
 * Adds <code>p</code> to the predicate list, silently ignoring nulls.
 *
 * @return true if the predicate was added, false if it was null
 */
private boolean addPredicate(Predicate p) {
    return (p != null) && getPredicateList().add(p);
}
/**
 * Returns the table expression for this query, building it on first call.
 */
@Override
public DefaultTableExpression getTableExpression()
    throws CyclicTemplateException, EntityRuntimeException {
    init();
    return this.query;
}

/**
 * Returns the complete query expression (including ORDER BY, if any),
 * building it on first call.
 */
@Override
public QueryExpression getQueryExpression()
    throws CyclicTemplateException, EntityRuntimeException {
    init();
    return this.queryExpression;
}
/**
 * Returns the root table-reference for this query, lazily created from the
 * base table of the query's metadata.
 * @return
 */
@Override
public TableReference getTableRef() {
    TableReference ref = this.rootRef;
    if (ref == null) {
        ref = new TableReference(getMetaData().getBaseTable());
        this.rootRef = ref;
    }
    return ref;
}
// @Override
// public Long getLimit() {
// return null;
// @Override
// public int getOffset() {
// return 0;
/**
 * Returns the entity metadata of the root type of this query.
 */
@Override
public M getMetaData() {
    return this.type.getMetaData();
}

/**
 * Returns the entity metadata associated with the given table reference,
 * building the query on first call.
 *
 * @throws NullPointerException if <code>tref</code> is null
 */
@Override
public EntityMetaData<?, ?, ?, ?, ?, ?, ?> getMetaData(TableReference tref)
    throws CyclicTemplateException, EntityRuntimeException {
    if (tref == null) {
        throw new NullPointerException("tref");
    }
    init();
    return getMetaDataMap().get(tref);
}
/**
 * Returns the table reference which the specified <code>column</code> originates from.
 * Column numbering starts from 1.
 *
 * Throws {@link IndexOutOfBoundsException} if column < 1.
 */
@Override
public TableReference getOrigin(int column)
    throws CyclicTemplateException, EntityRuntimeException {
    if (column < 1) {
        throw new IndexOutOfBoundsException();
    }
    init();
    // The origin map is insertion-ordered (LinkedHashMap); keys are the running column
    // counts recorded as each table was joined, so the first entry whose key reaches
    // the requested column owns it. Iterate entries to avoid a second lookup per key.
    for (Map.Entry<Integer, TableReference> e : getOriginMap().entrySet()) {
        if (column <= e.getKey().intValue()) {
            return e.getValue();
        }
    }
    return null;
}
/**
 * Lazily creates and returns the map from table references to their entity metadata.
 */
private Map<TableReference, EntityMetaData<?, ?, ?, ?, ?, ?, ?>> getMetaDataMap() {
    Map<TableReference, EntityMetaData<?, ?, ?, ?, ?, ?, ?>> map = this.metaDataMap;
    if (map == null) {
        map = new HashMap<TableReference, EntityMetaData<?, ?, ?, ?, ?, ?, ?>>();
        this.metaDataMap = map;
    }
    return map;
}
/**
 * Returns the table reference joined in from <code>referencing</code> via the
 * given foreign key, or null if no such join exists in this query.
 */
@Override
public TableReference getReferenced(TableReference referencing, ForeignKey fk) {
    return getReferenceMap().get(new JoinKey(referencing, fk));
}
/**
 * Hash key identifying a join: a referencing table reference together with the
 * foreign key used to join. Used to look up the referenced table reference.
 */
private static class JoinKey
    implements Serializable {
    private static final long serialVersionUID = -2839759478114689320L;
    private TableReference referencing;
    private ForeignKey foreignKey;
    /**
     * No-argument constructor for GWT Serialization
     */
    @SuppressWarnings("unused")
    private JoinKey() {
    }
    public JoinKey(TableReference referencing, ForeignKey foreignKey) {
        super();
        this.referencing = referencing;
        this.foreignKey = foreignKey;
    }
    @Override
    public int hashCode() {
        return referencing.hashCode() ^ foreignKey.hashCode();
    }
    @Override
    public boolean equals(Object obj) {
        // Honor the Object.equals() contract: return false for null and for
        // foreign types instead of throwing NPE/ClassCastException.
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof JoinKey)) {
            return false;
        }
        JoinKey j = (JoinKey) obj;
        return
            this.referencing.equals(j.referencing) &&
            this.foreignKey.equals(j.foreignKey);
    }
}
// private static Logger logger() {
// return DefaultEntityQuery.logger;
/**
 * Lazily creates and returns the list of predicates collected for this query.
 */
private List<Predicate> getPredicateList() {
    List<Predicate> list = this.predicateList;
    if (list == null) {
        list = new ArrayList<Predicate>();
        this.predicateList = list;
    }
    return list;
}
// private List<EntityQuerySortKey> getSortKeyList() {
// if (sortKeyList == null) {
// sortKeyList = new ArrayList<EntityQuerySortKey>();
// return sortKeyList;
// private boolean addSortKey(EntityQuerySortKey sk) {
// if (sk == null) {
// return false;
// return getSortKeyList().add(sk);
/**
 * Lazily creates and returns the list of column references forming the
 * primary key of the root table.
 */
private List<ColumnReference> getRootPrimaryKey() {
    List<ColumnReference> pk = this.rootPrimaryKey;
    if (pk == null) {
        pk = new ArrayList<ColumnReference>();
        this.rootPrimaryKey = pk;
    }
    return pk;
}
/**
 * Returns the root query template this query was created from.
 */
public Q getTemplate() {
    return template;
}

/**
 * Lazily creates and returns the running-column-count => table-reference map.
 * Insertion order matters: getOrigin() scans the entries in the order the
 * tables were joined, so this must remain a LinkedHashMap.
 */
private Map<Integer, TableReference> getOriginMap() {
    if (originMap == null) {
        originMap = new LinkedHashMap<Integer, TableReference>();
    }
    return originMap;
}

/**
 * Lazily creates and returns the join-key => referenced-table map.
 */
private Map<JoinKey, TableReference> getReferenceMap() {
    if (referenceMap == null) {
        referenceMap = new HashMap<JoinKey, TableReference>();
    }
    return referenceMap;
}

/**
 * A query counts as initialized once init() has built its query expression.
 */
private boolean isInitialized() {
    return (this.queryExpression != null);
}
/**
 * Lazily creates and returns the map from template attributes to the column
 * references they were bound to during initialization.
 */
public Map<EntityQueryTemplateAttribute, ColumnReference> getColumnMap() {
    Map<EntityQueryTemplateAttribute, ColumnReference> map = this.columnMap;
    if (map == null) {
        map = new HashMap<EntityQueryTemplateAttribute, ColumnReference>();
        this.columnMap = map;
    }
    return map;
}
/**
 * Returns the logger used by this query implementation.
 */
public Logger logger() {
    return DefaultLogger.getLogger();
}
}
|
package org.voltdb.compiler;
import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.hsqldb_voltpatches.HSQLInterface;
import org.json_voltpatches.JSONException;
import org.voltcore.logging.Level;
import org.voltcore.logging.VoltLogger;
import org.voltdb.CatalogContext;
import org.voltdb.ProcInfoData;
import org.voltdb.RealVoltDB;
import org.voltdb.TransactionIdManager;
import org.voltdb.VoltDB;
import org.voltdb.VoltType;
import org.voltdb.catalog.Catalog;
import org.voltdb.catalog.CatalogMap;
import org.voltdb.catalog.Column;
import org.voltdb.catalog.ColumnRef;
import org.voltdb.catalog.Constraint;
import org.voltdb.catalog.Database;
import org.voltdb.catalog.FilteredCatalogDiffEngine;
import org.voltdb.catalog.Index;
import org.voltdb.catalog.MaterializedViewInfo;
import org.voltdb.catalog.Procedure;
import org.voltdb.catalog.Statement;
import org.voltdb.catalog.Table;
import org.voltdb.common.Constants;
import org.voltdb.compiler.projectfile.ClassdependenciesType.Classdependency;
import org.voltdb.compiler.projectfile.DatabaseType;
import org.voltdb.compiler.projectfile.ExportType;
import org.voltdb.compiler.projectfile.ExportType.Tables;
import org.voltdb.compiler.projectfile.GroupsType;
import org.voltdb.compiler.projectfile.PartitionsType;
import org.voltdb.compiler.projectfile.ProceduresType;
import org.voltdb.compiler.projectfile.ProjectType;
import org.voltdb.compiler.projectfile.RolesType;
import org.voltdb.compiler.projectfile.SchemasType;
import org.voltdb.compilereport.ReportMaker;
import org.voltdb.expressions.AbstractExpression;
import org.voltdb.expressions.TupleValueExpression;
import org.voltdb.types.ConstraintType;
import org.voltdb.utils.CatalogSchemaTools;
import org.voltdb.utils.CatalogUtil;
import org.voltdb.utils.Encoder;
import org.voltdb.utils.InMemoryJarfile;
import org.voltdb.utils.InMemoryJarfile.JarLoader;
import org.voltdb.utils.LogKeys;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import com.google_voltpatches.common.collect.ImmutableList;
/**
 * Compiles a project XML file and some metadata into a Jarfile
 * containing stored procedure code and a serialized catalog.
 */
public class VoltCompiler {
/** Represents the level of severity for a Feedback message generated during compiling. */
public static enum Severity { INFORMATIONAL, WARNING, ERROR, UNEXPECTED }
// Sentinel used when a feedback message is not tied to a specific source line.
public static final int NO_LINE_NUMBER = -1;
// Causes the "debugoutput" folder to be generated and populated.
// Also causes explain plans on disk to include cost.
// Use containsKey: Properties.contains() (inherited from Hashtable) tests *values*,
// so the original contains("compilerdebug") could never detect -Dcompilerdebug.
// This matches how DEBUG_VERIFY_CATALOG checks its property below.
public final static boolean DEBUG_MODE = System.getProperties().containsKey("compilerdebug");
// feedback by filename
ArrayList<Feedback> m_infos = new ArrayList<Feedback>();
ArrayList<Feedback> m_warnings = new ArrayList<Feedback>();
ArrayList<Feedback> m_errors = new ArrayList<Feedback>();
// set of annotations by procedure name
private Map<String, ProcInfoData> m_procInfoOverrides = null;
// Name of DDL file built by the DDL VoltCompiler from the catalog and added to the jar.
// NOTE(review): effectively a constant; consider declaring it final.
public static String AUTOGEN_DDL_FILE_NAME = "autogen-ddl.sql";
// Environment variable used to verify that a catalog created from autogen-dll.sql is effectively
// identical to the original catalog that was used to create the autogen-ddl.sql file.
public static final boolean DEBUG_VERIFY_CATALOG = System.getProperties().containsKey("verifycatalogdebug");
// URL/path of the project file for the compile in progress (null when compiling DDL only).
String m_projectFileURL = null;
// Name of the file currently being processed; used to attribute feedback messages.
String m_currentFilename = null;
// DDL file names mapped to their resolved paths.
Map<String, String> m_ddlFilePaths = new HashMap<String, String>();
// Names of classes added to the jar during compilation.
String[] m_addedClasses = null;
// generated html text for catalog report
String m_report = null;
String m_reportPath = null;
// The catalog built by the most recent compile.
Catalog m_catalog = null;
DatabaseEstimates m_estimates = new DatabaseEstimates();
private List<String> m_capturedDiagnosticDetail = null;
private static final VoltLogger compilerLog = new VoltLogger("COMPILER");
private static final VoltLogger consoleLog = new VoltLogger("CONSOLE");
private static final VoltLogger Log = new VoltLogger("org.voltdb.compiler.VoltCompiler");
// Class loader used to resolve procedure classes; defaults to the system loader.
private ClassLoader m_classLoader = ClassLoader.getSystemClassLoader();
/**
* Represents output from a compile. This works similarly to Log4j; there
* are different levels of feedback including info, warning, error, and
* unexpected error. Feedback can be output to a printstream (like stdout)
* or can be examined programatically.
*
*/
public static class Feedback {
Severity severityLevel;
String fileName;
int lineNo;
String message;
Feedback(final Severity severityLevel, final String message, final String fileName, final int lineNo) {
this.severityLevel = severityLevel;
this.message = message;
this.fileName = fileName;
this.lineNo = lineNo;
}
public String getStandardFeedbackLine() {
String retval = "";
if (severityLevel == Severity.INFORMATIONAL)
retval = "INFO";
if (severityLevel == Severity.WARNING)
retval = "WARNING";
if (severityLevel == Severity.ERROR)
retval = "ERROR";
if (severityLevel == Severity.UNEXPECTED)
retval = "UNEXPECTED ERROR";
return retval + " " + getLogString();
}
public String getLogString() {
String retval = new String();
if (fileName != null) {
retval += "[" + fileName;
if (lineNo != NO_LINE_NUMBER)
retval += ":" + lineNo;
retval += "]";
}
retval += ": " + message;
return retval;
}
public Severity getSeverityLevel() {
return severityLevel;
}
public String getFileName() {
return fileName;
}
public int getLineNumber() {
return lineNo;
}
public String getMessage() {
return message;
}
}
/**
 * Exception type used to abort compilation; the message-taking constructors
 * also record the message in the compiler's error feedback list.
 */
class VoltCompilerException extends Exception {
    private static final long serialVersionUID = -2267780579911448600L;
    private String message = null;

    /** Wraps an underlying exception without adding feedback. */
    VoltCompilerException(final Exception e) {
        super(e);
    }

    /** Records an error at the given line and carries it as the message. */
    VoltCompilerException(final String message, final int lineNo) {
        addErr(message, lineNo);
        this.message = message;
    }

    /** Records an error with no line number and carries it as the message. */
    VoltCompilerException(final String message) {
        addErr(message);
        this.message = message;
    }

    @Override
    public String getMessage() {
        // The cause-wrapping constructor never sets the local field; fall back to the
        // superclass message instead of returning null in that case.
        return (message != null) ? message : super.getMessage();
    }
}
/**
 * SAX error handler that routes XML validation problems into the compiler's
 * feedback lists instead of aborting the parse.
 */
class VoltXMLErrorHandler implements ErrorHandler {
    @Override
    public void error(final SAXParseException exception) throws SAXException {
        addErr(exception.getMessage(), exception.getLineNumber());
    }

    @Override
    public void fatalError(final SAXParseException exception) throws SAXException {
        // Deliberately left empty: fatal errors are not recorded here.
        // NOTE(review): confirm suppressing this feedback is still intended.
        //addErr(exception.getMessage(), exception.getLineNumber());
    }

    @Override
    public void warning(final SAXParseException exception) throws SAXException {
        addWarn(exception.getMessage(), exception.getLineNumber());
    }
}
/**
 * Immutable description of a stored procedure gathered from the project file
 * or from DDL, before the procedure is compiled into the catalog.
 */
public class ProcedureDescriptor {
    public final ArrayList<String> m_authGroups;
    public final String m_className;
    // for single-stmt procs
    public final String m_singleStmt;
    public final String m_joinOrder;
    public final String m_partitionString;
    public final boolean m_builtInStmt; // autogenerated sql statement
    public final Language m_language; // Java or Groovy
    public final String m_scriptImpl; // Procedure code from DDL (if any)
    public final Class<?> m_class;

    /** Java procedure identified only by class name (class resolved later). */
    ProcedureDescriptor (final ArrayList<String> authGroups, final String className) {
        assert(className != null);
        m_authGroups = authGroups;
        m_className = className;
        m_singleStmt = null;
        m_joinOrder = null;
        m_partitionString = null;
        m_builtInStmt = false;
        m_language = null;
        m_scriptImpl = null;
        m_class = null;
    }

    /** Script-backed procedure (e.g. Groovy) with a loaded class and no partitioning info. */
    public ProcedureDescriptor(final ArrayList<String> authGroups, final Language language, final String scriptImpl, Class<?> clazz) {
        assert(clazz != null && language != null);
        m_authGroups = authGroups;
        m_className = clazz.getName();
        m_singleStmt = null;
        m_joinOrder = null;
        m_partitionString = null;
        m_builtInStmt = false;
        m_language = language;
        m_scriptImpl = scriptImpl;
        m_class = clazz;
    }

    /** Partitioned procedure with a loaded class and an explicit partition string. */
    ProcedureDescriptor(final ArrayList<String> authGroups, final Class<?> clazz, final String partitionString, final Language language, final String scriptImpl) {
        assert(clazz != null);
        assert(partitionString != null);
        m_authGroups = authGroups;
        m_className = clazz.getName();
        m_singleStmt = null;
        m_joinOrder = null;
        m_partitionString = partitionString;
        m_builtInStmt = false;
        m_language = language;
        m_scriptImpl = scriptImpl;
        m_class = clazz;
    }

    /** Single-statement procedure; all fields supplied explicitly. */
    ProcedureDescriptor (final ArrayList<String> authGroups, final String className,
        final String singleStmt, final String joinOrder, final String partitionString,
        boolean builtInStmt, Language language, final String scriptImpl, Class<?> clazz)
    {
        assert(className != null);
        assert(singleStmt != null);
        m_authGroups = authGroups;
        m_className = className;
        m_singleStmt = singleStmt;
        m_joinOrder = joinOrder;
        m_partitionString = partitionString;
        m_builtInStmt = builtInStmt;
        m_language = language;
        m_scriptImpl = scriptImpl;
        m_class = clazz;
    }
}
/** Returns true if any errors were recorded during the current compile. */
public boolean hasErrors() {
    return !m_errors.isEmpty();
}

/** Returns true if any warnings or errors were recorded during the current compile. */
public boolean hasErrorsOrWarnings() {
    return !m_warnings.isEmpty() || hasErrors();
}
/** Records an informational message not tied to a line number. */
void addInfo(final String msg) {
    addInfo(msg, NO_LINE_NUMBER);
}

/** Records a warning not tied to a line number. */
void addWarn(final String msg) {
    addWarn(msg, NO_LINE_NUMBER);
}

/** Records an error not tied to a line number. */
void addErr(final String msg) {
    addErr(msg, NO_LINE_NUMBER);
}

/** Records an informational message against the file currently being compiled. */
void addInfo(final String msg, final int lineNo) {
    final Feedback fb = new Feedback(Severity.INFORMATIONAL, msg, m_currentFilename, lineNo);
    m_infos.add(fb);
    compilerLog.info(fb.getLogString());
}

/** Records a warning against the file currently being compiled. */
void addWarn(final String msg, final int lineNo) {
    final Feedback fb = new Feedback(Severity.WARNING, msg, m_currentFilename, lineNo);
    m_warnings.add(fb);
    compilerLog.warn(fb.getLogString());
}

/** Records an error against the file currently being compiled. */
void addErr(final String msg, final int lineNo) {
    final Feedback fb = new Feedback(Severity.ERROR, msg, m_currentFilename, lineNo);
    m_errors.add(fb);
    compilerLog.error(fb.getLogString());
}
/**
 * Compile from a set of DDL files, but no project.xml.
 *
 * @param jarOutputPath The output path of the finished JAR.
 * @param ddlFilePaths The array of DDL files to compile (at least one is required).
 * @return true if successful
 * @throws VoltCompilerException
 */
public boolean compileFromDDL(
    final String jarOutputPath,
    final String... ddlFilePaths)
    throws VoltCompilerException
{
    // Delegates to the project-file entry point with no project file.
    return compileWithProjectXML(null, jarOutputPath, ddlFilePaths);
}
/**
 * Compile optionally using a (DEPRECATED) project.xml file.
 * This internal method prepares to compile with or without a project file.
 *
 * @param projectFileURL URL of the project file or NULL if not used.
 * @param jarOutputPath The output path of the finished JAR.
 * @param ddlFilePaths The array of DDL files to compile (at least one is required if there is no project file).
 * @return true if successful
 */
public boolean compileWithProjectXML(
    final String projectFileURL,
    final String jarOutputPath,
    final String... ddlFilePaths)
{
    VoltCompilerReader projectReader = null;
    if (projectFileURL != null) {
        try {
            projectReader = new VoltCompilerFileReader(projectFileURL);
        }
        catch (IOException e) {
            compilerLog.error(String.format(
                "Failed to initialize reader for project file \"%s\".",
                projectFileURL));
            return false;
        }
    }
    else if (ddlFilePaths.length == 0) {
        // Message has no format specifiers, so no String.format with unused arguments.
        compilerLog.error("At least one DDL file is required if no project file is specified.");
        return false;
    }
    List<VoltCompilerReader> ddlReaderList;
    try {
        ddlReaderList = DDLPathsToReaderList(ddlFilePaths);
    }
    catch (VoltCompilerException e) {
        compilerLog.error("Unable to open DDL file.", e);
        return false;
    }
    return compileInternal(projectReader, jarOutputPath, ddlReaderList, null);
}
/**
 * Internal method that takes the generated DDL from the catalog and builds a new catalog.
 * The generated catalog is diffed with the original catalog to verify compilation and
 * catalog generation consistency.
 *
 * @param origDDLFileReader reader over the autogenerated DDL produced from origCatalog
 * @param origCatalog the catalog the autogenerated DDL came from
 */
private void debugVerifyCatalog(VoltCompilerReader origDDLFileReader, Catalog origCatalog)
{
    final VoltCompiler autoGenCompiler = new VoltCompiler();
    List<VoltCompilerReader> autogenReaderList = new ArrayList<VoltCompilerReader>(1);
    autogenReaderList.add(origDDLFileReader);
    // Compile the autogenerated DDL using a stub (no-project) database description.
    DatabaseType autoGenDatabase = getProjectDatabase(null);
    InMemoryJarfile autoGenJarOutput = new InMemoryJarfile();
    autoGenCompiler.m_currentFilename = AUTOGEN_DDL_FILE_NAME;
    Catalog autoGenCatalog = autoGenCompiler.compileCatalogInternal(autoGenDatabase,
        autogenReaderList, autoGenJarOutput);
    FilteredCatalogDiffEngine diffEng = new FilteredCatalogDiffEngine(origCatalog, autoGenCatalog);
    String diffCmds = diffEng.commands();
    if (diffCmds != null && !diffCmds.equals("")) {
        // Include the diff in the assertion message so a mismatch is diagnosable.
        assert(false) : "Catalog mismatch between original and autogen DDL:\n" + diffCmds;
    }
}
/**
 * Internal method for compiling with and without a project.xml file or DDL files.
 *
 * @param projectReader Reader for project file or null if a project file is not used.
 * @param jarOutputPath The output path of the finished JAR.
 * @param ddlReaderList The list of DDL readers to compile (when no project is provided).
 * @param jarOutputRet The in-memory jar to populate or null if the caller doesn't provide one.
 * @return true if successful
 */
private boolean compileInternal(
    final VoltCompilerReader projectReader,
    final String jarOutputPath,
    final List<VoltCompilerReader> ddlReaderList,
    final InMemoryJarfile jarOutputRet)
{
    // Expect to have either >1 ddl file or a project file.
    assert(ddlReaderList.size() > 0 || projectReader != null);
    // Make a temporary local output jar if one wasn't provided.
    final InMemoryJarfile jarOutput = (jarOutputRet != null
        ? jarOutputRet
        : new InMemoryJarfile());
    m_projectFileURL = (projectReader != null ? projectReader.getPath() : null);
    if (m_projectFileURL == null && (ddlReaderList == null || ddlReaderList.isEmpty())) {
        addErr("One or more DDL files are required.");
        return false;
    }
    if (jarOutputPath == null) {
        addErr("The output jar path is null.");
        return false;
    }
    // clear out the warnings and errors
    m_warnings.clear();
    m_infos.clear();
    m_errors.clear();
    // do all the work to get the catalog
    DatabaseType database = getProjectDatabase(projectReader);
    if (database == null) {
        return false;
    }
    final Catalog catalog = compileCatalogInternal(database, ddlReaderList, jarOutput);
    if (catalog == null) {
        return false;
    }
    // Build DDL from Catalog Data
    String binDDL = CatalogSchemaTools.toSchema(catalog, m_addedClasses);
    jarOutput.put(AUTOGEN_DDL_FILE_NAME, binDDL.getBytes(Constants.UTF8ENCODING));
    if (DEBUG_VERIFY_CATALOG) {
        debugVerifyCatalog(new VoltCompilerJarFileReader(jarOutput, AUTOGEN_DDL_FILE_NAME), catalog);
    }
    // WRITE CATALOG TO JAR HERE
    final String catalogCommands = catalog.serialize();
    byte[] catalogBytes = catalogCommands.getBytes(Constants.UTF8ENCODING);
    try {
        // Don't update buildinfo if it's already present, e.g. while upgrading.
        // Note when upgrading the version has already been updated by the caller.
        if (!jarOutput.containsKey(CatalogUtil.CATALOG_BUILDINFO_FILENAME)) {
            StringBuilder buildinfo = new StringBuilder();
            String info[] = RealVoltDB.extractBuildInfo();
            buildinfo.append(info[0]).append('\n');
            buildinfo.append(info[1]).append('\n');
            buildinfo.append(System.getProperty("user.name")).append('\n');
            buildinfo.append(System.getProperty("user.dir")).append('\n');
            buildinfo.append(Long.toString(System.currentTimeMillis())).append('\n');
            byte buildinfoBytes[] = buildinfo.toString().getBytes(Constants.UTF8ENCODING);
            jarOutput.put(CatalogUtil.CATALOG_BUILDINFO_FILENAME, buildinfoBytes);
        }
        jarOutput.put(CatalogUtil.CATALOG_FILENAME, catalogBytes);
        // put the compiler report into the jarfile
        jarOutput.put("catalog-report.html", m_report.getBytes(Constants.UTF8ENCODING));
        jarOutput.writeToFile(new File(jarOutputPath)).run();
    }
    catch (final Exception e) {
        // Log through the compiler logger with the cause rather than dumping to stderr.
        compilerLog.error("Failed to write catalog jar to \"" + jarOutputPath + "\"", e);
        return false;
    }
    assert(!hasErrors());
    if (hasErrors()) {
        return false;
    }
    return true;
}
/**
 * Get textual explain plan info for each plan from the
 * catalog to be shoved into the catalog jarfile.
 *
 * @param catalog compiled catalog (plans are read from the current catalog database)
 * @return map of "<procedure>_<statement>.txt" file names to UTF-8 plan text
 */
HashMap<String, byte[]> getExplainPlans(Catalog catalog) {
    HashMap<String, byte[]> retval = new HashMap<String, byte[]>();
    Database db = getCatalogDatabase();
    assert(db != null);
    for (Procedure proc : db.getProcedures()) {
        for (Statement stmt : proc.getStatements()) {
            // Build with StringBuilder instead of repeated string concatenation.
            StringBuilder s = new StringBuilder();
            s.append("SQL: ").append(stmt.getSqltext()).append("\n");
            s.append("COST: ").append(Integer.toString(stmt.getCost())).append("\n");
            s.append("PLAN:\n\n");
            s.append(Encoder.hexDecodeToString(stmt.getExplainplan())).append("\n");
            byte[] b = s.toString().getBytes(Constants.UTF8ENCODING);
            retval.put(proc.getTypeName() + "_" + stmt.getTypeName() + ".txt", b);
        }
    }
    return retval;
}
/**
 * Opens a single DDL file (resolved relative to the project file, if any)
 * for reading.
 *
 * @throws VoltCompilerException if the file cannot be opened
 */
private VoltCompilerFileReader createDDLFileReader(String path)
    throws VoltCompilerException
{
    try {
        String schemaPath = VoltCompilerFileReader.getSchemaPath(m_projectFileURL, path);
        return new VoltCompilerFileReader(schemaPath);
    }
    catch (IOException e) {
        throw new VoltCompilerException(String.format(
            "Unable to open schema file \"%s\" for reading: %s", path, e.getMessage()));
    }
}
/**
 * Creates a reader for each DDL file path, preserving the given order.
 *
 * @throws VoltCompilerException if any file cannot be opened
 */
private List<VoltCompilerReader> DDLPathsToReaderList(final String... ddlFilePaths)
    throws VoltCompilerException
{
    List<VoltCompilerReader> ddlReaderList = new ArrayList<VoltCompilerReader>(ddlFilePaths.length);
    for (String ddlFilePath : ddlFilePaths) {
        ddlReaderList.add(createDDLFileReader(ddlFilePath));
    }
    return ddlReaderList;
}
/**
 * Compile from DDL files (only).
 * @param ddlFilePaths input ddl files
 * @return compiled catalog
 * @throws VoltCompilerException
 */
public Catalog compileCatalogFromDDL(final String... ddlFilePaths)
    throws VoltCompilerException
{
    // Stub database description (no project file) and a throwaway in-memory jar.
    DatabaseType database = getProjectDatabase(null);
    InMemoryJarfile jarOutput = new InMemoryJarfile();
    return compileCatalogInternal(database, DDLPathsToReaderList(ddlFilePaths), jarOutput);
}
/**
 * Compile from project file (without explicit DDL file paths).
 * @param projectFileURL project file URL/path
 * @return compiled catalog
 * @throws VoltCompilerException if the project file cannot be read
 */
public Catalog compileCatalogFromProject(final String projectFileURL)
    throws VoltCompilerException
{
    VoltCompilerReader projectReader = null;
    try {
        projectReader = new VoltCompilerFileReader(projectFileURL);
    }
    catch (IOException e) {
        throw new VoltCompilerException(String.format(
            "Unable to create project reader for \"%s\": %s",
            projectFileURL, e.getMessage()));
    }
    DatabaseType database = getProjectDatabase(projectReader);
    InMemoryJarfile jarOutput = new InMemoryJarfile();
    // Provide an empty DDL reader list.
    return compileCatalogInternal(database, DDLPathsToReaderList(), jarOutput);
}
/**
 * Read the project file and get the database object.
 * When no project reader is given, a stub DatabaseType is returned so the
 * rest of the compiler can run project-free.
 *
 * @param projectReader reader for the project file, or null when compiling without one
 * @return database for project or null on validation failure
 */
private DatabaseType getProjectDatabase(final VoltCompilerReader projectReader)
{
    DatabaseType database = null;
    m_currentFilename = (projectReader != null ? projectReader.getName() : "null");
    if (projectReader != null) {
        try {
            JAXBContext jc = JAXBContext.newInstance("org.voltdb.compiler.projectfile");
            // Validate the project file against the bundled XML schema resource.
            SchemaFactory sf = SchemaFactory.newInstance(
                javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI);
            Schema schema = sf.newSchema(this.getClass().getResource("ProjectFileSchema.xsd"));
            Unmarshaller unmarshaller = jc.createUnmarshaller();
            // Attach the schema so unmarshalling fails fast on invalid documents.
            unmarshaller.setSchema(schema);
            @SuppressWarnings("unchecked")
            JAXBElement<ProjectType> result = (JAXBElement<ProjectType>) unmarshaller.unmarshal(projectReader);
            ProjectType project = result.getValue();
            database = project.getDatabase();
        }
        catch (JAXBException e) {
            // Convert some linked exceptions to more friendly errors.
            if (e.getLinkedException() instanceof java.io.FileNotFoundException) {
                addErr(e.getLinkedException().getMessage());
                compilerLog.error(e.getLinkedException().getMessage());
            }
            else {
                // A known deprecated project-file element gets a targeted suggestion.
                DeprecatedProjectElement deprecated = DeprecatedProjectElement.valueOf(e);
                if( deprecated != null) {
                    addErr("Found deprecated XML element \"" + deprecated.name() + "\" in project.xml file, "
                        + deprecated.getSuggestion());
                    addErr("Error schema validating project.xml file. " + e.getLinkedException().getMessage());
                    compilerLog.error("Found deprecated XML element \"" + deprecated.name() + "\" in project.xml file");
                    compilerLog.error(e.getMessage());
                    compilerLog.error(projectReader.getPath());
                }
                else if (e.getLinkedException() instanceof org.xml.sax.SAXParseException) {
                    addErr("Error schema validating project.xml file. " + e.getLinkedException().getMessage());
                    compilerLog.error("Error schema validating project.xml file: " + e.getLinkedException().getMessage());
                    compilerLog.error(e.getMessage());
                    compilerLog.error(projectReader.getPath());
                }
                else {
                    // Anything else is unexpected; surface it to the caller.
                    throw new RuntimeException(e);
                }
            }
        }
        catch (SAXException e) {
            addErr("Error schema validating project.xml file. " + e.getMessage());
            compilerLog.error("Error schema validating project.xml file. " + e.getMessage());
        }
    }
    else {
        // No project.xml - create a stub object.
        database = new DatabaseType();
    }
    return database;
}
/**
 * Internal method for compiling the catalog.
 *
 * @param database catalog-related info parsed from a project file
 * @param ddlReaderList Reader objects for ddl files.
 * @param jarOutput The in-memory jar to populate or null if the caller doesn't provide one.
 * @return the compiled catalog, or null on failure
 */
private Catalog compileCatalogInternal(
    final DatabaseType database,
    final List<VoltCompilerReader> ddlReaderList,
    final InMemoryJarfile jarOutput)
{
    // Compiler instance is reusable. Clear the cache.
    cachedAddedClasses.clear();
    m_catalog = new Catalog();
    // Initialize the catalog for one cluster
    m_catalog.execute("add / clusters cluster");
    m_catalog.getClusters().get("cluster").setSecurityenabled(false);
    if (database != null) {
        final String databaseName = database.getName();
        // schema does not verify that the database is named "database"
        if (databaseName.equals("database") == false) {
            return null; // error messaging handled higher up
        }
        // shutdown and make a new hsqldb
        try {
            compileDatabaseNode(database, ddlReaderList, jarOutput);
        } catch (final VoltCompilerException e) {
            return null;
        }
    }
    assert(m_catalog != null);
    // add epoch info to catalog
    final int epoch = (int)(TransactionIdManager.getEpoch() / 1000);
    m_catalog.getClusters().get("cluster").setLocalepoch(epoch);
    // generate the catalog report and write it to disk
    try {
        m_report = ReportMaker.report(m_catalog, m_warnings);
        File file = new File("catalog-report.html");
        FileWriter fw = new FileWriter(file);
        try {
            fw.write(m_report);
        }
        finally {
            // Close unconditionally so the file handle is not leaked if write() throws.
            fw.close();
        }
        m_reportPath = file.getAbsolutePath();
    } catch (IOException e) {
        // Log with the cause instead of dumping to stderr.
        compilerLog.error("Failed to write catalog report", e);
        return null;
    }
    return m_catalog;
}
/**
 * Returns the ProcInfo annotation override registered for the named procedure,
 * or null if no overrides were set.
 */
ProcInfoData getProcInfoOverride(final String procName) {
    if (m_procInfoOverrides == null)
        return null;
    return m_procInfoOverrides.get(procName);
}

/** Returns the most recently compiled catalog (may be null before any compile). */
public Catalog getCatalog() {
    return m_catalog;
}

/** Returns the single database of the single cluster in the current catalog. */
public Database getCatalogDatabase() {
    return m_catalog.getClusters().get("cluster").getDatabases().get("database");
}

/** Creates the (single) database entry in the catalog and returns it. */
private Database initCatalogDatabase() {
    // create the database in the catalog
    m_catalog.execute("add /clusters[cluster] databases database");
    return getCatalogDatabase();
}
/**
 * Selects which ddl-defined procedures are compiled into the catalog:
 * none, only single-statement (SQL) procedures, or all (including Java ones).
 * The reduced modes are used by planner testing (see loadSchema / compileProcedures).
 */
public static enum DdlProceduresToLoad
{
    NO_DDL_PROCEDURES, ONLY_SINGLE_STATEMENT_PROCEDURES, ALL_DDL_PROCEDURES
}
/**
 * Simplified interface for loading a ddl file with full support for VoltDB
 * extensions (partitioning, procedures, export), but no support for "project file" input.
 * This is, at least initially, only a back door to create a fully functional catalog for
 * the purposes of planner unit testing.
 *
 * @param hsql an interface to the hsql frontend, initialized and potentially reused by the caller.
 * @param whichProcs indicates which ddl-defined procedures to load: none, single-statement, or all
 * @param ddlFilePaths schema file paths
 * @return the freshly compiled catalog
 * @throws VoltCompilerException
 */
public Catalog loadSchema(HSQLInterface hsql,
                          DdlProceduresToLoad whichProcs,
                          String... ddlFilePaths) throws VoltCompilerException
{
    // Start from an empty catalog seeded with the single-cluster skeleton.
    m_catalog = new Catalog();
    m_catalog.execute("add / clusters cluster");
    final Database db = initCatalogDatabase();
    // Wrap each ddl path in a reader and compile with no project-file extras
    // (no export configuration, no extra class dependencies).
    final List<VoltCompilerReader> schemaReaders = DDLPathsToReaderList(ddlFilePaths);
    final VoltDDLElementTracker ddlTracker = new VoltDDLElementTracker(this);
    final InMemoryJarfile jarOutput = new InMemoryJarfile();
    compileDatabase(db, hsql, ddlTracker, schemaReaders, null, null, whichProcs, jarOutput);
    return m_catalog;
}
/**
 * Load a ddl file with full support for VoltDB extensions (partitioning, procedures,
 * export), AND full support for input via a project xml file's "database" node.
 *
 * @param database catalog-related info parsed from a project file
 * @param ddlReaderList Reader objects for ddl files.
 * @param jarOutput The in-memory jar to populate or null if the caller doesn't provide one.
 * @throws VoltCompilerException
 */
private void compileDatabaseNode(
        final DatabaseType database,
        final List<VoltCompilerReader> ddlReaderList,
        final InMemoryJarfile jarOutput)
        throws VoltCompilerException
{
    final ArrayList<Class<?>> classDependencies = new ArrayList<Class<?>>();
    final VoltDDLElementTracker voltDdlTracker = new VoltDDLElementTracker(this);
    Database db = initCatalogDatabase();
    // schemas/schema
    if (database.getSchemas() != null) {
        for (SchemasType.Schema schema : database.getSchemas().getSchema()) {
            compilerLog.l7dlog( Level.INFO, LogKeys.compiler_VoltCompiler_CatalogPath.name(),
                    new Object[] {schema.getPath()}, null);
            // Prefer to use the in-memory copy.
            // All ddl.sql is placed in the jar root folder.
            File schemaFile = new File(schema.getPath());
            String schemaName = schemaFile.getName();
            if (jarOutput != null && jarOutput.containsKey(schemaName)) {
                ddlReaderList.add(new VoltCompilerJarFileReader(jarOutput, schemaName));
            }
            else {
                ddlReaderList.add(createDDLFileReader(schema.getPath()));
            }
        }
    }
    // groups/group and roles/role are aliases for the same catalog entity;
    // both paths funnel through one helper so the handling cannot drift apart.
    if (database.getGroups() != null) {
        for (GroupsType.Group group : database.getGroups().getGroup()) {
            addCatalogGroup(db, group.getName(),
                    group.isAdhoc(), group.isSysproc(), group.isDefaultproc());
        }
    }
    if (database.getRoles() != null) {
        for (RolesType.Role role : database.getRoles().getRole()) {
            addCatalogGroup(db, role.getName(),
                    role.isAdhoc(), role.isSysproc(), role.isDefaultproc());
        }
    }
    // procedures/procedure
    if (database.getProcedures() != null) {
        for (ProceduresType.Procedure proc : database.getProcedures().getProcedure()) {
            voltDdlTracker.add(getProcedure(proc));
        }
    }
    // classdependencies/classdependency
    if (database.getClassdependencies() != null) {
        for (Classdependency dep : database.getClassdependencies().getClassdependency()) {
            classDependencies.add(getClassDependency(dep));
        }
    }
    // partitions/table
    if (database.getPartitions() != null) {
        for (PartitionsType.Partition table : database.getPartitions().getPartition()) {
            voltDdlTracker.put(table.getTable(), table.getColumn());
        }
    }
    // shutdown and make a new hsqldb
    HSQLInterface hsql = HSQLInterface.loadHsqldb();
    compileDatabase(db, hsql, voltDdlTracker, ddlReaderList, database.getExport(), classDependencies,
            DdlProceduresToLoad.ALL_DDL_PROCEDURES, jarOutput);
}

/**
 * Adds one group (a.k.a. role) entry to the catalog database and sets its
 * permission flags. Shared by the groups/group and roles/role project-file paths.
 */
private static void addCatalogGroup(final Database db, final String name,
        final boolean adhoc, final boolean sysproc, final boolean defaultproc) {
    final org.voltdb.catalog.Group catGroup = db.getGroups().add(name);
    catGroup.setAdhoc(adhoc);
    catGroup.setSysproc(sysproc);
    catGroup.setDefaultproc(defaultproc);
}
/**
 * Common code for schema loading shared by loadSchema and compileDatabaseNode.
 *
 * Order matters here: DDL is parsed first, then table partitioning is applied
 * and validated, then export configuration, and only then are procedures
 * compiled (so statements against append-only export tables can be rejected).
 *
 * @param db the database entry in the catalog
 * @param hsql an interface to the hsql frontend, initialized and potentially reused by the caller.
 * @param voltDdlTracker non-standard VoltDB schema annotations, initially those from a project file
 * @param schemaReaders the ddl input files
 * @param export optional export connector configuration (from the project file)
 * @param classDependencies optional additional jar files required by procedures
 * @param whichProcs indicates which ddl-defined procedures to load: none, single-statement, or all
 * @param jarOutput The in-memory jar to populate or null if the caller doesn't provide one.
 */
private void compileDatabase(
        Database db,
        HSQLInterface hsql,
        VoltDDLElementTracker voltDdlTracker,
        List<VoltCompilerReader> schemaReaders,
        ExportType export,
        Collection<Class<?>> classDependencies,
        DdlProceduresToLoad whichProcs,
        InMemoryJarfile jarOutput)
        throws VoltCompilerException
{
    // Actually parse and handle all the DDL
    // DDLCompiler also provides partition descriptors for DDL PARTITION
    // and REPLICATE statements.
    final DDLCompiler ddlcompiler = new DDLCompiler(this, hsql, voltDdlTracker, m_classLoader);
    for (final VoltCompilerReader schemaReader : schemaReaders) {
        // add the file object's path to the list of files for the jar
        m_ddlFilePaths.put(schemaReader.getName(), schemaReader.getPath());
        ddlcompiler.loadSchema(schemaReader, db, whichProcs);
    }
    ddlcompiler.compileToCatalog(db);
    // Actually parse and handle all the partitions
    // this needs to happen before procedures are compiled
    String msg = "In database, ";
    final CatalogMap<Table> tables = db.getTables();
    for (Table table: tables) {
        String tableName = table.getTypeName();
        // The tracker's partition map is keyed by lower-cased table name.
        if (voltDdlTracker.m_partitionMap.containsKey(tableName.toLowerCase())) {
            String colName = voltDdlTracker.m_partitionMap.get(tableName.toLowerCase());
            // A null column name indicates a replicated table. Ignore it here
            // because it defaults to replicated in the catalog.
            if (colName != null) {
                assert(tables.getIgnoreCase(tableName) != null);
                final Column partitionCol = table.getColumns().getIgnoreCase(colName);
                // make sure the column exists
                if (partitionCol == null) {
                    msg += "PARTITION has unknown COLUMN '" + colName + "'";
                    throw new VoltCompilerException(msg);
                }
                // make sure the column is marked not-nullable
                if (partitionCol.getNullable() == true) {
                    msg += "Partition column '" + tableName + "." + colName + "' is nullable. " +
                        "Partition columns must be constrained \"NOT NULL\".";
                    throw new VoltCompilerException(msg);
                }
                // verify that the partition column is a supported type
                VoltType pcolType = VoltType.get((byte) partitionCol.getType());
                switch (pcolType) {
                    case TINYINT:
                    case SMALLINT:
                    case INTEGER:
                    case BIGINT:
                    case STRING:
                    case VARBINARY:
                        break;
                    default:
                        msg += "Partition column '" + tableName + "." + colName + "' is not a valid type. " +
                            "Partition columns must be an integer or varchar type.";
                        throw new VoltCompilerException(msg);
                }
                table.setPartitioncolumn(partitionCol);
                table.setIsreplicated(false);
                // Check valid indexes, whether they contain the partition column or not.
                for (Index index: table.getIndexes()) {
                    checkValidPartitionTableIndex(index, partitionCol, tableName);
                }
                // Set the partitioning of destination tables of associated views.
                // If a view's source table is replicated, then a full scan of the
                // associated view is single-sited. If the source is partitioned,
                // a full scan of the view must be distributed, unless it is filtered
                // by the original table's partitioning key, which, to be filtered,
                // must also be a GROUP BY key.
                final CatalogMap<MaterializedViewInfo> views = table.getViews();
                for (final MaterializedViewInfo mvi : views) {
                    mvi.getDest().setIsreplicated(false);
                    setGroupedTablePartitionColumn(mvi, partitionCol);
                }
            }
        } else {
            // Replicated tables case.
            // ASSUMEUNIQUE is only meaningful on partitioned tables.
            for (Index index: table.getIndexes()) {
                if (index.getAssumeunique()) {
                    String exceptionMsg = String.format(
                            "ASSUMEUNIQUE is not valid for replicated tables. Please use UNIQUE instead");
                    throw new VoltCompilerException(exceptionMsg);
                }
            }
        }
    }
    // add database estimates info
    addDatabaseEstimatesInfo(m_estimates, db);
    // Process DDL exported tables
    for( String exportedTableName: voltDdlTracker.getExportedTables()) {
        addExportTableToConnector(exportedTableName, db);
    }
    // Process and add exports and connectors to the catalog
    // Must do this before compiling procedures to deny updates
    // on append-only tables.
    if (export != null) {
        // currently, only a single connector is allowed
        compileExport(export, db);
    }
    if (whichProcs != DdlProceduresToLoad.NO_DDL_PROCEDURES) {
        Collection<ProcedureDescriptor> allProcs = voltDdlTracker.getProcedureDescriptors();
        compileProcedures(db, hsql, allProcs, classDependencies, whichProcs, jarOutput);
    }
    // add extra classes from the DDL
    m_addedClasses = voltDdlTracker.m_extraClassses.toArray(new String[0]);
    addExtraClasses(jarOutput);
}
/**
 * Validates an index on a partitioned table. Non-unique indexes always pass.
 * A UNIQUE index must include the partitioning column; ASSUMEUNIQUE is only
 * valid when the index does NOT include the partitioning column (and then it
 * is required in place of UNIQUE).
 *
 * @param index the index to validate
 * @param partitionCol the table's partitioning column
 * @param tableName the table's name, for error messages
 * @throws VoltCompilerException when the UNIQUE/ASSUMEUNIQUE rules are violated
 */
private void checkValidPartitionTableIndex(Index index, Column partitionCol, String tableName)
        throws VoltCompilerException {
    // skip checking for non-unique indexes.
    if (!index.getUnique()) {
        return;
    }
    boolean containsPartitionColumn = false;
    String jsonExpr = index.getExpressionsjson();
    // if this is a pure-column index...
    if (jsonExpr.isEmpty()) {
        for (ColumnRef cref : index.getColumns()) {
            Column col = cref.getColumn();
            // unique index contains partitioned column
            if (col.equals(partitionCol)) {
                containsPartitionColumn = true;
                break;
            }
        }
    }
    // if this is a fancy expression-based index...
    else {
        try {
            int partitionColIndex = partitionCol.getIndex();
            List<AbstractExpression> indexExpressions = AbstractExpression.fromJSONArrayString(jsonExpr, null);
            for (AbstractExpression expr: indexExpressions) {
                if (expr instanceof TupleValueExpression &&
                        ((TupleValueExpression) expr).getColumnIndex() == partitionColIndex ) {
                    containsPartitionColumn = true;
                    break;
                }
            }
        } catch (JSONException e) {
            // The expressions JSON is produced internally, so a parse failure is a
            // programming error. NOTE(review): with assertions disabled this falls
            // through treating the index as not containing the partition column --
            // confirm whether this should throw VoltCompilerException instead.
            e.printStackTrace(); // danger will robinson
            assert(false);
        }
    }
    if (containsPartitionColumn) {
        if (index.getAssumeunique()) {
            // (was a no-argument String.format -- plain concatenation is equivalent)
            String exceptionMsg = "ASSUMEUNIQUE is not valid " +
                    "for an index that includes the partitioning column. Please use UNIQUE instead.";
            throw new VoltCompilerException(exceptionMsg);
        }
    }
    else if ( ! index.getAssumeunique()) {
        // Throw compiler exception.
        String indexName = index.getTypeName();
        String keyword = "";
        if (indexName.startsWith(HSQLInterface.AUTO_GEN_PRIMARY_KEY_PREFIX)) {
            indexName = "PRIMARY KEY";
            keyword = "PRIMARY KEY";
        } else {
            indexName = "UNIQUE INDEX " + indexName;
            keyword = "UNIQUE";
        }
        String exceptionMsg = "Invalid use of " + keyword +
                ". The " + indexName + " on the partitioned table " + tableName +
                " does not include the partitioning column " + partitionCol.getName() +
                ". See the documentation for the 'CREATE TABLE' and 'CREATE INDEX' commands and the 'ASSUMEUNIQUE' keyword.";
        throw new VoltCompilerException(exceptionMsg);
    }
}
/**
 * Once the DDL file is over, take all of the extra classes found and add them to the jar.
 * On return, m_addedClasses is narrowed to only the classes actually added, so
 * classes already present in a pre-populated jar are not counted twice.
 *
 * @param jarOutput the in-memory jar receiving the class files
 * @throws VoltCompilerException when a named class cannot be loaded or added
 */
private void addExtraClasses(final InMemoryJarfile jarOutput) throws VoltCompilerException {
    List<String> addedClasses = new ArrayList<String>();
    for (String className : m_addedClasses) {
        /*
         * Only add the class if it isn't already in the output jar.
         * The jar will be pre-populated when performing an automatic
         * catalog version upgrade.
         */
        if (!jarOutput.containsKey(className)) {
            try {
                Class<?> clz = Class.forName(className, true, m_classLoader);
                if (addClassToJar(jarOutput, clz)) {
                    addedClasses.add(className);
                }
            }
            catch (Exception e) {
                // NOTE(review): the original exception is dropped here -- confirm
                // whether VoltCompilerException can carry a cause before threading it through.
                String msg = "Class %s could not be loaded/found/added to the jar.";
                msg = String.format(msg, className);
                throw new VoltCompilerException(msg);
            }
        }
    }
    // reset the added classes to the actual added classes
    m_addedClasses = addedClasses.toArray(new String[0]);
}
/**
 * Compiles the auto-generated CRUD procedures plus all tracked procedure
 * descriptors into the catalog.
 *
 * @param db the database entry in the catalog
 * @param hsql an interface to the hsql frontend, initialized and potentially reused by the caller.
 * @param allProcs the procedure descriptors collected from ddl/project-file input
 * @param classDependencies additional classes to bundle into the output jar
 * @param whichProcs indicates which ddl-defined procedures to load: none, single-statement, or all
 * @param jarOutput the in-memory jar to populate
 * @throws VoltCompilerException
 */
private void compileProcedures(Database db,
        HSQLInterface hsql,
        Collection<ProcedureDescriptor> allProcs,
        Collection<Class<?>> classDependencies,
        DdlProceduresToLoad whichProcs,
        InMemoryJarfile jarOutput) throws VoltCompilerException
{
    // Ignore class dependencies if ignoring java stored procs.
    // This extra qualification anticipates some (undesirable) overlap between planner
    // testing and additional library code in the catalog jar file.
    // That is, if it became possible for ddl file syntax to trigger additional
    // (non-stored-procedure) class loading into the catalog jar,
    // planner-only testing would find it convenient to ignore those
    // dependencies for its "dry run" on an unchanged application ddl file.
    if (whichProcs == DdlProceduresToLoad.ALL_DDL_PROCEDURES) {
        // Add all the class dependencies to the output jar
        for (final Class<?> classDependency : classDependencies) {
            addClassToJar(jarOutput, classDependency);
        }
    }
    // Generate the auto-CRUD procedure descriptors. This creates
    // procedure descriptors to insert, delete, select and update
    // tables, with some caveats. (See ENG-1601).
    final List<ProcedureDescriptor> procedures = generateCrud();
    procedures.addAll(allProcs);
    // Actually parse and handle all the Procedures
    for (final ProcedureDescriptor procedureDescriptor : procedures) {
        final String procedureName = procedureDescriptor.m_className;
        // m_currentFilename tracks what is being compiled for error reporting:
        // a ".class" file name for Java procedures, the class name otherwise.
        if (procedureDescriptor.m_singleStmt == null) {
            m_currentFilename = procedureName.substring(procedureName.lastIndexOf('.') + 1);
            m_currentFilename += ".class";
        }
        else if (whichProcs == DdlProceduresToLoad.ONLY_SINGLE_STATEMENT_PROCEDURES) {
            // In planner test mode, especially within the plannerTester framework,
            // ignore any java procedures referenced in ddl CREATE PROCEDURE statements to allow
            // re-use of actual application ddl files without introducing class dependencies.
            // This potentially allows automatic plannerTester regression test support
            // for all the single-statement procedures of an unchanged application ddl file.
            continue;
        }
        else {
            m_currentFilename = procedureName;
        }
        ProcedureCompiler.compile(this, hsql, m_estimates, m_catalog, db, procedureDescriptor, jarOutput);
    }
    // done handling files
    m_currentFilename = null;
}
/**
 * Propagates partitioning to the destination table of a materialized view.
 * A view of a partitioned table is partitioned -- regardless of whether it has
 * a partition key -- it certainly isn't replicated! If the source table's
 * partitioning column appears among the view's GROUP BY columns, the matching
 * view column becomes the view table's partitioning column; otherwise the view
 * table is left without one (seemingly randomly distributed, and its grouped
 * columns are only locally unique, not globally unique).
 */
private void setGroupedTablePartitionColumn(MaterializedViewInfo mvi, Column partitionColumn)
        throws VoltCompilerException {
    final Table viewTable = mvi.getDest();
    // View columns in "index" order -- this matches the iteration order of the
    // MaterializedViewInfo's getGroupbycols, so positions line up one-to-one.
    final List<Column> viewColumns = CatalogUtil.getSortedCatalogItems(viewTable.getColumns(), "index");
    // Note getTypeName gets the column name -- go figure.
    final String targetName = partitionColumn.getTypeName();
    int position = 0;
    for (ColumnRef groupbyRef : CatalogUtil.getSortedCatalogItems(mvi.getGroupbycols(), "index")) {
        if (groupbyRef.getColumn().getName().equals(targetName)) {
            viewTable.setPartitioncolumn(viewColumns.get(position));
            return;
        }
        position++;
    }
}
/** Provide a feedback path to monitor plan output via harvestCapturedDetail */
public void enableDetailedCapture() {
    // A non-null list is the signal for the capture* methods to record detail.
    m_capturedDiagnosticDetail = new ArrayList<String>();
}
/**
 * Access recent plan output, for diagnostic purposes.
 * One-shot: returns everything captured since enableDetailedCapture() and
 * disables further capture until it is enabled again.
 */
public List<String> harvestCapturedDetail() {
    List<String> harvested = m_capturedDiagnosticDetail;
    m_capturedDiagnosticDetail = null;
    return harvested;
}
/** Capture plan context info -- statement, cost, high-level "explain". */
public void captureDiagnosticContext(String planDescription) {
    // Recording happens only while detailed capture is enabled.
    if (m_capturedDiagnosticDetail != null) {
        m_capturedDiagnosticDetail.add(planDescription);
    }
}
/** Capture plan content in terse json format. */
public void captureDiagnosticJsonFragment(String json) {
    // Recording happens only while detailed capture is enabled.
    if (m_capturedDiagnosticDetail != null) {
        m_capturedDiagnosticDetail.add(json);
    }
}
/**
 * Create INSERT, UPDATE, DELETE and SELECT procedure descriptors for all partitioned,
 * non-export tables with primary keys that include the partitioning column.
 * Replicated tables always get an insert proc, and additionally delete/update
 * when they declare a primary key. Export-only tables and views are skipped.
 *
 * @return a list of new procedure descriptors
 */
private List<ProcedureDescriptor> generateCrud() {
    final LinkedList<ProcedureDescriptor> crudprocs = new LinkedList<ProcedureDescriptor>();
    final Database db = getCatalogDatabase();
    for (Table table : db.getTables()) {
        if (CatalogUtil.isTableExportOnly(db, table)) {
            compilerLog.debug("Skipping creation of CRUD procedures for export-only table " +
                    table.getTypeName());
            continue;
        }
        if (table.getMaterializer() != null) {
            compilerLog.debug("Skipping creation of CRUD procedures for view " +
                    table.getTypeName());
            continue;
        }
        // select/delete/update crud requires pkey. Pkeys are stored as constraints.
        final CatalogMap<Constraint> constraints = table.getConstraints();
        final Iterator<Constraint> it = constraints.iterator();
        Constraint pkey = null;
        while (it.hasNext()) {
            Constraint constraint = it.next();
            if (constraint.getType() == ConstraintType.PRIMARY_KEY.getValue()) {
                pkey = constraint;
                break;
            }
        }
        if (table.getIsreplicated()) {
            if (pkey != null) {
                compilerLog.debug("Creating multi-partition insert/delete/update procedures for replicated table " +
                        table.getTypeName());
                crudprocs.add(generateCrudReplicatedInsert(table));
                crudprocs.add(generateCrudReplicatedDelete(table, pkey));
                crudprocs.add(generateCrudReplicatedUpdate(table, pkey));
            }
            else {
                compilerLog.debug("Creating multi-partition insert procedures for replicated table " +
                        table.getTypeName());
                crudprocs.add(generateCrudReplicatedInsert(table));
            }
            continue;
        }
        // get the partition column
        final Column partitioncolumn = table.getPartitioncolumn();
        // all partitioned tables get insert crud procs
        crudprocs.add(generateCrudInsert(table, partitioncolumn));
        if (pkey == null) {
            compilerLog.debug("Skipping creation of CRUD select/delete/update for partitioned table " +
                    table.getTypeName() + " because no primary key is declared.");
            continue;
        }
        // Primary key must include the partition column for the table
        // for select/delete/update
        boolean pkeyHasPartitionColumn = false;
        CatalogMap<ColumnRef> pkeycols = pkey.getIndex().getColumns();
        Iterator<ColumnRef> pkeycolsit = pkeycols.iterator();
        while (pkeycolsit.hasNext()) {
            ColumnRef colref = pkeycolsit.next();
            if (colref.getColumn().equals(partitioncolumn)) {
                pkeyHasPartitionColumn = true;
                break;
            }
        }
        if (!pkeyHasPartitionColumn) {
            compilerLog.debug("Skipping creation of CRUD select/delete/update for partitioned table " +
                    table.getTypeName() + " because primary key does not include the partitioning column.");
            continue;
        }
        // select, delete and update here (insert generated above)
        crudprocs.add(generateCrudSelect(table, partitioncolumn, pkey));
        crudprocs.add(generateCrudDelete(table, partitioncolumn, pkey));
        crudprocs.add(generateCrudUpdate(table, partitioncolumn, pkey));
    }
    return crudprocs;
}
/** Helper to sort table columns by table column order */
private static class TableColumnComparator implements Comparator<Column> {
    public TableColumnComparator() {
    }
    /** Orders by catalog column index. Integer.compare avoids the subtraction
     *  overflow pitfall of the previous {@code o1.getIndex() - o2.getIndex()}. */
    @Override
    public int compare(Column o1, Column o2) {
        return Integer.compare(o1.getIndex(), o2.getIndex());
    }
}
/** Helper to sort index columnrefs by index column order */
private static class ColumnRefComparator implements Comparator<ColumnRef> {
    public ColumnRefComparator() {
    }
    /** Orders by index column position. Integer.compare avoids the subtraction
     *  overflow pitfall of the previous {@code o1.getIndex() - o2.getIndex()}. */
    @Override
    public int compare(ColumnRef o1, ColumnRef o2) {
        return Integer.compare(o1.getIndex(), o2.getIndex());
    }
}
/**
 * Helper to generate a " WHERE (pkey_col1 = ?) AND (pkey_col2 = ?) ...;" clause
 * over the primary key columns, in index column order.
 *
 * @param partitioncolumn partitioning column for the table (null for replicated tables)
 * @param pkey primary key constraint from the catalog
 * @param sb string buffer accumulating the sql statement
 * @return offset in the index of the partition column, or -1 when absent
 */
private int generateCrudPKeyWhereClause(Column partitioncolumn,
        Constraint pkey, StringBuilder sb)
{
    // Sort the catalog index columns by index column order.
    final ArrayList<ColumnRef> keyColumns = new ArrayList<ColumnRef>(pkey.getIndex().getColumns().size());
    for (ColumnRef cref : pkey.getIndex().getColumns()) {
        keyColumns.add(cref);
    }
    Collections.sort(keyColumns, new ColumnRefComparator());
    int partitionOffset = -1;
    sb.append(" WHERE ");
    String separator = "";
    for (ColumnRef keyCol : keyColumns) {
        sb.append(separator).append("(").append(keyCol.getColumn().getName()).append(" = ?)");
        separator = " AND ";
        // Remember where the partition column sits within the key.
        if (keyCol.getColumn() == partitioncolumn) {
            partitionOffset = keyCol.getIndex();
        }
    }
    sb.append(";");
    return partitionOffset;
}
/**
 * Helper to generate a "col1 = ?, col2 = ?, ..." assignment list covering
 * every column of the table, in catalog column order.
 *
 * @param table source of the column set
 * @param sb string buffer accumulating the sql statement
 */
private void generateCrudExpressionColumns(Table table, StringBuilder sb) {
    // Copy the catalog columns and order them by column index.
    final ArrayList<Column> orderedColumns = new ArrayList<Column>(table.getColumns().size());
    for (Column column : table.getColumns()) {
        orderedColumns.add(column);
    }
    Collections.sort(orderedColumns, new TableColumnComparator());
    String separator = "";
    for (Column column : orderedColumns) {
        sb.append(separator).append(column.getName()).append(" = ?");
        separator = ", ";
    }
}
/**
 * Helper to generate a parenthesized VALUES placeholder list -- "(?, ?, ...)" --
 * with one "?" per column of the table. Note: despite the name, no column names
 * are emitted, only placeholders, so column order is irrelevant here.
 *
 * @param table source of the column count
 * @param sb string buffer accumulating the sql statement
 */
private void generateCrudColumnList(Table table, StringBuilder sb) {
    // Only the column COUNT matters; the previous implementation copied and
    // sorted the columns to no effect on the generated text.
    final int columnCount = table.getColumns().size();
    sb.append("(");
    for (int i = 0; i < columnCount; i++) {
        if (i > 0) {
            sb.append(", ");
        }
        sb.append("?");
    }
    sb.append(")");
}
/**
 * Create a statement like:
 * "delete from <table> where {<pkey-column =?>...}"
 */
private ProcedureDescriptor generateCrudDelete(Table table,
        Column partitioncolumn, Constraint pkey)
{
    final StringBuilder sql = new StringBuilder("DELETE FROM ");
    sql.append(table.getTypeName());
    final int partitionOffset = generateCrudPKeyWhereClause(partitioncolumn, pkey, sql);
    // Partition info format: "table.column:parameter-offset" (offset within the WHERE clause).
    final String partitioninfo = table.getTypeName() + "." + partitioncolumn.getName()
            + ":" + partitionOffset;
    return new ProcedureDescriptor(
            new ArrayList<String>(),          // groups
            table.getTypeName() + ".delete",  // className
            sql.toString(),                   // singleStmt
            null,                             // joinOrder
            partitioninfo,                    // table.column:offset
            true,                             // builtin statement
            null,                             // language type for embedded scripts
            null,                             // script implementation
            null);                            // code block script class
}
/**
 * Create a statement like:
 * "update <table> set {<each-column = ?>...} where {<pkey-column = ?>...}
 */
private ProcedureDescriptor generateCrudUpdate(Table table,
        Column partitioncolumn, Constraint pkey)
{
    StringBuilder sb = new StringBuilder();
    sb.append("UPDATE " + table.getTypeName() + " SET ");
    generateCrudExpressionColumns(table, sb);
    generateCrudPKeyWhereClause(partitioncolumn, pkey, sb);
    // The partition parameter offset here is the partition column's catalog index,
    // i.e. its position in the SET list (one parameter per column, in column order)
    // -- unlike delete/select, which use the WHERE-clause offset.
    String partitioninfo =
        table.getTypeName() + "." + partitioncolumn.getName() + ":" + partitioncolumn.getIndex();
    ProcedureDescriptor pd =
        new ProcedureDescriptor(
                new ArrayList<String>(), // groups
                table.getTypeName() + ".update", // className
                sb.toString(), // singleStmt
                null, // joinOrder
                partitioninfo, // table.column:offset
                true, // builtin statement
                null, // language type for embedded scripts
                null, // script implementation
                null); // code block script class
    return pd;
}
/**
 * Create a statement like:
 * "insert into <table> values (?, ?, ...);"
 */
private ProcedureDescriptor generateCrudInsert(Table table,
        Column partitioncolumn)
{
    final StringBuilder sql = new StringBuilder("INSERT INTO ");
    sql.append(table.getTypeName()).append(" VALUES ");
    generateCrudColumnList(table, sql);
    sql.append(";");
    // The VALUES parameters are in column order, so the partition column's
    // catalog index doubles as its parameter offset.
    final String partitioninfo = table.getTypeName() + "." + partitioncolumn.getName()
            + ":" + partitioncolumn.getIndex();
    return new ProcedureDescriptor(
            new ArrayList<String>(),          // groups
            table.getTypeName() + ".insert",  // className
            sql.toString(),                   // singleStmt
            null,                             // joinOrder
            partitioninfo,                    // table.column:offset
            true,                             // builtin statement
            null,                             // language type for embedded scripts
            null,                             // script implementation
            null);                            // code block script class
}
/**
 * Create a statement like:
 * "insert into <table> values (?, ?, ...);"
 * for a replicated table (no partition info attached).
 */
private ProcedureDescriptor generateCrudReplicatedInsert(Table table) {
    final StringBuilder sql = new StringBuilder("INSERT INTO ");
    sql.append(table.getTypeName()).append(" VALUES ");
    generateCrudColumnList(table, sql);
    sql.append(";");
    return new ProcedureDescriptor(
            new ArrayList<String>(),          // groups
            table.getTypeName() + ".insert",  // className
            sql.toString(),                   // singleStmt
            null,                             // joinOrder
            null,                             // table.column:offset
            true,                             // builtin statement
            null,                             // language type for embedded scripts
            null,                             // script implementation
            null);                            // code block script class
}
/**
 * Create a statement like:
 * "update <table> set {<each-column = ?>...} where {<pkey-column = ?>...}
 * for a replicated table (no partition info attached).
 */
private ProcedureDescriptor generateCrudReplicatedUpdate(Table table,
        Constraint pkey)
{
    final StringBuilder sql = new StringBuilder("UPDATE ");
    sql.append(table.getTypeName()).append(" SET ");
    generateCrudExpressionColumns(table, sql);
    // Passing a null partition column: no offset is needed for replicated tables.
    generateCrudPKeyWhereClause(null, pkey, sql);
    return new ProcedureDescriptor(
            new ArrayList<String>(),          // groups
            table.getTypeName() + ".update",  // className
            sql.toString(),                   // singleStmt
            null,                             // joinOrder
            null,                             // table.column:offset
            true,                             // builtin statement
            null,                             // language type for embedded scripts
            null,                             // script implementation
            null);                            // code block script class
}
/**
 * Create a statement like:
 * "delete from <table> where {<pkey-column =?>...}"
 * for a replicated table (no partition info attached).
 */
private ProcedureDescriptor generateCrudReplicatedDelete(Table table,
        Constraint pkey)
{
    final StringBuilder sql = new StringBuilder("DELETE FROM ");
    sql.append(table.getTypeName());
    // Passing a null partition column: no offset is needed for replicated tables.
    generateCrudPKeyWhereClause(null, pkey, sql);
    return new ProcedureDescriptor(
            new ArrayList<String>(),          // groups
            table.getTypeName() + ".delete",  // className
            sql.toString(),                   // singleStmt
            null,                             // joinOrder
            null,                             // table.column:offset
            true,                             // builtin statement
            null,                             // language type for embedded scripts
            null,                             // script implementation
            null);                            // code block script class
}
/**
 * Create a statement like:
 * "select * from <table> where (pkey_col1 = ?) AND (pkey_col2 = ?) ... ;"
 */
private ProcedureDescriptor generateCrudSelect(Table table,
        Column partitioncolumn, Constraint pkey)
{
    final StringBuilder sql = new StringBuilder("SELECT * FROM ");
    sql.append(table.getTypeName());
    final int partitionOffset = generateCrudPKeyWhereClause(partitioncolumn, pkey, sql);
    // Partition info format: "table.column:parameter-offset" (offset within the WHERE clause).
    final String partitioninfo = table.getTypeName() + "." + partitioncolumn.getName()
            + ":" + partitionOffset;
    return new ProcedureDescriptor(
            new ArrayList<String>(),          // groups
            table.getTypeName() + ".select",  // className
            sql.toString(),                   // singleStmt
            null,                             // joinOrder
            partitioninfo,                    // table.column:offset
            true,                             // builtin statement
            null,                             // language type for embedded scripts
            null,                             // script implementation
            null);                            // code block script class
}
/**
 * Placeholder for attaching table-size estimates to the compiled database.
 * Currently a deliberate no-op; the commented sketch below outlines the
 * intended implementation.
 */
static void addDatabaseEstimatesInfo(final DatabaseEstimates estimates, final Database db) {
    // Not implemented yet. Don't panic.
    /*for (Table table : db.getTables()) {
        DatabaseEstimates.TableEstimates tableEst = new DatabaseEstimates.TableEstimates();
        tableEst.maxTuples = 1000000;
        tableEst.minTuples = 100000;
        estimates.tables.put(table, tableEst);
    }*/
}
/**
 * Translates one procedures/procedure element from a project file into a
 * ProcedureDescriptor: a single-statement ("statement") procedure when a
 * sql child element is present, otherwise a Java class procedure.
 *
 * @throws VoltCompilerException when a Java procedure carries a partitioninfo
 *         attribute or its class cannot be loaded
 */
ProcedureDescriptor getProcedure(
        org.voltdb.compiler.projectfile.ProceduresType.Procedure xmlproc)
        throws VoltCompilerException
{
    // @groups attribute: comma-separated list of role names.
    final ArrayList<String> groups = new ArrayList<String>();
    final String groupsAttr = xmlproc.getGroups();
    if (groupsAttr != null) {
        for (String group : groupsAttr.split(",")) {
            groups.add(group);
        }
    }
    // @class
    final String classattr = xmlproc.getClazz();
    // If procedure/sql is present, this is a "statement procedure"
    if (xmlproc.getSql() != null) {
        // null partattr means multi-partition; normalize empty to null too.
        String partattr = xmlproc.getPartitioninfo();
        if (partattr != null && partattr.length() == 0) {
            partattr = null;
        }
        return new ProcedureDescriptor(groups, classattr,
                                       xmlproc.getSql().getValue(),
                                       xmlproc.getSql().getJoinorder(),
                                       partattr, false, null, null, null);
    }
    // Otherwise it is a Java procedure: partition info must come from the
    // @ProcInfo annotation, not from the project file.
    String partattr = xmlproc.getPartitioninfo();
    if (partattr != null) {
        String msg = "Java procedures must specify partition info using " +
            "@ProcInfo annotation in the Java class implementation " +
            "and may not use the @partitioninfo project file procedure attribute.";
        throw new VoltCompilerException(msg);
    }
    final Class<?> clazz;
    try {
        clazz = Class.forName(classattr, true, m_classLoader);
    } catch (ClassNotFoundException e) {
        throw new VoltCompilerException(String.format(
                "Cannot load class for procedure: %s",
                classattr));
    }
    return new ProcedureDescriptor(groups, Language.JAVA, null, clazz);
}
/**
 * Resolves a classdependencies/classdependency element to a loaded Class.
 *
 * @throws VoltCompilerException when the class attribute is empty or the class
 *         cannot be found on the compiler's class loader
 */
Class<?> getClassDependency(Classdependency xmlclassdep)
        throws VoltCompilerException
{
    final String className = xmlclassdep.getClazz();
    // schema doesn't currently enforce this.. but could I guess.
    if (className.length() == 0) {
        throw new VoltCompilerException("\"classDependency\" element has empty \"class\" attribute.");
    }
    try {
        return Class.forName(className, true, m_classLoader);
    } catch (final ClassNotFoundException e) {
        throw new VoltCompilerException("\"classDependency\" can not find class " + className + " in classpath");
    }
}
/**
 * Applies a project file's export configuration to the catalog: ensures the
 * single connector exists and registers each listed table as an export table.
 */
private void compileExport(final ExportType export, final Database catdb)
        throws VoltCompilerException
{
    // Test the error paths before touching the catalog
    if (export == null) {
        return;
    }
    // Catalog Connector
    // Relying on schema's enforcement of at most 1 connector
    // This check is also done here to mimic the same behavior of the
    // previous implementation of this method, where the connector is created as
    // long as the export element is present in project XML. Now that we are
    // deprecating project.xml, we won't be able to mimic in DDL, what an
    // empty <export/> element currently implies.
    org.voltdb.catalog.Connector catconn = catdb.getConnectors().getIgnoreCase("0");
    if (catconn == null) {
        catconn = catdb.getConnectors().add("0");
    }
    // Catalog Connector.ConnectorTableInfo
    if (export.getTables() == null) {
        compilerLog.warn("Export defined with no <tables> element");
        return;
    }
    for (Tables.Table xmltable : export.getTables().getTable()) {
        addExportTableToConnector(xmltable.getName(), catdb);
    }
    if (export.getTables().getTable().isEmpty()) {
        compilerLog.warn("Export defined with an empty <tables> element");
    }
}
/**
 * Registers one table as an export table on the (single) export connector,
 * validating that it exists and is eligible: not a view, not a view source,
 * and without indexes. Replicated tables are converted to partitioned-on-no-column.
 *
 * @param tableName name of the table to export (non-null, non-blank)
 * @param catdb the catalog database entry
 * @throws VoltCompilerException when the table is missing, ineligible, or already exported
 */
void addExportTableToConnector( final String tableName, final Database catdb)
        throws VoltCompilerException
{
    assert tableName != null && ! tableName.trim().isEmpty() && catdb != null;
    // Catalog Connector
    // Relying on schema's enforcement of at most 1 connector
    org.voltdb.catalog.Connector catconn = catdb.getConnectors().getIgnoreCase("0");
    if (catconn == null) {
        catconn = catdb.getConnectors().add("0");
    }
    org.voltdb.catalog.Table tableref = catdb.getTables().getIgnoreCase(tableName);
    if (tableref == null) {
        throw new VoltCompilerException("While configuring export, table " + tableName + " was not present in " +
                "the catalog.");
    }
    // Eligibility checks, each with its own targeted error.
    if (CatalogUtil.isTableMaterializeViewSource(catdb, tableref)) {
        compilerLog.error("While configuring export, table " + tableName + " is a source table " +
                "for a materialized view. Export only tables do not support views.");
        throw new VoltCompilerException("Export table configured with materialized view.");
    }
    if (tableref.getMaterializer() != null)
    {
        compilerLog.error("While configuring export, table " + tableName + " is a " +
                "materialized view. A view cannot be an export table.");
        throw new VoltCompilerException("View configured as an export table");
    }
    if (tableref.getIndexes().size() > 0) {
        compilerLog.error("While configuring export, table " + tableName + " has indexes defined. " +
                "Export tables can't have indexes (including primary keys).");
        throw new VoltCompilerException("Table with indexes configured as an export table");
    }
    if (tableref.getIsreplicated()) {
        // if you don't specify partition columns, make
        // export tables partitioned, but on no specific column (iffy)
        tableref.setIsreplicated(false);
        tableref.setPartitioncolumn(null);
    }
    // Registering the same table twice is an error.
    org.voltdb.catalog.ConnectorTableInfo connTableInfo =
            catconn.getTableinfo().getIgnoreCase(tableName);
    if (connTableInfo == null) {
        connTableInfo = catconn.getTableinfo().add(tableName);
        connTableInfo.setTable(tableref);
        connTableInfo.setAppendonly(true);
    }
    else {
        throw new VoltCompilerException(String.format(
                "Table \"%s\" is already exported", tableName
                ));
    }
}
// Usage messages for new and legacy syntax.
// New syntax: output jar followed by one or more DDL files.
static final String usageNew = "VoltCompiler <output-JAR> <input-DDL> ...";
// Legacy syntax: project.xml followed by the output jar.
static final String usageLegacy = "VoltCompiler <project-file> <output-JAR>";
/**
 * Command-line entry point.
 *
 * Incoming arguments:
 *
 *   New syntax:    OUTPUT_JAR INPUT_DDL ...
 *   Legacy syntax: PROJECT_FILE OUTPUT_JAR
 *
 * Exits with -1 on bad arguments or a failed compile; prints a success or
 * error summary to stdout otherwise.
 *
 * @param args arguments (see above)
 */
public static void main(final String[] args)
{
    final VoltCompiler compiler = new VoltCompiler();
    boolean success = false;

    // Dispatch on the extension of the first argument.
    final String firstArg = (args.length > 0) ? args[0].toLowerCase() : "";

    if (firstArg.endsWith(".jar")) {
        // New syntax: the first argument is the output *.jar.
        if (args.length < 2) {
            System.err.printf("Usage: %s\n", usageNew);
            System.exit(-1);
        }
        // Catch accidental incomplete use of the legacy syntax: argument 2
        // must be a DDL file, never .xml or .jar.
        final String secondArg = args[1].toLowerCase();
        if (secondArg.endsWith(".xml") || secondArg.endsWith(".jar")) {
            System.err.println("Error: Expecting a DDL file as the second argument.\n"
                    + " .xml and .jar are invalid DDL file extensions.");
            System.exit(-1);
        }
        try {
            success = compiler.compileFromDDL(args[0], ArrayUtils.subarray(args, 1, args.length));
        } catch (VoltCompilerException e) {
            System.err.printf("Compiler exception: %s\n", e.getMessage());
        }
    }
    else if (firstArg.endsWith(".xml")) {
        // Legacy syntax: project file then output jar, exactly two arguments.
        if (args.length != 2) {
            System.err.printf("Usage: %s\n", usageLegacy);
            System.exit(-1);
        }
        success = compiler.compileWithProjectXML(args[0], args[1]);
    }
    else {
        // Can't recognize the arguments or there are no arguments.
        System.err.printf("Usage: %s\n %s\n", usageNew, usageLegacy);
        System.exit(-1);
    }

    // Should have exited if inadequate arguments were provided.
    assert(args.length > 0);

    // Exit with error code if we failed.
    if (!success) {
        compiler.summarizeErrors(System.out, null);
        System.exit(-1);
    }
    compiler.summarizeSuccess(System.out, null, args[0]);
}
public void summarizeSuccess(PrintStream outputStream, PrintStream feedbackStream, String jarOutputPath) {
if (outputStream != null) {
Database database = getCatalogDatabase();
outputStream.println("
outputStream.println("Successfully created " + jarOutputPath);
for (String ddl : m_ddlFilePaths.keySet()) {
outputStream.println("Includes schema: " + m_ddlFilePaths.get(ddl));
}
outputStream.println();
// Accumulate a summary of the summary for a briefer report
ArrayList<Procedure> nonDetProcs = new ArrayList<Procedure>();
ArrayList<Procedure> tableScans = new ArrayList<Procedure>();
int countSinglePartition = 0;
int countMultiPartition = 0;
int countDefaultProcs = 0;
for (Procedure p : database.getProcedures()) {
if (p.getSystemproc()) {
continue;
}
// Aggregate statistics about MP/SP/SEQ
if (!p.getDefaultproc()) {
if (p.getSinglepartition()) {
countSinglePartition++;
}
else {
countMultiPartition++;
}
}
else {
countDefaultProcs++;
}
if (p.getHasseqscans()) {
tableScans.add(p);
}
outputStream.printf("[%s][%s] %s\n",
p.getSinglepartition() ? "SP" : "MP",
p.getReadonly() ? "READ" : "WRITE",
p.getTypeName());
for (Statement s : p.getStatements()) {
String seqScanTag = "";
if (s.getSeqscancount() > 0) {
seqScanTag = "[TABLE SCAN] ";
}
String determinismTag = "";
// if the proc is a java stored proc that is read&write,
// output determinism warnings
if (p.getHasjava() && (!p.getReadonly())) {
if (s.getIscontentdeterministic() == false) {
determinismTag = "[NDC] ";
nonDetProcs.add(p);
}
else if (s.getIsorderdeterministic() == false) {
determinismTag = "[NDO] ";
nonDetProcs.add(p);
}
}
String statementLine;
String sqlText = s.getSqltext();
sqlText = squeezeWhitespace(sqlText);
if (seqScanTag.length() + determinismTag.length() + sqlText.length() > 80) {
statementLine = " " + (seqScanTag + determinismTag + sqlText).substring(0, 80) + "...";
} else {
statementLine = " " + seqScanTag + determinismTag + sqlText;
}
outputStream.println(statementLine);
}
outputStream.println();
}
outputStream.println("
if (m_addedClasses.length > 0) {
if (m_addedClasses.length > 10) {
outputStream.printf("Added %d additional classes to the catalog jar.\n\n",
m_addedClasses.length);
}
else {
String logMsg = "Added the following additional classes to the catalog jar:\n";
for (String className : m_addedClasses) {
logMsg += " " + className + "\n";
}
outputStream.println(logMsg);
}
outputStream.println("
}
// post-compile summary and legend.
outputStream.printf(
"Catalog contains %d built-in CRUD procedures.\n" +
"\tSimple insert, update, delete and select procedures are created\n" +
"\tautomatically for convenience.\n\n",
countDefaultProcs);
if (countSinglePartition > 0) {
outputStream.printf(
"[SP] Catalog contains %d single partition procedures.\n" +
"\tSingle partition procedures run in parallel and scale\n" +
"\tas partitions are added to a cluster.\n\n",
countSinglePartition);
}
if (countMultiPartition > 0) {
outputStream.printf(
"[MP] Catalog contains %d multi-partition procedures.\n" +
"\tMulti-partition procedures run globally at all partitions\n" +
"\tand do not run in parallel with other procedures.\n\n",
countMultiPartition);
}
if (!tableScans.isEmpty()) {
outputStream.printf("[TABLE SCAN] Catalog contains %d procedures that use a table scan:\n\n",
tableScans.size());
for (Procedure p : tableScans) {
outputStream.println("\t\t" + p.getClassname());
}
outputStream.printf(
"\n\tTable scans do not use indexes and may become slower as tables grow.\n\n");
}
if (!nonDetProcs.isEmpty()) {
outputStream.println(
"[NDO][NDC] NON-DETERMINISTIC CONTENT OR ORDER WARNING:\n" +
"\tThe procedures listed below contain non-deterministic queries.\n");
for (Procedure p : nonDetProcs) {
outputStream.println("\t\t" + p.getClassname());
}
outputStream.printf(
"\n" +
"\tUsing the output of these queries as input to subsequent\n" +
"\twrite queries can result in differences between replicated\n" +
"\tpartitions at runtime, forcing VoltDB to shutdown the cluster.\n" +
"\tReview the compiler messages above to identify the offending\n" +
"\tSQL statements (marked as \"[NDO] or [NDC]\"). Add a unique\n" +
"\tindex to the schema or an explicit ORDER BY clause to the\n" +
"\tquery to make these queries deterministic.\n\n");
}
if (countSinglePartition == 0 && countMultiPartition > 0) {
outputStream.printf(
"ALL MULTI-PARTITION WARNING:\n" +
"\tAll of the user procedures are multi-partition. This often\n" +
"\tindicates that the application is not utilizing VoltDB partitioning\n" +
"\tfor best performance. For information on VoltDB partitioning, see:\n"+
"\thttp://voltdb.com/docs/UsingVoltDB/ChapAppDesign.php\n\n");
}
if (m_reportPath != null) {
outputStream.println("
outputStream.println("Full catalog report can be found at file://" + m_reportPath + "\n" +
"\t or can be viewed at \"http://localhost:8080\" when the server is running.\n");
}
outputStream.println("
}
if (feedbackStream != null) {
for (Feedback fb : m_warnings) {
feedbackStream.println(fb.getLogString());
}
for (Feedback fb : m_infos) {
feedbackStream.println(fb.getLogString());
}
}
}
/**
 * Return a copy of the input sqltext with each run of successive whitespace
 * characters replaced by a single space.
 * This is just for informal feedback purposes, so quoting is not respected.
 * @param sqltext SQL statement text to compact
 * @return a possibly modified copy of the input sqltext
 **/
private static String squeezeWhitespace(String sqltext) {
    // Collapse every whitespace run (spaces, tabs, newlines) to one space.
    return sqltext.replaceAll("\\s+", " ");
}
public void summarizeErrors(PrintStream outputStream, PrintStream feedbackStream) {
if (outputStream != null) {
outputStream.println("
outputStream.println("Catalog compilation failed.");
outputStream.println("
}
if (feedbackStream != null) {
for (Feedback fb : m_errors) {
feedbackStream.println(fb.getLogString());
}
}
}
// Classes already bundled into the output jar during the current compile,
// so recursive inner-class additions do not duplicate entries.
// NOTE(review): this is static state shared across compiles; per the
// original comment it needs to be reset in the main compile func — confirm
// that reset actually happens there.
private static final HashSet<Class<?>> cachedAddedClasses = new HashSet<Class<?>>();
/**
 * Fetch the raw classfile bytes for a class, either directly from this
 * compiler's in-memory jar (when the class was loaded by a JarLoader) or by
 * streaming the .class resource from the classpath.
 *
 * @param c the class whose bytecode is wanted
 * @return the classfile bytes
 * @throws IOException if the classfile resource cannot be found or read
 */
private byte[] getClassAsBytes(final Class<?> c) throws IOException {
    ClassLoader cl = c.getClassLoader();
    if (cl == null) {
        // Bootstrap-loaded classes report a null loader; fall back to the
        // thread context loader.
        cl = Thread.currentThread().getContextClassLoader();
    }
    String classAsPath = c.getName().replace('.', '/') + ".class";

    if (cl instanceof JarLoader) {
        // Classes inside an in-memory jar come straight from its map.
        InMemoryJarfile memJar = ((JarLoader) cl).getInMemoryJarfile();
        return memJar.get(classAsPath);
    }

    // Stream the classfile resource from the classpath.
    // FIX(review): getResourceAsStream may return null for a missing
    // resource; previously that surfaced as an obscure NPE on first read.
    final java.io.InputStream raw = cl.getResourceAsStream(classAsPath);
    if (raw == null) {
        throw new IOException("Cannot find classfile resource " + classAsPath);
    }
    BufferedInputStream cis = new BufferedInputStream(raw);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        byte [] buf = new byte[1024];
        int rsize;
        while ((rsize = cis.read(buf)) != -1) {
            baos.write(buf, 0, rsize);
        }
    } finally {
        try { cis.close(); } catch (Exception ignoreIt) {}
        try { baos.close(); } catch (Exception ignoreIt) {}
    }
    return baos.toByteArray();
}
/**
 * Collect the nested (inner) classes of {@code c} so they can be bundled
 * alongside it in the catalog jar. Handles three loading situations: the
 * compiler's in-memory jar, an on-disk jar, and a plain classfile directory.
 *
 * @param c the outer class
 * @return an immutable list of the discovered inner classes (may be empty)
 * @throws VoltCompilerException if a discovered inner class cannot be
 *         loaded, or the class's own resource cannot be located
 */
public List<Class<?>> getInnerClasses(Class <?> c)
        throws VoltCompilerException {
    ImmutableList.Builder<Class<?>> builder = ImmutableList.builder();
    ClassLoader cl = c.getClassLoader();
    if (cl == null) {
        // Bootstrap-loaded classes report a null loader.
        cl = Thread.currentThread().getContextClassLoader();
    }
    Log.info(cl.getClass().getCanonicalName());

    // if loading from an InMemoryJarFile, the process is a bit different...
    if (cl instanceof JarLoader) {
        // The in-memory jar's loader already tracks inner-class names.
        String[] classes = ((JarLoader) cl).getInnerClassesForClass(c.getName());
        for (String innerName : classes) {
            Class<?> clz = null;
            try {
                clz = cl.loadClass(innerName);
            }
            catch (ClassNotFoundException e) {
                String msg = "Unable to load " + c + " inner class " + innerName +
                        " from in-memory jar representation.";
                throw new VoltCompilerException(msg);
            }
            assert(clz != null);
            builder.add(clz);
        }
    }
    else {
        // Locate the outer class's own .class resource to learn where its
        // siblings (Outer$Inner.class files) live.
        String stem = c.getName().replace('.', '/');
        String cpath = stem + ".class";
        URL curl = cl.getResource(cpath);
        if (curl == null) {
            throw new VoltCompilerException(String.format(
                    "Failed to find class file %s in jar.", cpath));
        }

        // load from an on-disk jar
        if ("jar".equals(curl.getProtocol())) {
            // Match entries of the form stem$Something.class.
            Pattern nameRE = Pattern.compile("\\A(" + stem + "\\$[^/]+).class\\z");
            String jarFN;
            try {
                jarFN = URLDecoder.decode(curl.getFile(), "UTF-8");
            }
            catch (UnsupportedEncodingException e) {
                String msg = "Unable to UTF-8 decode " + curl.getFile() + " for class " + c;
                throw new VoltCompilerException(msg);
            }
            // Strip the "file:" prefix and the "!/entry" suffix of a jar URL.
            jarFN = jarFN.substring(5, jarFN.indexOf('!'));
            JarFile jar = null;
            try {
                jar = new JarFile(jarFN);
                Enumeration<JarEntry> entries = jar.entries();
                while (entries.hasMoreElements()) {
                    String name = entries.nextElement().getName();
                    Matcher mtc = nameRE.matcher(name);
                    if (mtc.find()) {
                        // Convert the entry path back to a binary class name.
                        String innerName = mtc.group(1).replace('/', '.');
                        Class<?> inner;
                        try {
                            inner = cl.loadClass(innerName);
                        } catch (ClassNotFoundException e) {
                            String msg = "Unable to load " + c + " inner class " + innerName;
                            throw new VoltCompilerException(msg);
                        }
                        builder.add(inner);
                    }
                }
            }
            catch (IOException e) {
                String msg = "Cannot access class " + c + " source code location of " + jarFN;
                throw new VoltCompilerException(msg);
            }
            finally {
                if ( jar != null) try {jar.close();} catch (Exception ignoreIt) {};
            }
        }
        // load directly from a classfile
        else if ("file".equals(curl.getProtocol())) {
            Pattern nameRE = Pattern.compile("/(" + stem + "\\$[^/]+).class\\z");
            // Scan the directory containing the outer classfile.
            File sourceDH = new File(curl.getFile()).getParentFile();
            for (File f: sourceDH.listFiles()) {
                Matcher mtc = nameRE.matcher(f.getAbsolutePath());
                if (mtc.find()) {
                    String innerName = mtc.group(1).replace('/', '.');
                    Class<?> inner;
                    try {
                        inner = cl.loadClass(innerName);
                    } catch (ClassNotFoundException e) {
                        String msg = "Unable to load " + c + " inner class " + innerName;
                        throw new VoltCompilerException(msg);
                    }
                    builder.add(inner);
                }
            }
        }
    }
    return builder.build();
}
/**
 * Add a class (and, recursively, its inner classes) to the output jar.
 * Classes already added during this compile are skipped.
 *
 * @param jarOutput the in-memory jar being assembled
 * @param cls       the class to bundle
 * @return true if the class was added, false if it was already present
 * @throws VoltCompilerException if the classfile bytes cannot be located
 */
public boolean addClassToJar(InMemoryJarfile jarOutput, final Class<?> cls)
throws VoltCompiler.VoltCompilerException
{
    // Set.add returns false when the class was already recorded.
    if (!cachedAddedClasses.add(cls)) {
        return false;
    }

    // Bundle nested classes first.
    for (final Class<?> nested : getInnerClasses(cls)) {
        addClassToJar(jarOutput, nested);
    }

    // Jar entry path, e.g. org/example/Foo.class.
    final String packagePath = cls.getName().replace('.', '/') + ".class";
    // Simple file name used only for the error message.
    final String simpleName = cls.getName();
    final String realName = simpleName.substring(simpleName.lastIndexOf('.') + 1) + ".class";

    final byte[] classBytes;
    try {
        classBytes = getClassAsBytes(cls);
    } catch (Exception e) {
        final String msg = "Unable to locate classfile for " + realName;
        throw new VoltCompilerException(msg);
    }

    jarOutput.put(packagePath, classBytes);
    return true;
}
/**
 * Install per-procedure partitioning/metadata overrides used by tests.
 *
 * @param procInfoOverrides the m_procInfoOverrides to set
 */
public void setProcInfoOverrides(Map<String, ProcInfoData> procInfoOverrides) {
    m_procInfoOverrides = procInfoOverrides;
}
/**
 * Helper enum that scans sax exception messages for deprecated xml elements
 *
 * @author ssantoro
 */
enum DeprecatedProjectElement {
    security(
            "(?i)\\Acvc-[^:]+:\\s+Invalid\\s+content\\s+.+?\\s+element\\s+'security'",
            "security may be enabled in the deployment file only"
            );

    /**
     * message regular expression that pertains to the deprecated element
     */
    private final Pattern messagePattern;

    /**
     * a suggestion string to explain alternatives
     */
    private final String suggestion;

    DeprecatedProjectElement(String messageRegex, String suggestion) {
        this.messagePattern = Pattern.compile(messageRegex);
        this.suggestion = suggestion;
    }

    String getSuggestion() {
        return suggestion;
    }

    /**
     * Given a JAXBException it determines whether or not the linked
     * exception is associated with a deprecated xml elements
     *
     * NOTE: this intentionally shadows the implicit enum valueOf(String)
     * with a different parameter type (an overload, not an override).
     *
     * @param jxbex a {@link JAXBException}
     * @return an enum of {@code DeprecatedProjectElement} if the
     *   given exception corresponds to a deprecated xml element,
     *   or null when there is no match
     */
    static DeprecatedProjectElement valueOf( JAXBException jxbex) {
        // Only SAX parse errors carry the schema-validation messages we scan.
        if( jxbex == null
                || jxbex.getLinkedException() == null
                || ! (jxbex.getLinkedException() instanceof org.xml.sax.SAXParseException)
                ) {
            return null;
        }
        org.xml.sax.SAXParseException saxex =
                org.xml.sax.SAXParseException.class.cast(jxbex.getLinkedException());
        // First enum constant whose pattern matches the parser message wins.
        for( DeprecatedProjectElement dpe: DeprecatedProjectElement.values()) {
            Matcher mtc = dpe.messagePattern.matcher(saxex.getMessage());
            if( mtc.find()) return dpe;
        }
        return null;
    }
}
/**
 * Check a loaded catalog. If it needs to be upgraded recompile it and save
 * an upgraded jar file.
 *
 * The upgrade patches the buildinfo in place, recompiles from either the
 * embedded project.xml or the jar's *.sql files, and writes the new catalog
 * jar plus a compiler-output text file next to it.
 *
 * @param outputJar in-memory jar file (updated in place here)
 * @return source version upgraded from or null if not upgraded
 * @throws IOException if the recompile fails or the summary cannot be written
 */
public String upgradeCatalogAsNeeded(InMemoryJarfile outputJar)
throws IOException
{
    // getBuildInfoFromJar() performs some validation.
    String[] buildInfoLines = CatalogUtil.getBuildInfoFromJar(outputJar);
    String versionFromCatalog = buildInfoLines[0];

    // Set if an upgrade happens.
    String upgradedFromVersion = null;

    // Check if it's compatible (or the upgrade is being forced).
    // getConfig() may return null if it's being mocked for a test.
    if ( VoltDB.Configuration.m_forceCatalogUpgrade
            || !versionFromCatalog.equals(VoltDB.instance().getVersionString())) {

        // Check if there's a project; legacy catalogs carry a project.xml.
        VoltCompilerReader projectReader =
                (outputJar.containsKey("project.xml")
                ? new VoltCompilerJarFileReader(outputJar, "project.xml")
                : null);

        // Patch the buildinfo to the current VoltDB version before recompile.
        String versionFromVoltDB = VoltDB.instance().getVersionString();
        buildInfoLines[0] = versionFromVoltDB;
        buildInfoLines[1] = String.format("voltdb-auto-upgrade-to-%s", versionFromVoltDB);
        byte[] buildInfoBytes = StringUtils.join(buildInfoLines, "\n").getBytes();
        outputJar.put(CatalogUtil.CATALOG_BUILDINFO_FILENAME, buildInfoBytes);

        // Gather DDL files for recompilation if not using a project file.
        List<VoltCompilerReader> ddlReaderList = new ArrayList<VoltCompilerReader>();
        if (projectReader == null) {
            // Walk the jar entries in key order.
            Entry<String, byte[]> entry = outputJar.firstEntry();
            while (entry != null) {
                String path = entry.getKey();
                //TODO: It would be better to have a manifest that explicitly lists
                // ddl files instead of using a brute force *.sql glob.
                if (path.toLowerCase().endsWith(".sql")) {
                    ddlReaderList.add(new VoltCompilerJarFileReader(outputJar, path));
                }
                entry = outputJar.higherEntry(entry.getKey());
            }
        }

        // Use the in-memory jarfile-provided class loader so that procedure
        // classes can be found and copied to the new file that gets written.
        ClassLoader originalClassLoader = m_classLoader;
        try {
            m_classLoader = outputJar.getLoader();

            // Compile and save the file to voltdbroot. Assume it's a test environment if there
            // is no catalog context available.
            String jarName = String.format("catalog-%s.jar", versionFromVoltDB);
            String textName = String.format("catalog-%s.out", versionFromVoltDB);
            CatalogContext catalogContext = VoltDB.instance().getCatalogContext();
            final String outputJarPath = (catalogContext != null
                    ? new File(catalogContext.cluster.getVoltroot(), jarName).getPath()
                    : VoltDB.Configuration.getPathToCatalogForTest(jarName));
            // Place the compiler output in a text file in the same folder.
            final String outputTextPath = (catalogContext != null
                    ? new File(catalogContext.cluster.getVoltroot(), textName).getPath()
                    : VoltDB.Configuration.getPathToCatalogForTest(textName));

            consoleLog.info(String.format(
                    "Version %s catalog will be automatically upgraded to version %s.",
                    versionFromCatalog, versionFromVoltDB));

            // Do the compilation work.
            boolean success = compileInternal(projectReader, outputJarPath, ddlReaderList, outputJar);

            if (success) {
                // Set up the return string.
                upgradedFromVersion = versionFromCatalog;
            }

            // Summarize the results to a file.
            // Briefly log success or failure and mention the output text file.
            PrintStream outputStream = new PrintStream(outputTextPath);
            try {
                if (success) {
                    summarizeSuccess(outputStream, outputStream, outputJarPath);
                    consoleLog.info(String.format(
                            "The catalog was automatically upgraded from " +
                            "version %s to %s and saved to \"%s\". " +
                            "Compiler output is available in \"%s\".",
                            versionFromCatalog, versionFromVoltDB,
                            outputJarPath, outputTextPath));
                }
                else {
                    summarizeErrors(outputStream, outputStream);
                    outputStream.close();
                    compilerLog.error("Catalog upgrade failed.");
                    compilerLog.info(String.format(
                            "Had attempted to perform an automatic version upgrade of a " +
                            "catalog that was compiled by an older %s version of VoltDB, " +
                            "but the automatic upgrade failed. The cluster will not be " +
                            "able to start until the incompatibility is fixed. " +
                            "Try re-compiling the catalog with the newer %s version " +
                            "of the VoltDB compiler. Compiler output from the failed " +
                            "upgrade is available in \"%s\".",
                            versionFromCatalog, versionFromVoltDB, outputTextPath));
                    throw new IOException(String.format(
                            "Failed to generate upgraded catalog file \"%s\".",
                            outputJarPath));
                }
            }
            finally {
                // NOTE(review): on the failure path this is a second close();
                // PrintStream.close() is idempotent, so it is harmless.
                outputStream.close();
            }
        }
        finally {
            // Restore the original class loader
            m_classLoader = originalClassLoader;
        }
    }
    return upgradedFromVersion;
}
}
|
package org.voltdb.utils;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import java.util.jar.JarOutputStream;
import org.apache.hadoop_voltpatches.util.PureJavaCrc32;
/**
 * Given a jarfile, construct a map of entry name => byte array representing
 * the contents. Allow it to be modified and written out in flexible ways.
 *
 * Also provides a {@link JarLoader} that defines classes straight from the
 * in-memory .class entries.
 */
public class InMemoryJarfile extends TreeMap<String, byte[]> {
    private static final long serialVersionUID = 1L;

    // Class loader serving .class entries directly out of this jar's map.
    protected final JarLoader m_loader = new JarLoader();

    ///////////////////////////////////////////////////////
    // CONSTRUCTION
    ///////////////////////////////////////////////////////

    public InMemoryJarfile() {}

    /**
     * Load from a URL string, falling back to treating the string as a local
     * file path when it does not parse as a URL.
     */
    public InMemoryJarfile(String pathOrURL) throws IOException {
        InputStream fin = null;
        try {
            URL url = new URL(pathOrURL);
            fin = url.openStream();
        } catch (MalformedURLException ex) {
            // Invalid URL. Try as a file.
            fin = new FileInputStream(pathOrURL);
        }
        loadFromStream(fin);
    }

    public InMemoryJarfile(URL url) throws IOException {
        loadFromStream(url.openStream());
    }

    public InMemoryJarfile(File file) throws IOException {
        loadFromStream(new FileInputStream(file));
    }

    public InMemoryJarfile(byte[] bytes) throws IOException {
        loadFromStream(new ByteArrayInputStream(bytes));
    }

    // Read every jar entry from the stream into this map (name -> bytes).
    private void loadFromStream(InputStream in) throws IOException {
        JarInputStream jarIn = new JarInputStream(in);
        JarEntry catEntry = null;
        while ((catEntry = jarIn.getNextJarEntry()) != null) {
            byte[] value = readFromJarEntry(jarIn, catEntry);
            String key = catEntry.getName();
            put(key, value);
        }
    }

    /**
     * Fully read the current jar entry from the stream into a byte array,
     * growing the accumulator as needed.
     *
     * @param jarIn stream positioned at the entry to read
     * @param entry the entry being read (unused; kept for API compatibility)
     * @return the entry's bytes, exactly sized
     * @throws IOException on read failure
     */
    public static byte[] readFromJarEntry(JarInputStream jarIn, JarEntry entry) throws IOException {
        int totalRead = 0;
        int maxToRead = 4096 << 10;
        byte[] buffer = new byte[maxToRead];
        byte[] bytes = new byte[maxToRead * 2];

        // Keep reading until we run out of bytes for this entry.
        // FIX(review): loop until read() reports end-of-entry (-1); the
        // previous condition (available() == 1) relied on an estimate that
        // the InputStream contract does not guarantee.
        int readSize;
        while ((readSize = jarIn.read(buffer, 0, buffer.length)) != -1) {
            if (readSize > 0) {
                totalRead += readSize;
                if (totalRead > bytes.length) {
                    // One doubling always suffices because
                    // readSize <= buffer.length <= bytes.length.
                    byte[] temp = new byte[bytes.length * 2];
                    System.arraycopy(bytes, 0, temp, 0, bytes.length);
                    bytes = temp;
                }
                System.arraycopy(buffer, 0, bytes, totalRead - readSize, readSize);
            }
        }

        // Trim bytes to proper size.
        byte retval[] = new byte[totalRead];
        System.arraycopy(bytes, 0, retval, 0, totalRead);
        return retval;
    }

    ///////////////////////////////////////////////////////
    // OUTPUT
    ///////////////////////////////////////////////////////

    /**
     * Write the jar to a file. Returns a Runnable that syncs and closes the
     * underlying stream; callers run it when they need the data durable.
     */
    public Runnable writeToFile(File file) throws IOException {
        final FileOutputStream output = new FileOutputStream(file);
        writeToOutputStream(output);
        return new Runnable() {
            @Override
            public void run() {
                try {
                    output.getFD().sync();
                } catch (Exception e) {
                    throw new RuntimeException(e);
                } finally {
                    try {
                        output.close();
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                }
            }
        };
    }

    /** Serialize the whole jar to a byte array. */
    public byte[] getFullJarBytes() throws IOException {
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        writeToOutputStream(output);
        output.close();
        return output.toByteArray();
    }

    protected void writeToOutputStream(OutputStream output) throws IOException {
        JarOutputStream jarOut = new JarOutputStream(output);
        for (Entry<String, byte[]> e : super.entrySet()) {
            assert(e.getValue() != null);
            JarEntry entry = new JarEntry(e.getKey());
            entry.setSize(e.getValue().length);
            // Make the entry time the epoch so that the SHA-1 hash
            // built by feeding all of the bytes to it returns the same
            // hash for the same catalog.
            // Maybe we ought to have a getSHA1() method that does the same
            // thing as the getCRC() method below?
            entry.setTime(0);
            jarOut.putNextEntry(entry);
            jarOut.write(e.getValue());
            jarOut.flush();
            jarOut.closeEntry();
        }
        jarOut.finish();
    }

    ///////////////////////////////////////////////////////
    // UTILITY
    ///////////////////////////////////////////////////////

    /** CRC over entry names and contents, skipping volatile metadata files. */
    public long getCRC() throws IOException {
        PureJavaCrc32 crc = new PureJavaCrc32();
        for (Entry<String, byte[]> e : super.entrySet()) {
            // Ignore files that differ between otherwise-identical catalogs.
            if (e.getKey().equals("buildinfo.txt") || e.getKey().equals("catalog-report.html")) {
                continue;
            }
            crc.update(e.getKey().getBytes("UTF-8"));
            crc.update(e.getValue());
        }
        return crc.getValue();
    }

    /** Read a file's full contents and store them under the given key. */
    public byte[] put(String key, File value) throws IOException {
        byte[] bytes = new byte[(int) value.length()];
        BufferedInputStream in = new BufferedInputStream(new FileInputStream(value));
        try {
            // FIX(review): a single read() call may return fewer bytes than
            // requested; loop until the whole file has been consumed.
            int offset = 0;
            while (offset < bytes.length) {
                int rsize = in.read(bytes, offset, bytes.length - offset);
                if (rsize == -1) {
                    throw new IOException("Unexpected EOF reading " + value.getPath());
                }
                offset += rsize;
            }
        }
        finally {
            in.close();
        }
        return put(key, bytes);
    }

    ///////////////////////////////////////////////////////
    // CLASSLOADING
    ///////////////////////////////////////////////////////

    public class JarLoader extends ClassLoader {
        // Already-defined classes, keyed by binary name.
        final Map<String, Class<?>> m_cache = new HashMap<String, Class<?>>();
        // Binary names of all .class entries currently in the jar.
        final Set<String> m_classNames = new HashSet<String>();

        // Track a newly added/updated .class entry by its binary class name.
        void noteUpdated(String key) {
            if (!key.endsWith(".class"))
                return;
            // FIX(review): jar entry names always use '/' separators
            // regardless of platform; File.separatorChar broke this on
            // Windows ('\\' never appears in entry names).
            String javaClassName = key.replace('/', '.');
            javaClassName = javaClassName.substring(0, javaClassName.length() - 6);
            m_classNames.add(javaClassName);
        }

        void noteRemoved(String key) {
            if (!key.endsWith(".class"))
                return;
            String javaClassName = key.replace('/', '.');
            javaClassName = javaClassName.substring(0, javaClassName.length() - 6);
            m_classNames.remove(javaClassName);
            m_cache.remove(javaClassName);
        }

        // prevent this from being publicly called
        JarLoader() {}

        /**
         * @return The InMemoryJarFile instance owning this loader.
         */
        public InMemoryJarfile getInMemoryJarfile() {
            return InMemoryJarfile.this;
        }

        @Override
        public synchronized Class<?> loadClass(String className) throws ClassNotFoundException {
            // try the fast cache first
            Class<?> result;
            if (m_cache.containsKey(className)) {
                return m_cache.get(className);
            }
            // now look through the list
            if (m_classNames.contains(className)) {
                // Jar entry paths use '/' on every platform.
                String classPath = className.replace('.', '/') + ".class";
                byte bytes[] = get(classPath);
                if (bytes == null)
                    throw new ClassNotFoundException(className);
                result = this.defineClass(className, bytes, 0, bytes.length);
                resolveClass(result);
                m_cache.put(className, result);
                return result;
            }
            // default to parent
            return getParent().loadClass(className);
        }

        /**
         * For a given class, find all of its nested (inner) classes known
         * to this loader.
         */
        public String[] getInnerClassesForClass(String className) {
            List<String> matches = new ArrayList<>();
            // FIX(review): nested classes use '$' in their binary names
            // (e.g. Outer$Inner) — matching on '.' never found any; this is
            // also consistent with the "\\$" regex used by the compiler's
            // jar-scanning path.
            for (String potential : m_classNames) {
                if (potential.startsWith(className + "$")) {
                    matches.add(potential);
                }
            }
            return matches.toArray(new String[0]);
        }
    }

    public JarLoader getLoader() {
        return m_loader;
    }

    ///////////////////////////////////////////////////////
    // OVERRIDDEN TREEMAP OPERATIONS
    ///////////////////////////////////////////////////////

    @Override
    public byte[] put(String key, byte[] value) {
        if (value == null)
            throw new RuntimeException("InMemoryJarFile cannot contain null entries.");
        byte[] retval = super.put(key, value);
        m_loader.noteUpdated(key);
        return retval;
    }

    @Override
    public void putAll(Map<? extends String, ? extends byte[]> m) {
        // Route through put() so the class loader's bookkeeping stays current.
        for (Entry<? extends String, ? extends byte[]> e : m.entrySet()) {
            put(e.getKey(), e.getValue());
        }
    }

    @Override
    public byte[] remove(Object key) {
        String realKey = null;
        try {
            realKey = (String) key;
        }
        catch (Exception e) {
            // Non-String keys can never be present.
            return null;
        }
        m_loader.noteRemoved(realKey);
        return super.remove(key);
    }

    @Override
    public void clear() {
        for (String key : keySet())
            m_loader.noteRemoved(key);
        super.clear();
    }

    // Operations that would bypass the class loader's bookkeeping or
    // duplicate it inconsistently are unsupported.
    @Override
    public Object clone() {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.Map.Entry<String, byte[]> pollFirstEntry() {
        throw new UnsupportedOperationException();
    }

    @Override
    public java.util.Map.Entry<String, byte[]> pollLastEntry() {
        throw new UnsupportedOperationException();
    }
}
|
package com.exedio.cope;
/**
 * Exercises COPE item-deletion policies: FORBID (delete is rejected while
 * referenced) and NULLIFY (referencing attributes are set to null).
 */
public class DeleteTest extends AbstractLibTest
{
    public DeleteTest()
    {
        super(Main.deleteModel);
    }

    private DeleteItem item;
    private DeleteOtherItem other;

    public void setUp() throws Exception
    {
        super.setUp();
    }

    public void testForbid() throws ConstraintViolationException
    {
        // NOTE(review): item/other are still null here; these accesses work
        // only if selfForbid etc. are static members reachable through an
        // instance expression — confirm against the DeleteItem model classes.
        assertEquals(Item.FORBID, item.selfForbid.getDeletePolicy());
        assertEquals(Item.FORBID, item.otherForbid.getDeletePolicy());
        assertEqualsUnmodifiable(list(item.selfForbid, item.selfNullify), item.TYPE.getReferences());
        assertEqualsUnmodifiable(list(item.otherForbid, item.otherNullify), other.TYPE.getReferences());

        // other type: deleting a referenced item of another type must fail.
        other = new DeleteOtherItem("other");
        item = new DeleteItem("item");
        item.setOtherForbid(other);
        assertDeleteFails(other, item.otherForbid);

        // other item: same-type reference also forbids deletion.
        DeleteItem item2 = new DeleteItem("item2");
        item.setOtherForbid(null);
        item.setSelfForbid(item2);
        assertDeleteFails(item2, item.selfForbid);

        // same item: self-reference behavior is database-dependent.
        item.setSelfForbid(item);
        if(hsqldb||mysql)
        {
            assertDeleteFails(item, item.selfForbid);
            item.setSelfForbid(null);
        }
        assertDelete(item);
        assertDelete(other);
        assertDelete(item2);
    }

    public void testNullify() throws ConstraintViolationException
    {
        assertEquals(Item.NULLIFY, item.selfNullify.getDeletePolicy());
        assertEquals(Item.NULLIFY, item.otherNullify.getDeletePolicy());

        // other type: deleting the target nulls the referencing attribute.
        item = new DeleteItem("itema");
        other = new DeleteOtherItem("other");
        item.setOtherNullify(other);
        assertEquals(other, item.getOtherNullify());
        assertDelete(other);
        assertEquals(null, item.getOtherNullify());

        // other item: same for a same-type reference.
        DeleteItem item2 = new DeleteItem("item");
        item.setSelfNullify(item2);
        assertEquals(item2, item.getSelfNullify());
        assertDelete(item2);
        assertEquals(null, item.getSelfNullify());

        // same item: a self-reference must not block its own deletion.
        item.setSelfNullify(item);
        assertDelete(item);
    }

    // Assert that deleting the item throws IntegrityViolationException and
    // leaves the item in place.
    void assertDeleteFails(final Item item, final ItemAttribute attribute)
    {
        try
        {
            item.deleteCopeItem();
            fail("should have thrown IntegrityViolationException");
        }
        catch(IntegrityViolationException e)
        {
            // MySQL does not report the violated attribute.
            assertEquals(mysql ? null : attribute, e.getAttribute());
            assertEquals(null/*TODO*/, e.getItem());
        }
        assertTrue(item.existsCopeItem());
    }
}
|
package jolie.lang.parse;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import jolie.lang.Constants;
import jolie.lang.NativeType;
import jolie.lang.parse.ast.AndConditionNode;
import jolie.lang.parse.ast.AssignStatement;
import jolie.lang.parse.ast.CompareConditionNode;
import jolie.lang.parse.ast.CompensateStatement;
import jolie.lang.parse.ast.ConstantIntegerExpression;
import jolie.lang.parse.ast.ConstantRealExpression;
import jolie.lang.parse.ast.ConstantStringExpression;
import jolie.lang.parse.ast.CorrelationSetInfo;
import jolie.lang.parse.ast.CurrentHandlerStatement;
import jolie.lang.parse.ast.DeepCopyStatement;
import jolie.lang.parse.ast.DefinitionCallStatement;
import jolie.lang.parse.ast.DefinitionNode;
import jolie.lang.parse.ast.EmbeddedServiceNode;
import jolie.lang.parse.ast.ExecutionInfo;
import jolie.lang.parse.ast.ExitStatement;
import jolie.lang.parse.ast.ExpressionConditionNode;
import jolie.lang.parse.ast.ForEachStatement;
import jolie.lang.parse.ast.ForStatement;
import jolie.lang.parse.ast.IfStatement;
import jolie.lang.parse.ast.InstallFixedVariableExpressionNode;
import jolie.lang.parse.ast.InstallFunctionNode;
import jolie.lang.parse.ast.InstallStatement;
import jolie.lang.parse.ast.IsTypeExpressionNode;
import jolie.lang.parse.ast.LinkInStatement;
import jolie.lang.parse.ast.LinkOutStatement;
import jolie.lang.parse.ast.NDChoiceStatement;
import jolie.lang.parse.ast.NotConditionNode;
import jolie.lang.parse.ast.NotificationOperationStatement;
import jolie.lang.parse.ast.NullProcessStatement;
import jolie.lang.parse.ast.OLSyntaxNode;
import jolie.lang.parse.ast.OneWayOperationDeclaration;
import jolie.lang.parse.ast.OneWayOperationStatement;
import jolie.lang.parse.ast.OrConditionNode;
import jolie.lang.parse.ast.OutputPortInfo;
import jolie.lang.parse.ast.ParallelStatement;
import jolie.lang.parse.ast.PointerStatement;
import jolie.lang.parse.ast.PostDecrementStatement;
import jolie.lang.parse.ast.PostIncrementStatement;
import jolie.lang.parse.ast.PreDecrementStatement;
import jolie.lang.parse.ast.PreIncrementStatement;
import jolie.lang.parse.ast.ProductExpressionNode;
import jolie.lang.parse.ast.Program;
import jolie.lang.parse.ast.RequestResponseOperationDeclaration;
import jolie.lang.parse.ast.RequestResponseOperationStatement;
import jolie.lang.parse.ast.Scope;
import jolie.lang.parse.ast.SequenceStatement;
import jolie.lang.parse.ast.InputPortInfo;
import jolie.lang.parse.ast.InterfaceDefinition;
import jolie.lang.parse.ast.OperationCollector;
import jolie.lang.parse.ast.PortInfo;
import jolie.lang.parse.ast.SolicitResponseOperationStatement;
import jolie.lang.parse.ast.SpawnStatement;
import jolie.lang.parse.ast.SumExpressionNode;
import jolie.lang.parse.ast.SynchronizedStatement;
import jolie.lang.parse.ast.ThrowStatement;
import jolie.lang.parse.ast.TypeCastExpressionNode;
import jolie.lang.parse.ast.UndefStatement;
import jolie.lang.parse.ast.ValueVectorSizeExpressionNode;
import jolie.lang.parse.ast.VariableExpressionNode;
import jolie.lang.parse.ast.VariablePathNode;
import jolie.lang.parse.ast.WhileStatement;
import jolie.lang.parse.ast.types.TypeDefinition;
import jolie.lang.parse.ast.types.TypeDefinitionLink;
import jolie.lang.parse.ast.types.TypeDefinitionUndefined;
import jolie.lang.parse.ast.types.TypeInlineDefinition;
import jolie.util.Pair;
import jolie.util.Range;
/** Parser for a .ol file.
* @author Fabrizio Montesi
*
*/
public class OLParser extends AbstractParser
{
// Root of the abstract syntax tree being built by this parser.
private final Program program = new Program( new ParsingContext() );
// Named constants declared via "constants" blocks or injected via putConstants().
private final Map< String, Scanner.Token > constantsMap =
	new HashMap< String, Scanner.Token >();
// NOTE(review): not referenced in this portion of the file; presumably set
// while parsing install(...) handler bodies — verify against the rest of the file.
private boolean insideInstallFunction = false;
// Search paths for include directives; element 0 is the program's own directory
// (see parseInclude()). Temporarily extended while parsing an included file.
private String[] includePaths;
// Interfaces declared so far, keyed by interface name.
private final Map< String, InterfaceDefinition > interfaces =
	new HashMap< String, InterfaceDefinition >();
// Types declared so far; pre-populated with the native types and "undefined".
private final Map< String, TypeDefinition > definedTypes = createTypeDeclarationMap();
// Class loader used to resolve includes from the classpath.
private final ClassLoader classLoader;
// Last documentation comment read; attached to the next interface/port parsed.
private String comment;
// Whether `comment` holds a pending documentation comment.
// NOTE(review): identifier likely meant "commentIsPresent".
private boolean commentIsPreset;
/**
 * Creates a parser reading from the given scanner.
 *
 * @param scanner the scanner supplying the token stream
 * @param includePaths the paths searched by include directives
 *        (element 0 is expected to be the program's own directory)
 * @param classLoader the class loader used to resolve classpath includes
 */
public OLParser( Scanner scanner, String[] includePaths, ClassLoader classLoader )
{
	super( scanner );
	this.classLoader = classLoader;
	this.includePaths = includePaths;
}
/**
 * Merges the given constants into this parser's constant map,
 * overwriting any entries with the same name.
 *
 * @param constantsToPut the constants to add
 */
public void putConstants( Map< String, Scanner.Token > constantsToPut )
{
	for( Map.Entry< String, Scanner.Token > entry : constantsToPut.entrySet() ) {
		constantsMap.put( entry.getKey(), entry.getValue() );
	}
}
/**
 * Creates the initial type declaration map, pre-populated with every
 * supported native type (string, int, double, ...) and the special
 * {@code undefined} type.
 *
 * @return a fresh, mutable map from type name to its definition
 */
public static Map< String, TypeDefinition > createTypeDeclarationMap()
{
	Map< String, TypeDefinition > definedTypes = new HashMap< String, TypeDefinition >();
	// Fill in definedTypes with all the supported native types (string, int, double, ...)
	for( NativeType type : NativeType.values() ) {
		definedTypes.put( type.id(), new TypeInlineDefinition( new ParsingContext(), type.id(), type, Constants.RANGE_ONE_TO_ONE ) );
	}
	definedTypes.put( TypeDefinitionUndefined.UNDEFINED_KEYWORD, TypeDefinitionUndefined.getInstance() );
	return definedTypes;
}
/**
 * Parses the whole input and returns the resulting program.
 * Any accumulated init{...} bodies are wrapped into an "init" definition
 * before the main definition is appended.
 *
 * @return the parsed program AST
 * @throws IOException if reading the input fails
 * @throws ParserException if the input is not a valid program
 */
public Program parse()
	throws IOException, ParserException
{
	_parse();
	if ( initSequence != null ) {
		DefinitionNode initDefinition = new DefinitionNode( getContext(), "init", initSequence );
		program.addChild( initDefinition );
	}
	program.addChild( main );
	return program;
}
/**
 * Parses the top-level sections of a program — constants, execution,
 * correlation sets, types, interfaces/ports, embedded services and code —
 * in any order, with include directives allowed between sections.
 * Repeats full passes until one makes no progress, then requires EOF.
 *
 * @throws IOException if reading the input fails
 * @throws ParserException on invalid input
 */
private void _parse()
	throws IOException, ParserException
{
	//parseComment(); // NOTE: parsing the comment here is not a good solution; its position should be reconsidered.
	getToken();
	Scanner.Token t;
	do {
		t = token; // remember where this pass started
		parseInclude();
		parseConstants();
		parseInclude();
		parseExecution();
		parseInclude();
		parseCorrelationSet();
		parseInclude();
		parseTypes();
		parseInclude();
		parseInterfaceOrPort();
		parseInclude();
		parseEmbedded();
		parseInclude();
		parseCode();
	} while( t != token ); // fixpoint: stop when a whole pass consumed nothing
	if ( t.isNot( Scanner.TokenType.EOF ) ) {
		throwException( "Invalid token encountered" );
	}
}
/**
 * Parses zero or more top-level {@code type Name: ...} declarations,
 * registering each in {@link #definedTypes} and adding it to the program.
 */
private void parseTypes()
	throws IOException, ParserException
{
	String typeName;
	TypeDefinition currentType;
	while( token.isKeyword( "type" ) ) {
		getToken();
		typeName = token.content();
		eat( Scanner.TokenType.ID, "expected type name" );
		eat( Scanner.TokenType.COLON, "expected COLON (cardinality not allowed in root type declaration, it is fixed to [1,1])" );
		NativeType nativeType = readNativeType();
		if ( nativeType == null ) { // It's a user-defined type
			// NOTE(review): definedTypes.get may return null if the referenced
			// type has not been declared yet; no error is raised here —
			// confirm whether forward references are resolved elsewhere.
			currentType = new TypeDefinitionLink( getContext(), typeName, Constants.RANGE_ONE_TO_ONE, definedTypes.get( token.content() ) );
			getToken();
		} else {
			currentType = new TypeInlineDefinition( getContext(), typeName, nativeType, Constants.RANGE_ONE_TO_ONE );
			getToken();
			if ( token.is( Scanner.TokenType.LCURLY ) ) { // We have sub-types to parse
				parseSubTypes( (TypeInlineDefinition)currentType );
			}
		}
		// Keep track of the root types to support them in successive type declarations
		definedTypes.put( typeName, currentType );
		program.addChild( currentType );
	}
}
/**
 * Maps the current token onto a native type.
 * Cast tokens (int, real, string keywords) are translated directly;
 * any other token is looked up by its textual content.
 *
 * @return the native type, or null if the token names no native type
 */
private NativeType readNativeType()
{
	NativeType nativeType;
	if ( token.is( Scanner.TokenType.CAST_INT ) ) {
		nativeType = NativeType.INT;
	} else if ( token.is( Scanner.TokenType.CAST_REAL ) ) {
		nativeType = NativeType.DOUBLE;
	} else if ( token.is( Scanner.TokenType.CAST_STRING ) ) {
		nativeType = NativeType.STRING;
	} else {
		// May be null, signalling a user-defined type to the caller.
		nativeType = NativeType.fromString( token.content() );
	}
	return nativeType;
}
/**
 * Parses the { ... } sub-type block of an inline type definition.
 * A lone {@code ?} marks the sub-types as untyped; otherwise each entry
 * is parsed via {@link #parseSubType()} and duplicate names are rejected.
 *
 * @param type the inline definition receiving the parsed sub-types
 */
private void parseSubTypes( TypeInlineDefinition type )
	throws IOException, ParserException
{
	eat( Scanner.TokenType.LCURLY, "expected {" );
	if ( token.is( Scanner.TokenType.QUESTION_MARK ) ) {
		type.setUntypedSubTypes( true );
		getToken();
	} else {
		TypeDefinition currentSubType;
		while( !token.is( Scanner.TokenType.RCURLY ) ) {
			currentSubType = parseSubType();
			if ( type.hasSubType( currentSubType.id() ) ) {
				throwException( "sub-type " + currentSubType.id() + " conflicts with another sub-type with the same name" );
			}
			type.putSubType( currentSubType );
		}
	}
	eat( Scanner.TokenType.RCURLY, "RCURLY expected" );
}
/**
 * Parses one sub-type entry of the form {@code .name[cardinality]: type},
 * returning either a link to a user-defined type or an inline definition
 * (which may itself carry nested sub-types).
 */
private TypeDefinition parseSubType()
	throws IOException, ParserException
{
	eat( Scanner.TokenType.DOT, "sub-type syntax error (dot not found)" );
	// SubType id
	String id = token.content();
	eat( Scanner.TokenType.ID, "expected type name" );
	Range cardinality = parseCardinality();
	eat( Scanner.TokenType.COLON, "expected COLON" );
	NativeType nativeType = readNativeType();
	if ( nativeType == null ) { // It's a user-defined type
		// NOTE(review): as in parseTypes(), definedTypes.get may return null
		// for a not-yet-declared type — no check is performed here.
		TypeDefinitionLink linkedSubType;
		linkedSubType = new TypeDefinitionLink( getContext(), id, cardinality, definedTypes.get( token.content() ) );
		getToken();
		return linkedSubType;
	} else {
		getToken();
		TypeInlineDefinition inlineSubType = new TypeInlineDefinition( getContext(), id, nativeType, cardinality );
		if ( token.is( Scanner.TokenType.LCURLY ) ) { // Has ulterior sub-types
			parseSubTypes( inlineSubType );
		}
		return inlineSubType;
	}
}
/**
 * Parses the cardinality annotation of a sub-type declaration.
 * Accepted forms: nothing (defaults to [1,1]), {@code ?} ([0,1]),
 * {@code *} ([0,inf]) or an explicit {@code [min,max]} range where
 * {@code max} may be {@code *}.
 *
 * @return the parsed (min, max) range
 * @throws ParserException on a malformed or out-of-range cardinality
 */
private Range parseCardinality()
	throws IOException, ParserException
{
	int min = -1;
	int max = -1;
	if ( token.is( Scanner.TokenType.COLON ) ) { // Default (no cardinality specified)
		min = 1;
		max = 1;
	} else if ( token.is( Scanner.TokenType.QUESTION_MARK ) ) {
		min = 0;
		max = 1;
		getToken();
	} else if ( token.is( Scanner.TokenType.ASTERISK ) ) {
		min = 0;
		max = Integer.MAX_VALUE;
		getToken();
	} else if ( token.is( Scanner.TokenType.LSQUARE ) ) {
		getToken(); // eat [
		// Minimum
		assertToken( Scanner.TokenType.INT, "expected int value" );
		min = Integer.parseInt( token.content() );
		if ( min < 0 ) {
			// Fixed spelling: "occurences" -> "occurrences"
			throwException( "Minimum number of occurrences of a sub-type must be positive or zero" );
		}
		getToken();
		eat( Scanner.TokenType.COMMA, "expected comma separator" );
		// Maximum
		if ( token.is( Scanner.TokenType.INT ) ) {
			max = Integer.parseInt( token.content() );
			if ( max < 1 ) {
				throwException( "Maximum number of occurrences of a sub-type must be positive" );
			}
		} else if ( token.is( Scanner.TokenType.ASTERISK ) ) {
			max = Integer.MAX_VALUE;
		} else {
			throwException( "Maximum number of sub-type occurrences not valid: " + token.content() );
		}
		getToken();
		eat( Scanner.TokenType.RSQUARE, "expected ]" );
	} else {
		throwException( "Sub-type cardinality syntax error" );
	}
	return new Range( min, max );
}
/**
 * Parses an optional {@code embedded { ... }} block, adding an
 * {@link EmbeddedServiceNode} to the program for every service path.
 * Each entry has the form {@code Type: "path" [in portId]} where Type is
 * Java, Jolie or JavaScript; entries are comma-separated.
 */
private void parseEmbedded()
	throws IOException, ParserException
{
	if ( token.isKeyword( "embedded" ) ) {
		String servicePath, portId;
		getToken();
		eat( Scanner.TokenType.LCURLY, "expected {" );
		boolean keepRun = true;
		Constants.EmbeddedServiceType type;
		while ( keepRun ) {
			type = null;
			if ( token.isKeyword( "Java" ) ) {
				type = Constants.EmbeddedServiceType.JAVA;
			} else if ( token.isKeyword( "Jolie" ) ) {
				type = Constants.EmbeddedServiceType.JOLIE;
			} else if ( token.isKeyword( "JavaScript" ) ) {
				type = Constants.EmbeddedServiceType.JAVASCRIPT;
			}
			if ( type == null ) {
				// Not a service-type keyword: the block is finished.
				keepRun = false;
			} else {
				getToken();
				eat( Scanner.TokenType.COLON, "expected : after embedded service type" );
				checkConstant();
				while ( token.is( Scanner.TokenType.STRING ) ) {
					servicePath = token.content();
					getToken();
					if ( token.isKeyword( "in" ) ) {
						eatKeyword( "in", "expected in" );
						assertToken( Scanner.TokenType.ID, "expected output port name" );
						portId = token.content();
						getToken();
					} else {
						portId = null; // no binding output port specified
					}
					program.addChild(
						new EmbeddedServiceNode(
							getContext(),
							type,
							servicePath,
							portId ) );
					if ( token.is( Scanner.TokenType.COMMA ) ) {
						getToken();
					} else {
						break;
					}
				}
			}
		}
		eat( Scanner.TokenType.RCURLY, "expected }" );
	}
}
/**
 * Parses an optional {@code cset { ... }} block into a
 * {@link CorrelationSetInfo} node. Each comma-separated entry is a
 * variable path optionally followed by {@code :} and a list of aliased
 * variable paths.
 */
private void parseCorrelationSet()
	throws IOException, ParserException
{
	if ( token.isKeyword( "cset" ) ) {
		getToken();
		eat( Scanner.TokenType.LCURLY, "expected {" );
		Set<List<VariablePathNode>> cset = new HashSet<List<VariablePathNode>>();
		List<VariablePathNode> list;
		while ( token.is( Scanner.TokenType.ID ) ) {
			list = new LinkedList<VariablePathNode>();
			list.add( parseVariablePath() );
			if ( token.is( Scanner.TokenType.COLON ) ) {
				// Aliases for the correlation variable.
				getToken();
				while ( token.is( Scanner.TokenType.ID ) ) {
					list.add( parseVariablePath() );
				}
			}
			cset.add( list );
			if ( token.is( Scanner.TokenType.COMMA ) ) {
				getToken();
			} else {
				break;
			}
		}
		program.addChild( new CorrelationSetInfo( getContext(), cset ) );
		eat( Scanner.TokenType.RCURLY, "expected }" );
	}
}
/**
 * Parses an optional {@code execution { modality }} block, where the
 * modality is one of sequential, concurrent or single, and adds the
 * corresponding {@link ExecutionInfo} to the program.
 */
private void parseExecution()
	throws IOException, ParserException
{
	if ( token.is( Scanner.TokenType.EXECUTION ) ) {
		Constants.ExecutionMode mode = Constants.ExecutionMode.SEQUENTIAL;
		getToken();
		eat( Scanner.TokenType.LCURLY, "{ expected" );
		assertToken( Scanner.TokenType.ID, "expected execution modality" );
		if ( "sequential".equals( token.content() ) ) {
			mode = Constants.ExecutionMode.SEQUENTIAL;
		} else if ( "concurrent".equals( token.content() ) ) {
			mode = Constants.ExecutionMode.CONCURRENT;
		} else if ( "single".equals( token.content() ) ) {
			mode = Constants.ExecutionMode.SINGLE;
		} else {
			throwException( "Expected execution mode, found " + token.content() );
		}
		program.addChild( new ExecutionInfo( getContext(), mode ) );
		getToken();
		eat( Scanner.TokenType.RCURLY, "} expected" );
	}
}
/**
 * Parses an optional {@code constants { name = value, ... }} block.
 * Values may be string, integer, double or identifier tokens.
 * A constant already present in the map (e.g. supplied via
 * {@link #putConstants}) is NOT overwritten.
 */
private void parseConstants()
	throws IOException, ParserException
{
	if ( token.is( Scanner.TokenType.CONSTANTS ) ) {
		getToken();
		eat( Scanner.TokenType.LCURLY, "expected {" );
		boolean keepRun = true;
		while ( token.is( Scanner.TokenType.ID ) && keepRun ) {
			String cId = token.content();
			getToken();
			eat( Scanner.TokenType.ASSIGN, "expected =" );
			if ( token.isValidConstant() == false ) {
				throwException( "expected string, integer, double or identifier constant" );
			}
			if ( constantsMap.containsKey( cId ) == false ) {
				// First definition wins; externally supplied constants take precedence.
				constantsMap.put( cId, token );
			}
			getToken();
			if ( token.isNot( Scanner.TokenType.COMMA ) ) {
				keepRun = false;
			} else {
				getToken();
			}
		}
		eat( Scanner.TokenType.RCURLY, "expected }" );
	}
}
/**
 * Immutable holder for a resolved include: the open stream on its content
 * and the directory it was found in (null when resolved from a URL or the
 * classpath).
 */
private static class IncludeFile {
	private final String parentPath;
	private final InputStream inputStream;
	private IncludeFile( InputStream inputStream, String parentPath )
	{
		this.parentPath = parentPath;
		this.inputStream = inputStream;
	}
	private String getParentPath()
	{
		return parentPath;
	}
	private InputStream getInputStream()
	{
		return inputStream;
	}
}
/**
 * Tries to open {@code filename} under {@code path}, first as a plain
 * file ({@code path/filename}), then — on failure — as a URL built by
 * concatenating path and filename (with special handling for jap:/jar:
 * URLs so that the embedded path can be normalized).
 *
 * @param path the base path to resolve against
 * @param filename the file to include
 * @return the resolved include, or null if it could not be opened
 */
private static IncludeFile retrieveIncludeFile( String path, String filename )
{
	IncludeFile ret = null;
	File f = new File(
		new StringBuilder()
			.append( path )
			.append( Constants.fileSeparator )
			.append( filename )
			.toString()
	);
	try {
		ret = new IncludeFile(
			new BufferedInputStream( new FileInputStream( f ) ),
			f.getParent()
		);
	} catch( FileNotFoundException e ) {
		// Not a plain file: fall back to URL resolution.
		try {
			String urlStr =
				new StringBuilder()
					.append( path )
					.append( filename )
					.toString();
			URL url = null;
			if ( urlStr.startsWith( "jap:" ) || urlStr.startsWith( "jar:" ) ) {
				/*
				 * We need the embedded URL path, otherwise URI.normalize
				 * is going to do nothing.
				 */
				url = new URL(
					urlStr.substring( 0,4 ) + new URI( urlStr.substring( 4 ) ).normalize().toString()
				);
			} else {
				url = new URL( new URI( urlStr ).normalize().toString() );
			}
			ret = new IncludeFile(
				url.openStream(),
				path
			);
		// Best-effort resolution: any failure below simply yields ret == null,
		// and the caller tries the next include path.
		} catch( MalformedURLException mue ) {
		} catch( IOException ioe ) {
		} catch( URISyntaxException use ) {}
	}
	return ret;
}
/**
 * Parses zero or more {@code include "file"} directives. Resolution
 * order: the program's own directory (includePaths[0]), then the
 * classpath, then the remaining include paths. The included file is
 * parsed recursively with its parent directory temporarily appended to
 * the include paths, then the previous scanner and paths are restored.
 */
private void parseInclude()
	throws IOException, ParserException
{
	String[] origIncludePaths;
	IncludeFile includeFile;
	while ( token.is( Scanner.TokenType.INCLUDE ) ) {
		getToken();
		Scanner oldScanner = scanner();
		assertToken( Scanner.TokenType.STRING, "expected filename to include" );
		String includeStr = token.content();
		includeFile = null;
		// Try the same directory of the program file first.
		// NOTE(review): the guard is length > 1, so includePaths[0] is never
		// tried when only one path is configured — confirm this is intended.
		if ( includePaths.length > 1 ) {
			includeFile = retrieveIncludeFile( includePaths[0], includeStr );
		}
		if ( includeFile == null ) {
			// Fall back to the classpath.
			URL includeURL = classLoader.getResource( includeStr );
			if ( includeURL != null ) {
				includeFile = new IncludeFile( includeURL.openStream(), null );
			}
		}
		// Finally, try the remaining include paths in order.
		for ( int i = 1; i < includePaths.length && includeFile == null; i++ ) {
			includeFile = retrieveIncludeFile( includePaths[i], includeStr );
		}
		if ( includeFile == null ) {
			throwException( "File not found: " + includeStr );
		}
		origIncludePaths = includePaths;
		setScanner( new Scanner( includeFile.getInputStream(), includeStr ) );
		if ( includeFile.getParentPath() == null ) {
			includePaths = Arrays.copyOf( origIncludePaths, origIncludePaths.length );
		} else {
			// Make the included file's directory visible to nested includes.
			includePaths = Arrays.copyOf( origIncludePaths, origIncludePaths.length + 1 );
			includePaths[ origIncludePaths.length ] = includeFile.getParentPath();
		}
		_parse();
		// Restore the outer parsing state.
		includePaths = origIncludePaths;
		setScanner( oldScanner );
		getToken();
	}
}
/**
 * If the current token is an identifier naming a predefined or
 * user-declared constant, replaces the current token with the constant's
 * value token.
 *
 * @return true if a substitution took place, false otherwise
 */
private boolean checkConstant()
{
	if ( !token.is( Scanner.TokenType.ID ) ) {
		return false;
	}
	// Predefined constants take precedence over user-declared ones.
	Scanner.Token replacement;
	Constants.Predefined predefined = Constants.Predefined.get( token.content() );
	if ( predefined != null ) {
		replacement = predefined.token();
	} else {
		replacement = constantsMap.get( token.content() );
	}
	if ( replacement == null ) {
		return false;
	}
	token = replacement;
	return true;
}
/**
 * Parses an inputPort or outputPort declaration, if the current token
 * starts one, and adds it to the program.
 *
 * @return the parsed port, or null if the current token starts no port
 */
private PortInfo parsePort()
	throws IOException, ParserException
{
	PortInfo portInfo = null;
	if ( token.isKeyword( "inputPort" ) ) {
		portInfo = parseInputPortInfo();
	} else if ( token.isKeyword( "outputPort" ) ) {
		getToken();
		assertToken( Scanner.TokenType.ID, "expected output port identifier" );
		OutputPortInfo p = new OutputPortInfo( getContext(), token.content() );
		getToken();
		eat( Scanner.TokenType.LCURLY, "expected {" );
		parseOutputPortInfo( p );
		program.addChild( p );
		eat( Scanner.TokenType.RCURLY, "expected }" );
		portInfo = p;
	}
	return portInfo;
}
/**
 * Parses a run of interface and port declarations, attaching any
 * documentation comment immediately preceding a declaration to the
 * declared node.
 */
private void parseInterfaceOrPort()
	throws IOException, ParserException
{
	boolean keepRun = true;
	DocumentedNode node = null;
	commentIsPreset = false;
	while( keepRun ) {
		if ( token.is( Scanner.TokenType.DOCUMENTATION_COMMENT ) ) {
			// Remember the comment; it documents the next declaration.
			commentIsPreset = true;
			comment = token.content();
			getToken();
		} else if ( token.isKeyword( "interface" ) ) {
			node = parseInterface();
		} else if ( token.isKeyword( "inputPort" ) ) {
			node = parsePort();
		} else if ( token.isKeyword( "outputPort" ) ) {
			node = parsePort();
		} else {
			keepRun = false;
		}
		if ( commentIsPreset && node != null ) {
			// Attach the pending comment to the declaration just parsed.
			node.setDocumentation( comment );
			commentIsPreset = false;
			node = null;
		}
	}
}
/**
 * Parses an {@code inputPort Name { ... }} declaration, handling its
 * Location, Protocol (with optional inline configuration), Interfaces,
 * Redirects and Aggregates sections plus directly declared operations.
 * Validates that a location is present, that at least one operation,
 * interface, aggregation or redirection is declared, and that a protocol
 * is given unless the location is the local one. The resulting
 * {@link InputPortInfo} is added to the program.
 *
 * @return the parsed input port
 */
private InputPortInfo parseInputPortInfo()
	throws IOException, ParserException
{
	String inputPortName;
	String protocolId;
	URI inputPortLocation;
	List< String > interfacesList = new ArrayList< String >();
	OLSyntaxNode protocolConfiguration = new NullProcessStatement( getContext() );
	getToken();
	assertToken( Scanner.TokenType.ID, "expected inputPort name" );
	inputPortName = token.content();
	getToken();
	eat( Scanner.TokenType.LCURLY, "{ expected" );
	// Collects operations declared directly in the port body and those
	// copied from referenced interfaces.
	InterfaceDefinition iface = new InterfaceDefinition( getContext(), "Internal interface for: " + inputPortName );
	inputPortLocation = null;
	protocolId = null;
	Map<String, String> redirectionMap = new HashMap<String, String>();
	List< String > aggregationList = new LinkedList< String >();
	while ( token.isNot( Scanner.TokenType.RCURLY ) ) {
		if ( token.is( Scanner.TokenType.OP_OW ) ) {
			parseOneWayOperations( iface );
		} else if ( token.is( Scanner.TokenType.OP_RR ) ) {
			parseRequestResponseOperations( iface );
		} else if ( token.isKeyword( "Location" ) ) {
			if ( inputPortLocation != null ) {
				throwException( "Location already defined for service " + inputPortName );
			}
			getToken();
			eat( Scanner.TokenType.COLON, "expected : after Location" );
			checkConstant();
			assertToken( Scanner.TokenType.STRING, "expected inputPort location string" );
			try {
				inputPortLocation = new URI( token.content() );
			} catch ( URISyntaxException e ) {
				throwException( e );
			}
			getToken();
		} else if ( token.isKeyword( "Interfaces" ) ) {
			getToken();
			eat( Scanner.TokenType.COLON, "expected : after Interfaces" );
			boolean keepRun = true;
			while( keepRun ) {
				assertToken( Scanner.TokenType.ID, "expected interface name" );
				interfacesList.add( token.content() );
				InterfaceDefinition i = interfaces.get( token.content() );
				if ( i == null ) {
					throwException( "Invalid interface name: " + token.content() );
				}
				// Copy the interface's operations into this port's interface.
				i.copyTo( iface );
				getToken();
				if ( token.is( Scanner.TokenType.COMMA ) ) {
					getToken();
				} else {
					keepRun = false;
				}
			}
		} else if ( token.isKeyword( "Protocol" ) ) {
			if ( protocolId != null ) {
				throwException( "Protocol already defined for inputPort " + inputPortName );
			}
			getToken();
			eat( Scanner.TokenType.COLON, "expected :" );
			checkConstant();
			assertToken( Scanner.TokenType.ID, "expected protocol identifier" );
			protocolId = token.content();
			getToken();
			if ( token.is( Scanner.TokenType.LCURLY ) ) {
				// Push back a synthetic variable-path prefix
				// (global.inputPorts.<name>.protocol) so the inline protocol
				// configuration is parsed relative to that path.
				addTokens( Arrays.asList(
					new Scanner.Token( Scanner.TokenType.ID, Constants.GLOBAL ),
					new Scanner.Token( Scanner.TokenType.DOT ),
					new Scanner.Token( Scanner.TokenType.ID, Constants.INPUT_PORTS_NODE_NAME ),
					new Scanner.Token( Scanner.TokenType.DOT ),
					new Scanner.Token( Scanner.TokenType.ID, inputPortName ),
					new Scanner.Token( Scanner.TokenType.DOT ),
					new Scanner.Token( Scanner.TokenType.ID, Constants.PROTOCOL_NODE_NAME ),
					token ) );
				// Protocol configuration
				getToken();
				protocolConfiguration = parseInVariablePathProcess( false );
			}
		} else if ( token.isKeyword( "Redirects" ) ) {
			getToken();
			eat( Scanner.TokenType.COLON, "expected :" );
			String subLocationName;
			while ( token.is( Scanner.TokenType.ID ) ) {
				subLocationName = token.content();
				getToken();
				eat( Scanner.TokenType.ARROW, "expected =>" );
				assertToken( Scanner.TokenType.ID, "expected outputPort identifier" );
				redirectionMap.put( subLocationName, token.content() );
				getToken();
				if ( token.is( Scanner.TokenType.COMMA ) ) {
					getToken();
				} else {
					break;
				}
			}
		} else if ( token.isKeyword( "Aggregates" ) ) {
			getToken();
			eat( Scanner.TokenType.COLON, "expected :" );
			while ( token.is( Scanner.TokenType.ID ) ) {
				aggregationList.add( token.content() );
				getToken();
				if ( token.is( Scanner.TokenType.COMMA ) ) {
					getToken();
				} else {
					break;
				}
			}
		} else {
			throwException( "Unrecognized token in inputPort " + inputPortName );
		}
	}
	eat( Scanner.TokenType.RCURLY, "} expected" );
	// Semantic validation of the collected sections.
	if ( inputPortLocation == null ) {
		throwException( "expected location URI for " + inputPortName );
	} else if ( iface.operationsMap().isEmpty() && redirectionMap.isEmpty() && aggregationList.isEmpty() ) {
		throwException( "expected at least one operation, interface, aggregation or redirection for inputPort " + inputPortName );
	} else if ( protocolId == null && !inputPortLocation.toString().equals( Constants.LOCAL_LOCATION_KEYWORD ) ) {
		throwException( "expected protocol for inputPort " + inputPortName );
	}
	InputPortInfo iport = new InputPortInfo( getContext(), inputPortName, inputPortLocation, protocolId, protocolConfiguration, aggregationList.toArray( new String[ aggregationList.size() ] ), redirectionMap );
	iport.setInterfacesList( interfacesList );
	iface.copyTo( iport );
	program.addChild( iport );
	return iport;
}
/**
 * Parses an {@code interface Name { ... }} declaration, registers it in
 * {@link #interfaces} and adds it to the program.
 *
 * @return the parsed interface, or null if the current token is not
 *         the "interface" keyword
 */
private InterfaceDefinition parseInterface()
	throws IOException, ParserException
{
	String name;
	InterfaceDefinition iface = null;
	if ( token.isKeyword( "interface" ) ) {
		getToken();
		assertToken( Scanner.TokenType.ID, "expected interface name" );
		name = token.content();
		getToken();
		eat( Scanner.TokenType.LCURLY, "expected {" );
		iface = new InterfaceDefinition( getContext(), name );
		parseOperations( iface );
		interfaces.put( name, iface );
		program.addChild( iface );
		eat( Scanner.TokenType.RCURLY, "expected }" );
	}
	return iface;
}
/**
 * Parses a run of OneWay/RequestResponse operation sections into the
 * given collector, stopping at the first token that starts neither.
 *
 * @param oc the collector receiving the parsed operation declarations
 */
private void parseOperations( OperationCollector oc )
	throws IOException, ParserException
{
	for( ;; ) {
		if ( token.is( Scanner.TokenType.OP_OW ) ) {
			parseOneWayOperations( oc );
		} else if ( token.is( Scanner.TokenType.OP_RR ) ) {
			parseRequestResponseOperations( oc );
		} else {
			break;
		}
	}
}
/**
 * Parses the body of an {@code outputPort} declaration into {@code p}:
 * operation sections, Interfaces, Location and Protocol (with optional
 * inline configuration). Stops at the first unrecognized token; the
 * caller eats the closing brace.
 *
 * @param p the output port being populated
 */
private void parseOutputPortInfo( OutputPortInfo p )
	throws IOException, ParserException
{
	List< String > interfacesList = new ArrayList< String >();
	boolean keepRun = true;
	while ( keepRun ) {
		if ( token.is( Scanner.TokenType.OP_OW ) ) {
			parseOneWayOperations( p );
		} else if ( token.is( Scanner.TokenType.OP_RR ) ) {
			parseRequestResponseOperations( p );
		} else if ( token.isKeyword( "Interfaces" ) ) {
			getToken();
			eat( Scanner.TokenType.COLON, "expected : after Interfaces" );
			boolean r = true;
			while( r ) {
				assertToken( Scanner.TokenType.ID, "expected interface name" );
				interfacesList.add( token.content() );
				InterfaceDefinition i = interfaces.get( token.content() );
				if ( i == null ) {
					throwException( "Invalid interface name: " + token.content() );
				}
				// Copy the interface's operations onto the port.
				i.copyTo( p );
				getToken();
				if ( token.is( Scanner.TokenType.COMMA ) ) {
					getToken();
				} else {
					r = false;
				}
			}
			p.setInterfacesList( interfacesList );
		} else if ( token.isKeyword( "Location" ) ) {
			if ( p.location() != null ) {
				throwException( "Location already defined for output port " + p.id() );
			}
			getToken();
			eat( Scanner.TokenType.COLON, "expected :" );
			checkConstant();
			assertToken( Scanner.TokenType.STRING, "expected location string" );
			URI location = null;
			try {
				location = new URI( token.content() );
			} catch ( URISyntaxException e ) {
				throwException( e );
			}
			p.setLocation( location );
			getToken();
		} else if ( token.isKeyword( "Protocol" ) ) {
			if ( p.protocolId() != null ) {
				throwException( "Protocol already defined for output port " + p.id() );
			}
			getToken();
			eat( Scanner.TokenType.COLON, "expected :" );
			checkConstant();
			assertToken( Scanner.TokenType.ID, "expected protocol identifier" );
			p.setProtocolId( token.content() );
			getToken();
			if ( token.is( Scanner.TokenType.LCURLY ) ) {
				// Push back a synthetic variable-path prefix (<port>.protocol)
				// so the inline configuration is parsed relative to that path.
				addTokens( Arrays.asList(
					new Scanner.Token( Scanner.TokenType.ID, p.id() ),
					new Scanner.Token( Scanner.TokenType.DOT ),
					new Scanner.Token( Scanner.TokenType.ID, "protocol" ),
					token ) );
				// Protocol configuration
				getToken();
				p.setProtocolConfiguration( parseInVariablePathProcess( false ) );
				p.setInterfacesList( interfacesList );
			}
		} else {
			keepRun = false;
		}
	}
}
/**
 * Parses a OneWay operation section: a colon followed by a
 * comma-separated list of operation names, each with an optional
 * parenthesized request type. Declared operations are added to the
 * collector; an unknown request type is rejected.
 *
 * @param oc the collector receiving the parsed declarations
 */
private void parseOneWayOperations( OperationCollector oc )
	throws IOException, ParserException
{
	getToken();
	eat( Scanner.TokenType.COLON, "expected :" );
	boolean keepRun = true;
	String opId;
	while( keepRun ) {
		checkConstant();
		if ( token.is( Scanner.TokenType.ID ) ) {
			opId = token.content();
			OneWayOperationDeclaration opDecl = new OneWayOperationDeclaration( getContext(), opId );
			getToken();
			if ( token.is( Scanner.TokenType.LPAREN ) ) { // Type declaration
				getToken(); //eat (
				if ( definedTypes.containsKey( token.content() ) == false ) {
					throwException( "invalid type: " + token.content() );
				}
				opDecl.setRequestType( definedTypes.get( token.content() ) );
				getToken(); // eat the type name
				eat( Scanner.TokenType.RPAREN, "expected )" );
			}
			oc.addOperation( opDecl );
			if ( token.is( Scanner.TokenType.COMMA ) ) {
				getToken();
			} else {
				keepRun = false;
			}
		} else {
			keepRun = false;
		}
	}
}
/**
 * Parses a RequestResponse operation section: a colon followed by a
 * comma-separated list of operations, each with optional parenthesized
 * request and response types and an optional {@code throws} clause
 * listing faults (each with an optional parenthesized fault type).
 * Declared operations are added to the collector; unknown request or
 * response types are rejected.
 *
 * @param oc the collector receiving the parsed declarations
 */
private void parseRequestResponseOperations( OperationCollector oc )
	throws IOException, ParserException
{
	getToken();
	eat( Scanner.TokenType.COLON, "expected :" );
	boolean keepRun = true;
	String opId;
	while( keepRun ) {
		checkConstant();
		if ( token.is( Scanner.TokenType.ID ) ) {
			opId = token.content();
			getToken();
			String requestTypeName = TypeDefinitionUndefined.UNDEFINED_KEYWORD;
			String responseTypeName = TypeDefinitionUndefined.UNDEFINED_KEYWORD;
			if ( token.is( Scanner.TokenType.LPAREN ) ) {
				getToken(); //eat (
				requestTypeName = token.content();
				getToken();
				eat( Scanner.TokenType.RPAREN, "expected )" );
				eat( Scanner.TokenType.LPAREN, "expected (" );
				responseTypeName = token.content();
				getToken();
				eat( Scanner.TokenType.RPAREN, "expected )" );
			}
			Map< String, TypeDefinition > faultTypesMap = new HashMap< String, TypeDefinition >();
			if ( token.is( Scanner.TokenType.THROWS ) ) {
				getToken();
				while( token.is( Scanner.TokenType.ID ) ) {
					String faultName = token.content();
					String faultTypeName = TypeDefinitionUndefined.UNDEFINED_KEYWORD;
					getToken();
					if ( token.is( Scanner.TokenType.LPAREN ) ) {
						getToken(); //eat (
						faultTypeName = token.content();
						getToken();
						eat( Scanner.TokenType.RPAREN, "expected )" );
					}
					// NOTE(review): fault type names are not validated against
					// definedTypes here, unlike request/response types below.
					faultTypesMap.put( faultName, definedTypes.get( faultTypeName ) );
				}
			}
			if ( requestTypeName != null && definedTypes.containsKey( requestTypeName ) == false ) {
				throwException( "invalid type: " + requestTypeName );
			}
			if ( responseTypeName != null && definedTypes.containsKey( responseTypeName ) == false ) {
				// Bug fix: this message previously reported requestTypeName,
				// hiding the actual offending response type.
				throwException( "invalid type: " + responseTypeName );
			}
			RequestResponseOperationDeclaration opRR =
				new RequestResponseOperationDeclaration(
					getContext(),
					opId,
					definedTypes.get( requestTypeName ),
					definedTypes.get( responseTypeName ),
					faultTypesMap
				);
			oc.addOperation( opRR );
			if ( token.is( Scanner.TokenType.COMMA ) ) {
				getToken();
			} else {
				keepRun = false;
			}
		} else {
			keepRun = false;
		}
	}
}
// Accumulates the bodies of all init{...} blocks in source order; null if none.
private SequenceStatement initSequence = null;
// The program's single main definition; parseCode() enforces uniqueness.
private DefinitionNode main = null;
/**
 * Parses a run of code sections: define blocks (added to the program),
 * the single main definition, and init blocks (accumulated into
 * {@link #initSequence}). Stops at the first token starting none of them.
 */
private void parseCode()
	throws IOException, ParserException
{
	boolean keepRun = true;
	do {
		if ( token.is( Scanner.TokenType.DEFINE ) ) {
			program.addChild( parseDefinition() );
		} else if ( token.isKeyword( "main" ) ) {
			if ( main != null ) {
				throwException( "you must specify only one main definition" );
			}
			main = parseMain();
		} else if ( token.is( Scanner.TokenType.INIT ) ) {
			if ( initSequence == null ) {
				initSequence = new SequenceStatement( getContext() );
			}
			// Multiple init blocks are concatenated in source order.
			initSequence.addChild( parseInit() );
		} else {
			keepRun = false;
		}
	} while ( keepRun );
}
/**
 * Parses a {@code main { ... }} block into its definition node.
 *
 * @return the "main" definition wrapping the parsed process
 */
private DefinitionNode parseMain()
	throws IOException, ParserException
{
	getToken();
	eat( Scanner.TokenType.LCURLY, "expected { after procedure identifier" );
	// Capture the context before parsing the body, matching the original
	// left-to-right evaluation of the constructor arguments.
	final ParsingContext context = getContext();
	DefinitionNode mainDefinition = new DefinitionNode( context, "main", parseProcess() );
	eat( Scanner.TokenType.RCURLY, "expected } after procedure definition" );
	return mainDefinition;
}
/**
 * Parses an {@code init { ... }} block.
 *
 * @return the process contained in the block
 */
private OLSyntaxNode parseInit()
	throws IOException, ParserException
{
	getToken();
	eat( Scanner.TokenType.LCURLY, "expected { after procedure identifier" );
	final OLSyntaxNode initBody = parseProcess();
	eat( Scanner.TokenType.RCURLY, "expected } after procedure definition" );
	return initBody;
}
/**
 * Parses a {@code define name { ... }} block.
 *
 * @return the definition node wrapping the parsed process
 */
private DefinitionNode parseDefinition()
	throws IOException, ParserException
{
	getToken();
	assertToken( Scanner.TokenType.ID, "expected definition identifier" );
	final String definitionId = token.content();
	getToken();
	eat( Scanner.TokenType.LCURLY, "expected { after definition declaration" );
	// Capture the context before parsing the body, matching the original
	// left-to-right evaluation of the constructor arguments.
	final ParsingContext context = getContext();
	DefinitionNode definition = new DefinitionNode( context, definitionId, parseProcess() );
	eat( Scanner.TokenType.RCURLY, "expected } after definition declaration" );
	return definition;
}
/**
 * Parses a process. At the top level a process is a parallel composition
 * of sequences, so this simply delegates to the highest-precedence rule.
 *
 * @return the parsed process AST
 */
public OLSyntaxNode parseProcess()
	throws IOException, ParserException
{
	return parseParallelStatement();
}
/**
 * Parses a parallel composition: one or more sequence statements
 * separated by the parallel operator.
 *
 * @return the parallel statement containing all parsed branches
 */
private ParallelStatement parseParallelStatement()
	throws IOException, ParserException
{
	final ParallelStatement parallel = new ParallelStatement( getContext() );
	parallel.addChild( parseSequenceStatement() );
	while ( token.is( Scanner.TokenType.PARALLEL ) ) {
		getToken(); // eat the parallel operator
		parallel.addChild( parseSequenceStatement() );
	}
	return parallel;
}
/**
 * Parses a sequence: one or more basic statements separated by the
 * sequence operator.
 *
 * @return the sequence statement containing all parsed steps
 */
private SequenceStatement parseSequenceStatement()
	throws IOException, ParserException
{
	final SequenceStatement sequence = new SequenceStatement( getContext() );
	sequence.addChild( parseBasicStatement() );
	while ( token.is( Scanner.TokenType.SEQUENCE ) ) {
		getToken(); // eat the sequence operator
		sequence.addChild( parseBasicStatement() );
	}
	return sequence;
}
// Stack of variable-path token prefixes active for nested with/protocol blocks.
private List< List< Scanner.Token > > inVariablePaths = new LinkedList< List< Scanner.Token > >();
/**
 * Parses a process whose variable paths are implicitly prefixed by a
 * path collected from the tokens preceding the opening brace. With
 * {@code withConstruct} the prefix is wrapped in parentheses (the
 * {@code with (path) { ... }} form); otherwise every token up to the
 * brace is taken as the prefix.
 *
 * @param withConstruct whether the parenthesized "with" form is expected
 * @return the parsed process
 */
private OLSyntaxNode parseInVariablePathProcess(
	boolean withConstruct )
	throws IOException, ParserException
{
	OLSyntaxNode ret = null;
	List< Scanner.Token > tokens = new LinkedList< Scanner.Token >();
	if ( withConstruct ) {
		eat( Scanner.TokenType.LPAREN, "expected (" );
		while ( token.isNot( Scanner.TokenType.LCURLY ) ) {
			tokens.add( token );
			getToken();
		}
		//TODO transfer this whole buggy thing to the OOIT
		// Presumably drops the closing ')' gathered by the loop above —
		// see the TODO; verify before touching.
		tokens.remove( tokens.size() - 1 );
		//getToken();
	} else {
		while ( token.isNot( Scanner.TokenType.LCURLY ) ) {
			tokens.add( token );
			getToken();
		}
	}
	// Make the prefix visible to the nested process, parse it, then pop.
	inVariablePaths.add( tokens );
	eat( Scanner.TokenType.LCURLY, "expected {" );
	ret = parseProcess();
	eat( Scanner.TokenType.RCURLY, "expected }" );
	inVariablePaths.remove( inVariablePaths.size() - 1 );
	return ret;
}
/**
 * Parses a basic (non-composed) statement: non-deterministic choice,
 * variable-path statements (assignment, deep copy, aliasing, inc/dec),
 * operation invocations, definition calls, with blocks, control flow
 * (if/while/for/foreach/spawn), links, fault handling
 * (scope/throw/install/compensate), synchronized/undef, and grouping via
 * parentheses or braces.
 *
 * @return the parsed statement, never null
 * @throws IOException if the underlying scanner fails to read
 * @throws ParserException if no basic statement can be recognised
 */
private OLSyntaxNode parseBasicStatement()
	throws IOException, ParserException
{
	OLSyntaxNode retVal = null;
	if ( token.is( Scanner.TokenType.LSQUARE ) ) {
		retVal = parseNDChoiceStatement();
	} else if ( token.is( Scanner.TokenType.ID ) ) {
		checkConstant();
		String id = token.content();
		getToken();
		// Disambiguate on the token that follows the identifier.
		if ( token.is( Scanner.TokenType.COLON ) || token.is( Scanner.TokenType.LSQUARE )
			|| token.is( Scanner.TokenType.DOT ) || token.is( Scanner.TokenType.ASSIGN )
			|| token.is( Scanner.TokenType.POINTS_TO ) || token.is( Scanner.TokenType.DEEP_COPY_LEFT )
			|| token.is( Scanner.TokenType.DECREMENT ) || token.is( Scanner.TokenType.CHOICE ) ) {
			// The identifier starts a variable path.
			retVal = parseAssignOrDeepCopyOrPointerStatement( _parseVariablePath( id ) );
		} else if ( token.is( Scanner.TokenType.LPAREN ) ) {
			retVal = parseInputOperationStatement( id );
		} else if ( token.is( Scanner.TokenType.AT ) ) { // operation@outputPort
			getToken();
			retVal = parseOutputOperationStatement( id );
		} else {
			retVal = new DefinitionCallStatement( getContext(), id );
		}
	} else if ( token.is( Scanner.TokenType.WITH ) ) {
		getToken();
		retVal = parseInVariablePathProcess( true );
	} else if ( token.is( Scanner.TokenType.DOT ) && inVariablePaths.size() > 0 ) {
		retVal = parseAssignOrDeepCopyOrPointerStatement( parsePrefixedVariablePath() );
	} else if ( token.is( Scanner.TokenType.CHOICE ) ) { // Pre increment: ++i
		getToken();
		retVal = new PreIncrementStatement( getContext(), parseVariablePath() );
	} else if ( token.is( Scanner.TokenType.DECREMENT ) ) { // Pre decrement: --i
		getToken();
		retVal = new PreDecrementStatement( getContext(), parseVariablePath() );
	} else if ( token.is( Scanner.TokenType.SYNCHRONIZED ) ) {
		// synchronized( <lock id> ) { <process> }
		getToken();
		eat( Scanner.TokenType.LPAREN, "expected (" );
		assertToken( Scanner.TokenType.ID, "expected lock id" );
		String id = token.content();
		getToken();
		eat( Scanner.TokenType.RPAREN, "expected )" );
		eat( Scanner.TokenType.LCURLY, "expected {" );
		retVal = new SynchronizedStatement( getContext(), id, parseProcess() );
		eat( Scanner.TokenType.RCURLY, "expected }" );
	} else if ( token.is( Scanner.TokenType.UNDEF ) ) {
		getToken();
		eat( Scanner.TokenType.LPAREN, "expected (" );
		checkConstant();
		retVal = new UndefStatement( getContext(), parseVariablePath() );
		eat( Scanner.TokenType.RPAREN, "expected )" );
	} else if ( token.is( Scanner.TokenType.FOR ) ) {
		// for( <init>, <condition>, <post> ) <body>
		getToken();
		eat( Scanner.TokenType.LPAREN, "expected (" );
		OLSyntaxNode init = parseProcess();
		eat( Scanner.TokenType.COMMA, "expected ," );
		OLSyntaxNode condition = parseCondition();
		eat( Scanner.TokenType.COMMA, "expected ," );
		OLSyntaxNode post = parseProcess();
		eat( Scanner.TokenType.RPAREN, "expected )" );
		OLSyntaxNode body = parseBasicStatement();
		retVal = new ForStatement( getContext(), init, condition, post, body );
	} else if ( token.is( Scanner.TokenType.SPAWN ) ) {
		// spawn( <index path> over <upper bound> ) [in <path>] { <process> }
		getToken();
		eat( Scanner.TokenType.LPAREN, "expected (" );
		VariablePathNode indexVariablePath = parseVariablePath();
		assertToken( Scanner.TokenType.ID, "expected over" );
		if ( !token.isKeyword( "over" ) ) { // idiom fix: was "== false"
			throwException( "expected over" );
		}
		getToken();
		OLSyntaxNode upperBoundExpression = parseExpression();
		eat( Scanner.TokenType.RPAREN, "expected )" );
		VariablePathNode inVariablePath = null;
		if ( token.isKeyword( "in" ) ) {
			getToken();
			inVariablePath = parseVariablePath();
		}
		eat( Scanner.TokenType.LCURLY, "expected {" );
		OLSyntaxNode process = parseProcess();
		eat( Scanner.TokenType.RCURLY, "expected }" );
		retVal = new SpawnStatement(
			getContext(),
			indexVariablePath,
			upperBoundExpression,
			inVariablePath,
			process
		);
	} else if ( token.is( Scanner.TokenType.FOREACH ) ) {
		// foreach( <key path> : <target path> ) <body>
		getToken();
		eat( Scanner.TokenType.LPAREN, "expected (" );
		VariablePathNode keyPath = parseVariablePath();
		eat( Scanner.TokenType.COLON, "expected :" );
		VariablePathNode targetPath = parseVariablePath();
		eat( Scanner.TokenType.RPAREN, "expected )" );
		OLSyntaxNode body = parseBasicStatement();
		retVal = new ForEachStatement( getContext(), keyPath, targetPath, body );
	} else if ( token.is( Scanner.TokenType.LINKIN ) ) {
		retVal = parseLinkInStatement();
	} else if ( token.is( Scanner.TokenType.CURRENT_HANDLER ) ) {
		getToken();
		retVal = new CurrentHandlerStatement( getContext() );
	} else if ( token.is( Scanner.TokenType.NULL_PROCESS ) ) {
		getToken();
		retVal = new NullProcessStatement( getContext() );
	} else if ( token.is( Scanner.TokenType.EXIT ) ) {
		getToken();
		retVal = new ExitStatement( getContext() );
	} else if ( token.is( Scanner.TokenType.WHILE ) ) {
		retVal = parseWhileStatement();
	} else if ( token.is( Scanner.TokenType.LINKOUT ) ) {
		getToken();
		eat( Scanner.TokenType.LPAREN, "expected (" );
		assertToken( Scanner.TokenType.ID, "expected link identifier" );
		retVal = new LinkOutStatement( getContext(), token.content() );
		getToken();
		eat( Scanner.TokenType.RPAREN, "expected )" );
	} else if ( token.is( Scanner.TokenType.LPAREN ) ) {
		// Parenthesised sub-process.
		getToken();
		retVal = parseProcess();
		eat( Scanner.TokenType.RPAREN, "expected )" );
	} else if ( token.is( Scanner.TokenType.LCURLY ) ) {
		// Braced sub-process.
		getToken();
		retVal = parseProcess();
		eat( Scanner.TokenType.RCURLY, "expected }" );
	} else if ( token.is( Scanner.TokenType.SCOPE ) ) {
		// scope( <id> ) { <process> }
		getToken();
		eat( Scanner.TokenType.LPAREN, "expected (" );
		checkConstant();
		assertToken( Scanner.TokenType.ID, "expected scope identifier" );
		String id = token.content();
		getToken();
		eat( Scanner.TokenType.RPAREN, "expected )" );
		eat( Scanner.TokenType.LCURLY, "expected {" );
		retVal = new Scope( getContext(), id, parseProcess() );
		eat( Scanner.TokenType.RCURLY, "expected }" );
	} else if ( token.is( Scanner.TokenType.COMPENSATE ) ) {
		getToken();
		eat( Scanner.TokenType.LPAREN, "expected (" );
		checkConstant();
		assertToken( Scanner.TokenType.ID, "expected scope identifier" );
		retVal = new CompensateStatement( getContext(), token.content() );
		getToken();
		eat( Scanner.TokenType.RPAREN, "expected )" );
	} else if ( token.is( Scanner.TokenType.THROW ) ) {
		// throw( <fault id> [, <expression>] )
		getToken();
		eat( Scanner.TokenType.LPAREN, "expected (" );
		checkConstant();
		assertToken( Scanner.TokenType.ID, "expected fault identifier" );
		String faultName = token.content();
		getToken();
		if ( token.is( Scanner.TokenType.RPAREN ) ) {
			retVal = new ThrowStatement( getContext(), faultName );
		} else {
			eat( Scanner.TokenType.COMMA, "expected , or )" );
			OLSyntaxNode expression = parseExpression();
			retVal = new ThrowStatement( getContext(), faultName, expression );
		}
		eat( Scanner.TokenType.RPAREN, "expected )" );
	} else if ( token.is( Scanner.TokenType.INSTALL ) ) {
		getToken();
		eat( Scanner.TokenType.LPAREN, "expected (" );
		retVal = new InstallStatement( getContext(), parseInstallFunction() );
		eat( Scanner.TokenType.RPAREN, "expected )" );
	} else if ( token.is( Scanner.TokenType.IF ) ) {
		// if( <cond> ) <stmt> [else if( <cond> ) <stmt>]* [else <stmt>]
		IfStatement stm = new IfStatement( getContext() );
		OLSyntaxNode cond;
		OLSyntaxNode node;
		getToken();
		eat( Scanner.TokenType.LPAREN, "expected (" );
		cond = parseCondition();
		eat( Scanner.TokenType.RPAREN, "expected )" );
		node = parseBasicStatement();
		stm.addChild( new Pair<OLSyntaxNode, OLSyntaxNode>( cond, node ) );
		boolean keepRun = true;
		while ( token.is( Scanner.TokenType.ELSE ) && keepRun ) {
			getToken();
			if ( token.is( Scanner.TokenType.IF ) ) { // else if branch
				getToken();
				eat( Scanner.TokenType.LPAREN, "expected (" );
				cond = parseCondition();
				eat( Scanner.TokenType.RPAREN, "expected )" );
				node = parseBasicStatement();
				stm.addChild( new Pair<OLSyntaxNode, OLSyntaxNode>( cond, node ) );
			} else { // else branch terminates the chain
				keepRun = false;
				stm.setElseProcess( parseBasicStatement() );
			}
		}
		retVal = stm;
	}
	if ( retVal == null ) {
		throwException( "expected basic statement" );
	}
	return retVal;
}
/**
 * Parses an install function: a comma-separated list of
 * <fault names> => <handler process> entries, where a fault name is an
 * identifier or the keyword "this" (stored as null).
 * Sets insideInstallFunction for the duration of the parse so that
 * parseFactor() accepts ^-prefixed freeze paths, restoring the previous
 * value on exit (handles nested install functions).
 *
 * @return the parsed install function node
 */
private InstallFunctionNode parseInstallFunction()
throws IOException, ParserException
{
boolean backup = insideInstallFunction;
insideInstallFunction = true;
List< Pair< String, OLSyntaxNode > > vec =
new LinkedList< Pair< String, OLSyntaxNode > >();
boolean keepRun = true;
List< String > names = new LinkedList< String >();
OLSyntaxNode handler;
while( keepRun ) {
// Collect every fault name up to the arrow; "this" is stored as null.
do {
if ( token.is( Scanner.TokenType.THIS ) ) {
names.add( null );
} else if ( token.is( Scanner.TokenType.ID ) ) {
names.add( token.content() );
} else {
throwException( "expected fault identifier or this" );
}
getToken();
} while( token.isNot( Scanner.TokenType.ARROW ) );
getToken(); // eat the arrow
handler = parseProcess();
// The same handler node is shared by every name collected above.
for( String name : names ) {
vec.add( new Pair< String, OLSyntaxNode >( name, handler ) );
}
names.clear();
if ( token.is( Scanner.TokenType.COMMA ) ) {
getToken();
} else {
keepRun = false;
}
}
insideInstallFunction = backup;
return new InstallFunctionNode( vec.toArray( new Pair[ vec.size() ] ) );
}
private OLSyntaxNode parseAssignOrDeepCopyOrPointerStatement( VariablePathNode path )
throws IOException, ParserException
{
OLSyntaxNode retVal = null;
if ( token.is( Scanner.TokenType.ASSIGN ) ) {
getToken();
retVal =
new AssignStatement( getContext(), path, parseExpression() );
} else if ( token.is( Scanner.TokenType.CHOICE ) ) {
getToken();
retVal =
new PostIncrementStatement( getContext(), path );
} else if ( token.is( Scanner.TokenType.DECREMENT ) ) {
getToken();
retVal =
new PostDecrementStatement( getContext(), path );
} else if ( token.is( Scanner.TokenType.POINTS_TO ) ) {
getToken();
retVal =
new PointerStatement( getContext(), path, parseVariablePath() );
} else if ( token.is( Scanner.TokenType.DEEP_COPY_LEFT ) ) {
getToken();
retVal =
new DeepCopyStatement( getContext(), path, parseVariablePath() );
} else {
throwException( "expected = or -> or << or
}
return retVal;
}
/**
 * Parses a variable path from its first token. A path beginning with a
 * dot is a prefixed path (only valid inside a with block) and is
 * delegated to parsePrefixedVariablePath(); otherwise the path must start
 * with an identifier.
 *
 * @return the parsed variable path
 */
private VariablePathNode parseVariablePath()
	throws ParserException, IOException
{
	if ( token.is( Scanner.TokenType.DOT ) ) {
		return parsePrefixedVariablePath();
	}
	assertToken( Scanner.TokenType.ID, "Expected variable path" );
	final String rootId = token.content();
	getToken(); // consume the root identifier
	return _parseVariablePath( rootId );
}
/**
 * Parses the remainder of a variable path whose root identifier has
 * already been consumed: an optional index on the root, followed by any
 * number of nested elements (.name or .( expr )), each optionally indexed
 * by [ expr ].
 *
 * @param varId the already-read root identifier; the special root
 *        Constants.GLOBAL selects the global scope
 * @return the parsed variable path
 */
private VariablePathNode _parseVariablePath( String varId )
throws IOException, ParserException
{
OLSyntaxNode expr = null;
VariablePathNode path = null;
if ( varId.equals( Constants.GLOBAL ) ) {
path = new VariablePathNode( getContext(), true );
} else {
path = new VariablePathNode( getContext(), false );
// Optional index on the root element: id[ expr ]
if ( token.is( Scanner.TokenType.LSQUARE ) ) {
getToken();
expr =
parseExpression();
eat(
Scanner.TokenType.RSQUARE, "expected ]" );
}
path.append( new Pair<OLSyntaxNode, OLSyntaxNode>(
new ConstantStringExpression( getContext(), varId ), expr ) );
}
OLSyntaxNode nodeExpr = null;
// Nested elements: .name or .( expr ), each optionally indexed.
while ( token.is( Scanner.TokenType.DOT ) ) {
getToken();
if ( token.is( Scanner.TokenType.ID ) ) {
nodeExpr = new ConstantStringExpression( getContext(), token.content() );
} else if ( token.is( Scanner.TokenType.LPAREN ) ) {
// Dynamic element name: .( expr )
getToken();
nodeExpr =
parseExpression();
assertToken(
Scanner.TokenType.RPAREN, "expected )" );
} else {
assertToken( Scanner.TokenType.ID, "expected nested node identifier" );
}
getToken();
if ( token.is( Scanner.TokenType.LSQUARE ) ) {
getToken();
expr =
parseExpression();
eat(
Scanner.TokenType.RSQUARE, "expected ]" );
} else {
// No index here: reset so the previous element's index is not reused.
expr = null;
}
path.append(
new Pair<OLSyntaxNode, OLSyntaxNode>( nodeExpr, expr ) );
}
return path;
}
private VariablePathNode parsePrefixedVariablePath()
throws IOException, ParserException
{
int i = inVariablePaths.size() - 1;
List< Scanner.Token > tokens = new ArrayList< Scanner.Token >();
try {
tokens.addAll( inVariablePaths.get( i ) );
} catch( IndexOutOfBoundsException e ) {
throwException( "Prefixed variable paths must be inside a with block" );
}
while ( tokens.get( 0 ).is( Scanner.TokenType.DOT ) ) {
i
tokens.addAll( 0, inVariablePaths.get( i ) );
}
addTokens( tokens );
addTokens( Arrays.asList( new Scanner.Token( Scanner.TokenType.DOT ) ) );
getToken();
String varId = token.content();
getToken();
return _parseVariablePath( varId );
}
/**
 * Parses a non-deterministic choice: a sequence of branches of the form
 * [ <input guard> ] { <process> }, where the guard is either a linkIn
 * statement or an input operation.
 *
 * @return the choice node, one child Pair (guard, process) per branch
 */
private NDChoiceStatement parseNDChoiceStatement()
throws IOException, ParserException
{
NDChoiceStatement stm = new NDChoiceStatement( getContext() );
OLSyntaxNode inputGuard = null;
OLSyntaxNode process;
while ( token.is( Scanner.TokenType.LSQUARE ) ) {
getToken(); // Eat [
if ( token.is( Scanner.TokenType.LINKIN ) ) {
inputGuard = parseLinkInStatement();
} else if ( token.is( Scanner.TokenType.ID ) ) {
String id = token.content();
getToken();
inputGuard =
parseInputOperationStatement( id );
} else {
throwException( "expected input guard" );
}
eat( Scanner.TokenType.RSQUARE, "] expected" );
eat(
Scanner.TokenType.LCURLY, "expected {" );
process =
parseProcess();
eat(
Scanner.TokenType.RCURLY, "expected }" );
stm.addChild( new Pair<OLSyntaxNode, OLSyntaxNode>( inputGuard, process ) );
}
return stm;
}
/**
 * Parses a linkIn statement: linkIn( <link identifier> ).
 * The LINKIN keyword token is still current when this method is entered.
 *
 * @return the parsed linkIn statement
 */
private LinkInStatement parseLinkInStatement()
	throws IOException, ParserException
{
	getToken(); // consume the linkIn keyword
	eat( Scanner.TokenType.LPAREN, "expected (" );
	assertToken( Scanner.TokenType.ID, "expected link identifier" );
	final LinkInStatement statement = new LinkInStatement( getContext(), token.content() );
	getToken(); // consume the link identifier
	eat( Scanner.TokenType.RPAREN, "expected )" );
	return statement;
}
/**
 * Parses an input operation statement whose operation name has already
 * been consumed: either a one-way input, id( <var path> ), or a
 * request-response input, id( <var path> )( <expression> ) { <process> }.
 *
 * @param id the already-read operation name; the current token is the
 *        opening parenthesis
 * @return the parsed input statement
 */
private OLSyntaxNode parseInputOperationStatement( String id )
throws IOException, ParserException
{
ParsingContext context = getContext();
VariablePathNode inputVarPath = parseOperationVariablePathParameter();
OLSyntaxNode stm;
if ( token.is( Scanner.TokenType.LPAREN ) ) { // Request Response operation
OLSyntaxNode outputExpression = parseOperationExpressionParameter();
OLSyntaxNode process;
eat(
Scanner.TokenType.LCURLY, "expected {" );
process =
parseProcess();
eat(
Scanner.TokenType.RCURLY, "expected }" );
stm =
new RequestResponseOperationStatement(
context, id, inputVarPath, outputExpression, process );
} else { // One Way operation
stm = new OneWayOperationStatement( context, id, inputVarPath );
}
return stm;
}
/**
 * Parses the parenthesised variable-path parameter of an operation
 * statement, e.g. the "( request )" part of an invocation.
 *
 * @return The VariablePath parameter of the statement. May be null (empty
 *         parentheses).
 * @throws IOException
 * @throws ParserException
 */
private VariablePathNode parseOperationVariablePathParameter()
	throws IOException, ParserException
{
	VariablePathNode path = null;
	eat( Scanner.TokenType.LPAREN, "expected (" );
	if ( token.is( Scanner.TokenType.ID ) ) {
		path = parseVariablePath();
	} else if ( token.is( Scanner.TokenType.DOT ) ) {
		path = parsePrefixedVariablePath();
	}
	eat( Scanner.TokenType.RPAREN, "expected )" );
	return path;
}
/**
 * Parses the parenthesised expression parameter of an operation
 * statement.
 *
 * @return the parsed expression, or null if the parentheses are empty
 */
private OLSyntaxNode parseOperationExpressionParameter()
	throws IOException, ParserException
{
	OLSyntaxNode expression = null;
	eat( Scanner.TokenType.LPAREN, "expected (" );
	if ( token.isNot( Scanner.TokenType.RPAREN ) ) {
		expression = parseExpression();
	}
	eat( Scanner.TokenType.RPAREN, "expected )" );
	return expression;
}
/**
 * Parses an output operation statement whose operation name and the '@'
 * token have already been consumed: either a notification,
 * id@port( <expr> ), or a solicit-response,
 * id@port( <expr> )( <var path> ) [ <install function> ].
 *
 * @param id the already-read operation name; the current token is the
 *        output port identifier
 * @return the parsed output statement
 */
private OLSyntaxNode parseOutputOperationStatement( String id )
throws IOException, ParserException
{
ParsingContext context = getContext();
String outputPortId = token.content();
getToken();
OLSyntaxNode outputExpression = parseOperationExpressionParameter();
OLSyntaxNode stm;
if ( token.is( Scanner.TokenType.LPAREN ) ) { // Solicit Response operation
VariablePathNode inputVarPath = parseOperationVariablePathParameter();
InstallFunctionNode function = null;
// Optional handler installation: [ <install function> ]
if ( token.is( Scanner.TokenType.LSQUARE ) ) {
eat( Scanner.TokenType.LSQUARE, "expected [" );
function =
parseInstallFunction();
eat(
Scanner.TokenType.RSQUARE, "expected ]" );
}
// NOTE(review): this branch passes getContext() instead of the context
// captured at method entry, unlike the notification branch below —
// confirm whether the later context is intended here.
stm = new SolicitResponseOperationStatement(
getContext(),
id,
outputPortId,
outputExpression,
inputVarPath,
function );
} else { // Notification operation
stm = new NotificationOperationStatement( context, id, outputPortId, outputExpression );
}
return stm;
}
/**
 * Parses: while( <condition> ) { <process> }.
 * The WHILE keyword token is still current on entry.
 *
 * @return the parsed while statement
 */
private OLSyntaxNode parseWhileStatement()
	throws IOException, ParserException
{
	final ParsingContext context = getContext();
	getToken(); // consume the while keyword
	eat( Scanner.TokenType.LPAREN, "expected (" );
	final OLSyntaxNode condition = parseCondition();
	eat( Scanner.TokenType.RPAREN, "expected )" );
	eat( Scanner.TokenType.LCURLY, "expected {" );
	final OLSyntaxNode body = parseProcess();
	eat( Scanner.TokenType.RCURLY, "expected }" );
	return new WhileStatement( context, condition, body );
}
/**
 * Parses a disjunction: AND-conditions separated by the OR token.
 *
 * @return the OR node (possibly with a single child)
 */
private OLSyntaxNode parseCondition()
	throws IOException, ParserException
{
	final OrConditionNode disjunction = new OrConditionNode( getContext() );
	boolean more = true;
	while ( more ) {
		disjunction.addChild( parseAndCondition() );
		if ( token.is( Scanner.TokenType.OR ) ) {
			getToken(); // consume the OR operator
		} else {
			more = false;
		}
	}
	return disjunction;
}
/**
 * Parses a conjunction: basic conditions separated by the AND token.
 *
 * @return the AND node (possibly with a single child)
 */
private OLSyntaxNode parseAndCondition()
	throws IOException, ParserException
{
	final AndConditionNode conjunction = new AndConditionNode( getContext() );
	boolean more = true;
	while ( more ) {
		conjunction.addChild( parseBasicCondition() );
		if ( token.is( Scanner.TokenType.AND ) ) {
			getToken(); // consume the AND operator
		} else {
			more = false;
		}
	}
	return conjunction;
}
/**
 * Parses the operand of a logical negation — either a parenthesised
 * condition or a bare expression — and wraps it in a NotConditionNode.
 * The NOT token itself has already been consumed by the caller.
 *
 * @return the negated condition
 */
private OLSyntaxNode parseNotCondition()
	throws IOException, ParserException
{
	final OLSyntaxNode operand;
	if ( token.is( Scanner.TokenType.LPAREN ) ) {
		getToken();
		operand = parseCondition();
		eat( Scanner.TokenType.RPAREN, "expected )" );
	} else {
		operand = new ExpressionConditionNode( getContext(), parseExpression() );
	}
	return new NotConditionNode( getContext(), operand );
}
/**
 * Parses an atomic condition: a parenthesised condition, a negated
 * condition, a comparison between two expressions, or a bare expression
 * used as a condition.
 *
 * @return the parsed condition, never null
 */
private OLSyntaxNode parseBasicCondition()
throws IOException, ParserException
{
OLSyntaxNode retVal = null;
if ( token.is( Scanner.TokenType.LPAREN ) ) {
getToken();
retVal =
parseCondition();
eat(
Scanner.TokenType.RPAREN, "expected )" );
} else if ( token.is( Scanner.TokenType.NOT ) ) {
getToken();
retVal =
parseNotCondition();
} else {
Scanner.TokenType opType;
OLSyntaxNode expr1;
expr1 =
parseExpression();
opType =
token.type();
// Without a comparison operator, the expression itself is the condition.
if ( opType != Scanner.TokenType.EQUAL && opType != Scanner.TokenType.LANGLE &&
opType != Scanner.TokenType.RANGLE && opType != Scanner.TokenType.MAJOR_OR_EQUAL &&
opType != Scanner.TokenType.MINOR_OR_EQUAL && opType != Scanner.TokenType.NOT_EQUAL ) {
retVal = new ExpressionConditionNode( getContext(), expr1 );
} else {
OLSyntaxNode expr2;
getToken();
expr2 =
parseExpression();
retVal =
new CompareConditionNode( getContext(), expr1, expr2, opType );
}
}
if ( retVal == null ) {
throwException( "expected condition" );
}
return retVal;
}
/**
 * Parses a sum expression: product expressions combined with + and -.
 * A negative numeric literal directly following an expression (scanned as
 * a single negative token, e.g. "i -1") is treated as an implicit
 * addition of that negative value.
 *
 * @return the parsed sum node
 * @todo Check if negative integer handling is appropriate
 */
private OLSyntaxNode parseExpression()
throws IOException, ParserException
{
boolean keepRun = true;
SumExpressionNode sum = new SumExpressionNode( getContext() );
sum.add( parseProductExpression() );
while ( keepRun ) {
if ( token.is( Scanner.TokenType.PLUS ) ) {
getToken();
sum.add( parseProductExpression() );
} else if ( token.is( Scanner.TokenType.MINUS ) ) {
getToken();
sum.subtract( parseProductExpression() );
} else if ( token.is( Scanner.TokenType.INT ) ) { // e.g. i -1
int value = Integer.parseInt( token.content() );
// We add it, because it's already negative.
if ( value < 0 ) {
sum.add( parseProductExpression() );
} else { // e.g. i 1
throwException( "expected expression operator" );
}
} else if ( token.is( Scanner.TokenType.REAL ) ) { // e.g. i -1
double value = Double.parseDouble( token.content() );
// We add it, because it's already negative.
if ( value < 0 ) {
sum.add( parseProductExpression() );
} else { // e.g. i 1
throwException( "expected expression operator" );
}
} else {
keepRun = false;
}
}
return sum;
}
/**
 * Parses a factor: a variable path (possibly with post/pre increment,
 * decrement or inline assignment), a literal constant, a parenthesised
 * expression, the # vector-size operator, an is_defined/is_int/is_real/
 * is_string check, a cast_int/cast_real/cast_string cast, or — inside an
 * install function only — a ^-prefixed fixed-variable path.
 *
 * @return the parsed factor, never null
 */
private OLSyntaxNode parseFactor()
throws IOException, ParserException
{
OLSyntaxNode retVal = null;
VariablePathNode path = null;
checkConstant();
if ( token.is( Scanner.TokenType.ID ) || token.is( Scanner.TokenType.DOT ) ) {
path = parseVariablePath();
} else if ( insideInstallFunction && token.is( Scanner.TokenType.CARET ) ) {
// ^path: fixed-variable expression, only valid in install handlers.
getToken();
path = parseVariablePath();
retVal = new InstallFixedVariableExpressionNode( getContext(), path );
return retVal;
}
if ( path != null ) {
// A path may be followed by ++, -- or an inline assignment.
if ( token.is( Scanner.TokenType.CHOICE ) ) { // Post increment
getToken();
retVal =
new PostIncrementStatement( getContext(), path );
} else if ( token.is( Scanner.TokenType.DECREMENT ) ) {
getToken();
retVal =
new PostDecrementStatement( getContext(), path );
} else if ( token.is( Scanner.TokenType.ASSIGN ) ) {
getToken();
retVal =
new AssignStatement( getContext(), path, parseExpression() );
} else {
retVal = new VariableExpressionNode( getContext(), path );
}
} else if ( token.is( Scanner.TokenType.STRING ) ) {
retVal = new ConstantStringExpression( getContext(), token.content() );
getToken();
} else if ( token.is( Scanner.TokenType.INT ) ) {
retVal = new ConstantIntegerExpression( getContext(), Integer.parseInt( token.content() ) );
getToken();
} else if ( token.is( Scanner.TokenType.REAL ) ) {
retVal = new ConstantRealExpression( getContext(), Double.parseDouble( token.content() ) );
getToken();
} else if ( token.is( Scanner.TokenType.LPAREN ) ) {
getToken();
retVal = parseExpression();
eat( Scanner.TokenType.RPAREN, "expected )" );
} else if ( token.is( Scanner.TokenType.HASH ) ) {
// #path: size of the value vector denoted by the path.
getToken();
retVal = new ValueVectorSizeExpressionNode(
getContext(),
parseVariablePath()
);
} else if ( token.is( Scanner.TokenType.CHOICE ) ) { // Pre increment: ++i
getToken();
retVal = new PreIncrementStatement( getContext(), parseVariablePath() );
} else if ( token.is( Scanner.TokenType.DECREMENT ) ) { // Pre decrement
getToken();
retVal = new PreDecrementStatement( getContext(), parseVariablePath() );
} else if ( token.is( Scanner.TokenType.IS_DEFINED ) ) {
getToken();
eat( Scanner.TokenType.LPAREN, "expected (" );
retVal = new IsTypeExpressionNode(
getContext(),
IsTypeExpressionNode.CheckType.DEFINED,
parseVariablePath()
);
eat( Scanner.TokenType.RPAREN, "expected )" );
} else if ( token.is( Scanner.TokenType.IS_INT ) ) {
getToken();
eat( Scanner.TokenType.LPAREN, "expected (" );
retVal = new IsTypeExpressionNode(
getContext(),
IsTypeExpressionNode.CheckType.INT,
parseVariablePath()
);
eat( Scanner.TokenType.RPAREN, "expected )" );
} else if ( token.is( Scanner.TokenType.IS_REAL ) ) {
getToken();
eat( Scanner.TokenType.LPAREN, "expected (" );
retVal = new IsTypeExpressionNode(
getContext(),
IsTypeExpressionNode.CheckType.REAL,
parseVariablePath()
);
eat( Scanner.TokenType.RPAREN, "expected )" );
} else if ( token.is( Scanner.TokenType.IS_STRING ) ) {
getToken();
eat( Scanner.TokenType.LPAREN, "expected (" );
retVal = new IsTypeExpressionNode(
getContext(),
IsTypeExpressionNode.CheckType.STRING,
parseVariablePath()
);
eat( Scanner.TokenType.RPAREN, "expected )" );
} else if ( token.is( Scanner.TokenType.CAST_INT ) ) {
getToken();
eat( Scanner.TokenType.LPAREN, "expected (" );
retVal = new TypeCastExpressionNode( getContext(), NativeType.INT, parseExpression() );
eat( Scanner.TokenType.RPAREN, "expected )" );
} else if ( token.is( Scanner.TokenType.CAST_REAL ) ) {
getToken();
eat( Scanner.TokenType.LPAREN, "expected (" );
retVal = new TypeCastExpressionNode(
getContext(),
NativeType.DOUBLE,
parseExpression()
);
eat( Scanner.TokenType.RPAREN, "expected )" );
} else if ( token.is( Scanner.TokenType.CAST_STRING ) ) {
getToken();
eat( Scanner.TokenType.LPAREN, "expected (" );
retVal = new TypeCastExpressionNode(
getContext(),
NativeType.STRING,
parseExpression()
);
eat( Scanner.TokenType.RPAREN, "expected )" );
}
if ( retVal == null ) {
throwException( "expected expression" );
}
return retVal;
}
/**
 * Parses a product: factors combined with the *, / and % operators.
 *
 * @return the product node (possibly with a single factor)
 */
private OLSyntaxNode parseProductExpression()
	throws IOException, ParserException
{
	final ProductExpressionNode product = new ProductExpressionNode( getContext() );
	product.multiply( parseFactor() );
	boolean parsing = true;
	while ( parsing ) {
		switch ( token.type() ) {
		case ASTERISK:
			getToken();
			product.multiply( parseFactor() );
			break;
		case DIVIDE:
			getToken();
			product.divide( parseFactor() );
			break;
		case PERCENT_SIGN:
			getToken();
			product.modulo( parseFactor() );
			break;
		default:
			parsing = false;
		}
	}
	return product;
}
}
|
package com.john.waveview;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.os.Handler;
import android.os.Message;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
/**
 * A custom view drawing two animated, semi-transparent sine-like waves
 * whose baseline height represents a progress value in [0, 100].
 * A background thread advances the animation and posts invalidate()
 * requests to the UI thread via a Handler.
 */
public class WaveView extends View {

    // Paths and paints for the two stacked wave layers ("blow" = below).
    private Path aboveWavePath = new Path();
    private Path blowWavePath = new Path();
    private Paint aboveWavePaint = new Paint();
    private Paint blowWavePaint = new Paint();

    private final int default_above_wave_alpha = 50;
    private final int default_blow_wave_alpha = 30;
    private final int default_above_wave_color = Color.WHITE;
    private final int default_blow_wave_color = Color.WHITE;
    private final int default_progress = 80;

    // y coordinate of the wave baseline, derived from progress in onDraw().
    private int waveToTop;

    private int aboveWaveColor;
    private int blowWaveColor;
    private int progress;

    // Index into the precomputed offset tables below, advanced by the
    // refresh thread to animate the waves.
    private int offsetIndex = 0;

    // wave animation
    private float[] aboveOffset = {1.5f, 2.0f, 2.5f, 3.0f, 3.5f, 4.0f, 4.5f, 5.0f, 5.5f, 6.0f, 6.5f, 7.0f, 7.5f, 8.0f, 8.5f, 9.0f, 9.5f, 10.0f, 10.5f, 11.0f, 11.5f, 12.0f, 12.5f, 13.0f, 13.5f, 14.0f, 14.5f, 15.0f, 15.5f, 16.0f, 16.5f, 17.0f, 17.5f, 18.0f, 18.5f, 19.0f, 19.5f, 20.0f, 20.5f, 21.0f, 21.5f, 22.0f, 22.5f, 23.0f, 23.5f, 24.0f, 24.5f, 25.0f, 25.5f, 26.0f, 25.469612f, 24.936907f, 24.375708f, 23.804106f, 23.272884f, 22.717838f, 22.185139f, 21.634232f, 21.089384f, 20.57109f, 19.990925f, 19.41971f, 18.889145f, 18.29396f, 17.757067f, 17.174246f, 16.607258f, 16.057583f, 15.539549f, 14.972029f, 14.440879f, 13.883532f, 13.358805f, 12.847078f, 12.265658f, 11.7357855f, 11.228427f, 10.720737f, 10.145882f, 9.637184f, 9.089122f, 8.514334f, 7.995496f, 7.482301f, 6.907622f, 6.399367f, 5.8826733f, 5.318535f, 4.807485f, 4.2638555f, 3.7463584f, 3.1917238f, 2.68992f, 2.118116f, 1.596749f, 1.089212f, 0.5654556f, 0.013689734f, -0.509612f, -1.0505137f};
    private float[] blowOffset = {4.5f, 5.0f, 5.5f, 6.0f, 6.5f, 7.0f, 7.5f, 8.0f, 8.5f, 9.0f, 9.5f, 10.0f, 10.5f, 11.0f, 11.5f, 12.0f, 12.5f, 13.0f, 13.5f, 14.0f, 14.5f, 15.0f, 15.5f, 16.0f, 16.5f, 17.0f, 17.5f, 18.0f, 18.5f, 19.0f, 19.5f, 20.0f, 20.5f, 21.0f, 21.5f, 22.0f, 22.5f, 23.0f, 23.5f, 24.0f, 24.5f, 25.0f, 25.5f, 26.0f, 26.5f, 27.0f, 27.5f, 28.0f, 28.5f, 29.0f, 28.433153f, 27.924328f, 27.401146f, 26.88904f, 26.355553f, 25.840899f, 25.30798f, 24.737104f, 24.142794f, 23.630413f, 23.121172f, 22.574392f, 22.051369f, 21.515535f, 20.93495f, 20.357899f, 19.817375f, 19.288565f, 18.709202f, 18.18553f, 17.651005f, 17.069977f, 16.517073f, 15.94023f, 15.42995f, 14.885934f, 14.370088f, 13.824445f, 13.230036f, 12.640225f, 12.082165f, 11.494119f, 10.986025f, 10.465705f, 9.866534f, 9.293761f, 8.783006f, 8.1871195f, 7.588639f, 7.0171866f, 6.4263854f, 5.898551f, 5.311175f, 4.7600684f, 4.2139015f, 3.6422157f, 3.0520093f, 2.4777887f, 1.9236779f, 1.3584205f};

    /**
     * wave crest
     */
    private final int x_zoom = 150;
    /**
     * wave length
     */
    private final int y_zoom = 6;
    private final float offset = 0.5f;
    private final float max_right = x_zoom * offset;

    // refresh thread
    private boolean refreshable = true;
    private Thread refreshThread;
    private final int REFRESH = 100;

    // Forwards refresh requests from the background thread to the UI thread.
    private Handler handler = new Handler(new Handler.Callback() {
        @Override
        public boolean handleMessage(Message msg) {
            if (msg.what == REFRESH) {
                invalidate();
            }
            return false;
        }
    });

    public WaveView(Context context, AttributeSet attrs) {
        this(context, attrs, R.attr.waveViewStyle);
    }

    public WaveView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        // Load styled attributes.
        final TypedArray attributes = context.getTheme().obtainStyledAttributes(attrs, R.styleable.WaveView, defStyle, 0);
        try {
            aboveWaveColor = attributes.getColor(R.styleable.WaveView_above_wave_color, default_above_wave_color);
            blowWaveColor = attributes.getColor(R.styleable.WaveView_blow_wave_color, default_blow_wave_color);
            progress = attributes.getInt(R.styleable.WaveView_progress, default_progress);
        } finally {
            // Fix: the TypedArray was never recycled, leaking it back to
            // the framework pool.
            attributes.recycle();
        }
        initializePainters();
        refreshThread = new RefreshThread();
        refreshThread.start();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        // Baseline: progress 100 puts the waves at the top, 0 at the bottom.
        waveToTop = (int) (getHeight() * (1f - progress / 100f));
        canvas.drawPath(blowWavePath, blowWavePaint);
        canvas.drawPath(aboveWavePath, aboveWavePaint);
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        setMeasuredDimension(measure(widthMeasureSpec, true), measure(heightMeasureSpec, false));
    }

    /**
     * Resolves one MeasureSpec, falling back to the suggested minimum size
     * plus padding when the spec is not EXACTLY.
     */
    private int measure(int measureSpec, boolean isWidth) {
        int result;
        int mode = MeasureSpec.getMode(measureSpec);
        int size = MeasureSpec.getSize(measureSpec);
        int padding = isWidth ? getPaddingLeft() + getPaddingRight() : getPaddingTop() + getPaddingBottom();
        if (mode == MeasureSpec.EXACTLY) {
            result = size;
        } else {
            result = isWidth ? getSuggestedMinimumWidth() : getSuggestedMinimumHeight();
            result += padding;
            if (mode == MeasureSpec.AT_MOST) {
                if (isWidth) {
                    result = Math.max(result, size);
                } else {
                    result = Math.min(result, size);
                }
            }
        }
        return result;
    }

    // Configures the two wave paints from the resolved colors/alphas.
    private void initializePainters() {
        aboveWavePaint.setColor(aboveWaveColor);
        aboveWavePaint.setAlpha(default_above_wave_alpha);
        aboveWavePaint.setStyle(Paint.Style.FILL);
        aboveWavePaint.setAntiAlias(true);
        blowWavePaint.setColor(blowWaveColor);
        blowWavePaint.setAlpha(default_blow_wave_alpha);
        blowWavePaint.setStyle(Paint.Style.FILL);
        blowWavePaint.setAntiAlias(true);
    }

    /**
     * Rebuilds both wave paths for the current animation frame.
     * NOTE(review): called from the refresh thread while onDraw() reads the
     * same Path objects on the UI thread — confirm whether this needs
     * synchronization or double-buffering.
     */
    private void calculatePath() {
        aboveWavePath.reset();
        blowWavePath.reset();
        aboveWavePath.moveTo(getLeft(), getHeight());
        for (float i = 0; x_zoom * i <= getRight() + max_right; i += offset) {
            aboveWavePath.lineTo((x_zoom * i), (float) (y_zoom * Math.cos(i + aboveOffset[offsetIndex])) + waveToTop);
        }
        aboveWavePath.lineTo(getRight(), getHeight());
        blowWavePath.moveTo(getLeft(), getHeight());
        for (float i = 0; x_zoom * i <= getRight() + max_right; i += offset) {
            blowWavePath.lineTo((x_zoom * i), (float) (y_zoom * Math.cos(i + blowOffset[offsetIndex])) + waveToTop);
        }
        blowWavePath.lineTo(getRight(), getHeight());
    }

    /**
     * Sets the fill progress.
     *
     * @param progress target progress; clamped into [0, 100]
     *        (fix: the original only capped the upper bound, letting a
     *        negative value push the wave baseline below the view)
     */
    public void setProgress(int progress) {
        this.progress = Math.max(0, Math.min(progress, 100));
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        // Stop the animation thread; it checks refreshable and is
        // interrupted out of its sleep.
        refreshable = false;
        refreshThread.interrupt();
    }

    /**
     * Background thread driving the wave animation: roughly every 100 ms it
     * advances the offset index, recomputes the paths and posts a REFRESH
     * message to the UI thread.
     */
    class RefreshThread extends Thread {
        @Override
        public void run() {
            while (refreshable) {
                try {
                    sleep(100);
                    offsetIndex++;
                    if (offsetIndex == aboveOffset.length) {
                        offsetIndex = 0;
                    }
                    calculatePath();
                    handler.sendEmptyMessage(REFRESH);
                } catch (InterruptedException e) {
                    // Fix: the interrupt was silently swallowed. Restore the
                    // interrupt status and stop the loop — the only caller of
                    // interrupt() is onDetachedFromWindow(), which means the
                    // view is going away.
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        }
    }
}
|
//$HeadURL: svn+ssh://mschneider@svn.wald.intevation.org/deegree/base/trunk/resources/eclipse/svn_classfile_header_template.xml $
package org.deegree.services.wps.capabilities;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import org.deegree.commons.tom.ows.CodeType;
import org.deegree.commons.utils.Pair;
import org.deegree.process.jaxb.java.ProcessDefinition;
import org.deegree.process.jaxb.java.ProcessDefinition.Metadata;
import org.deegree.services.controller.OGCFrontController;
import org.deegree.services.controller.ows.capabilities.OWSCapabilitiesXMLAdapter;
import org.deegree.services.controller.ows.capabilities.OWSOperation;
import org.deegree.services.jaxb.controller.DCPType;
import org.deegree.services.jaxb.metadata.DeegreeServicesMetadataType;
import org.deegree.services.jaxb.metadata.ServiceIdentificationType;
import org.deegree.services.wps.WPSProcess;
public class CapabilitiesXMLWriter extends OWSCapabilitiesXMLAdapter {
private static final String OGC_NS = "http:
private static final String OGC_PREFIX = "ogc";
private static final String OWS_NS = "http:
private static final String OWS_PREFIX = "ows";
private static final String WPS_NS = "http:
private static final String WPS_PREFIX = "wps";
private static final String GML_PREFIX = "gml";
private static final String GML_NS = "http:
private static final String XSI_NS = "http:
/**
 * Private constructor: this writer only exposes static export methods.
 */
private CapabilitiesXMLWriter() {
// avoid instantiation
}
/**
* @param writer
* @param processes
* @param serviceMetadata
* @param serviceWSDLURL
* location of a WSDL document which describes the entire service, may be null
* @throws XMLStreamException
*/
public static void export100( XMLStreamWriter writer, Map<CodeType, WPSProcess> processes,
DeegreeServicesMetadataType serviceMetadata, String serviceWSDLURL )
throws XMLStreamException {
writer.writeStartElement( WPS_PREFIX, "Capabilities", WPS_NS );
writer.writeNamespace( OWS_PREFIX, OWS_NS );
writer.writeNamespace( OGC_PREFIX, OGC_NS );
writer.writeNamespace( GML_PREFIX, GML_NS );
writer.writeNamespace( "xlink", XLN_NS );
writer.writeNamespace( "xsi", XSI_NS );
writer.writeAttribute( "service", "WPS" );
writer.writeAttribute( "version", "1.0.0" );
writer.writeAttribute( "xml:lang", "en" );
writer.writeAttribute( XSI_NS, "schemaLocation",
"http:
exportServiceIdentification( writer, serviceMetadata.getServiceIdentification() );
exportServiceProvider110( writer, serviceMetadata.getServiceProvider() );
exportOperationsMetadata( writer );
exportProcessOfferings( writer, processes );
exportLanguages( writer );
if ( serviceWSDLURL != null ) {
writer.writeStartElement( WPS_NS, "WSDL" );
writer.writeAttribute( "xlink:href", serviceWSDLURL );
writer.writeEndElement();
}
writer.writeEndElement(); // Capabilities
}
private static void exportProcessOfferings( XMLStreamWriter writer, Map<CodeType, WPSProcess> processes )
throws XMLStreamException {
writer.writeStartElement( WPS_NS, "ProcessOfferings" );
for ( WPSProcess process : processes.values() ) {
ProcessDefinition processDef = process.getDescription();
writer.writeStartElement( WPS_NS, "Process" );
writer.writeAttribute( WPS_NS, "processVersion", processDef.getProcessVersion() );
// "ows:Identifier" (minOccurs="1", maxOccurs="1")
writer.writeStartElement( OWS_NS, "Identifier" );
if ( processDef.getIdentifier().getCodeSpace() != null ) {
writer.writeAttribute( "codeSpace", processDef.getIdentifier().getCodeSpace() );
}
writer.writeCharacters( processDef.getIdentifier().getValue() );
writer.writeEndElement();
// "ows:Title" (minOccurs="1", maxOccurs="1")
if ( processDef.getTitle() != null ) {
writer.writeStartElement( OWS_NS, "Title" );
if ( processDef.getTitle().getLang() != null ) {
writer.writeAttribute( "xml:lang", processDef.getTitle().getLang() );
}
writer.writeCharacters( processDef.getTitle().getValue() );
writer.writeEndElement();
}
// "ows:Abstract" (minOccurs="0", maxOccurs="1")
if ( processDef.getAbstract() != null ) {
writer.writeStartElement( OWS_NS, "Abstract" );
if ( processDef.getAbstract().getLang() != null ) {
writer.writeAttribute( "xml:lang", processDef.getAbstract().getLang() );
}
writer.writeCharacters( processDef.getAbstract().getValue() );
writer.writeEndElement();
}
// "ows:Metadata" (minOccurs="0", maxOccurs="unbounded")
if ( processDef.getMetadata() != null ) {
for ( Metadata metadata : processDef.getMetadata() ) {
writer.writeStartElement( OWS_NS, "Metadata" );
if ( metadata.getAbout() != null ) {
writer.writeAttribute( "about", metadata.getAbout() );
}
if ( metadata.getHref() != null ) {
writer.writeAttribute( XLN_NS, "href", metadata.getHref() );
}
writer.writeEndElement();
}
}
// "wps:Profile" (minOccurs="0", maxOccurs="unbounded")
if ( processDef.getProfile() != null ) {
for ( String profile : processDef.getProfile() ) {
writeElement( writer, WPS_NS, "Profile", profile );
}
}
// "wps:WSDL" (minOccurs="0", maxOccurs="unbounded")
if ( processDef.getWSDL() != null ) {
writeElement( writer, WPS_NS, "WSDL", XLN_NS, "href", processDef.getWSDL() );
}
writer.writeEndElement(); // Process
}
writer.writeEndElement(); // ProcessOfferings
}
private static void exportOperationsMetadata( XMLStreamWriter writer )
throws XMLStreamException {
List<OWSOperation> operations = new LinkedList<OWSOperation>();
DCPType dcp = new DCPType();
dcp.setHTTPGet( OGCFrontController.getHttpGetURL() );
dcp.setHTTPPost( OGCFrontController.getHttpPostURL() );
List<Pair<String, List<String>>> params = new ArrayList<Pair<String, List<String>>>();
List<Pair<String, List<String>>> constraints = new ArrayList<Pair<String, List<String>>>();
operations.add( new OWSOperation( "GetCapabilities", dcp, params, constraints ) );
operations.add( new OWSOperation( "DescribeProcess", dcp, params, constraints ) );
operations.add( new OWSOperation( "Execute", dcp, params, constraints ) );
exportOperationsMetadata110( writer, operations );
}
private static void exportServiceIdentification( XMLStreamWriter writer, ServiceIdentificationType ident )
throws XMLStreamException {
writer.writeStartElement( OWS_NS, "ServiceIdentification" );
if ( ident == null ) {
writeElement( writer, OWS_NS, "Title", "deegree 3 WPS" );
writeElement( writer, OWS_NS, "Abstract", "deegree 3 WPS implementation" );
} else {
List<String> title = ident.getTitle();
writeElement( writer, OWS_NS, "Title", title.isEmpty() ? "deegree 3 WPS" : title.get( 0 ) );
List<String> _abstract = ident.getAbstract();
writeElement( writer, OWS_NS, "Abstract", _abstract.isEmpty() ? "deegree 3 WPS implementation"
: _abstract.get( 0 ) );
}
writeElement( writer, OWS_NS, "ServiceType", "WPS" );
writeElement( writer, OWS_NS, "ServiceTypeVersion", "1.0.0" );
writer.writeEndElement();
}
private static void exportLanguages( XMLStreamWriter writer )
throws XMLStreamException {
writer.writeStartElement( WPS_NS, "Languages" );
writer.writeStartElement( WPS_NS, "Default" );
writeElement( writer, OWS_NS, "Language", "en" );
writer.writeEndElement(); // Default
writer.writeStartElement( WPS_NS, "Supported" );
writeElement( writer, OWS_NS, "Language", "en" );
writer.writeEndElement(); // Supported
writer.writeEndElement(); // Languages
}
}
|
package org.hisp.dhis.dxf2.datavalueset;
import com.csvreader.CsvReader;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.calendar.CalendarService;
import org.hisp.dhis.category.CategoryOptionCombo;
import org.hisp.dhis.category.CategoryService;
import org.hisp.dhis.common.AuditType;
import org.hisp.dhis.common.DateRange;
import org.hisp.dhis.common.DxfNamespaces;
import org.hisp.dhis.common.IdScheme;
import org.hisp.dhis.common.IdSchemes;
import org.hisp.dhis.common.IdentifiableObjectManager;
import org.hisp.dhis.common.IdentifiableProperty;
import org.hisp.dhis.common.IllegalQueryException;
import org.hisp.dhis.commons.collection.CachingMap;
import org.hisp.dhis.commons.util.DebugUtils;
import org.hisp.dhis.commons.util.StreamUtils;
import org.hisp.dhis.dataapproval.DataApproval;
import org.hisp.dhis.dataapproval.DataApprovalService;
import org.hisp.dhis.dataapproval.DataApprovalWorkflow;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementGroup;
import org.hisp.dhis.dataset.CompleteDataSetRegistration;
import org.hisp.dhis.dataset.CompleteDataSetRegistrationService;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.dataset.LockExceptionStore;
import org.hisp.dhis.datavalue.AggregateAccessManager;
import org.hisp.dhis.datavalue.DataExportParams;
import org.hisp.dhis.datavalue.DataValue;
import org.hisp.dhis.datavalue.DataValueAudit;
import org.hisp.dhis.datavalue.DataValueService;
import org.hisp.dhis.dxf2.common.ImportOptions;
import org.hisp.dhis.dxf2.importsummary.ImportConflict;
import org.hisp.dhis.dxf2.importsummary.ImportCount;
import org.hisp.dhis.dxf2.importsummary.ImportStatus;
import org.hisp.dhis.dxf2.importsummary.ImportSummary;
import org.hisp.dhis.dxf2.pdfform.PdfDataEntryFormUtil;
import org.hisp.dhis.dxf2.utils.InputUtils;
import org.hisp.dhis.fileresource.FileResource;
import org.hisp.dhis.fileresource.FileResourceService;
import org.hisp.dhis.i18n.I18n;
import org.hisp.dhis.i18n.I18nManager;
import org.hisp.dhis.importexport.ImportStrategy;
import org.hisp.dhis.jdbc.batchhandler.DataValueAuditBatchHandler;
import org.hisp.dhis.jdbc.batchhandler.DataValueBatchHandler;
import org.hisp.dhis.node.types.CollectionNode;
import org.hisp.dhis.node.types.ComplexNode;
import org.hisp.dhis.node.types.RootNode;
import org.hisp.dhis.node.types.SimpleNode;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.organisationunit.OrganisationUnitGroup;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodService;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.render.DefaultRenderService;
import org.hisp.dhis.scheduling.JobConfiguration;
import org.hisp.dhis.security.acl.AclService;
import org.hisp.dhis.setting.SettingKey;
import org.hisp.dhis.setting.SystemSettingManager;
import org.hisp.dhis.system.callable.CategoryOptionComboAclCallable;
import org.hisp.dhis.system.callable.IdentifiableObjectCallable;
import org.hisp.dhis.system.callable.PeriodCallable;
import org.hisp.dhis.system.notification.NotificationLevel;
import org.hisp.dhis.system.notification.Notifier;
import org.hisp.dhis.system.util.Clock;
import org.hisp.dhis.system.util.DateUtils;
import org.hisp.dhis.system.util.ValidationUtils;
import org.hisp.dhis.user.CurrentUserService;
import org.hisp.dhis.user.User;
import org.hisp.dhis.util.ObjectUtils;
import org.hisp.quick.BatchHandler;
import org.hisp.quick.BatchHandlerFactory;
import org.hisp.staxwax.factory.XMLFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Writer;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.trimToNull;
import static org.hisp.dhis.system.notification.NotificationLevel.ERROR;
import static org.hisp.dhis.system.notification.NotificationLevel.INFO;
import static org.hisp.dhis.system.notification.NotificationLevel.WARN;
import static org.hisp.dhis.system.util.DateUtils.parseDate;
/**
* Note that a mock BatchHandler factory is being injected.
*
* @author Lars Helge Overland
*/
public class DefaultDataValueSetService
implements DataValueSetService
{
private static final Log log = LogFactory.getLog( DefaultDataValueSetService.class );

// Conflict message used when a required object is missing for data set completion.
private static final String ERROR_OBJECT_NEEDED_TO_COMPLETE = "Must be provided to complete data set";

// NOTE(review): threshold semantics are not visible in this chunk — presumably
// the number of cache misses after which caching strategy changes; confirm.
private static final int CACHE_MISS_THRESHOLD = 250;

// Spring-injected collaborators.
@Autowired
private IdentifiableObjectManager identifiableObjectManager;
@Autowired
private CategoryService categoryService;
@Autowired
private OrganisationUnitService organisationUnitService;
@Autowired
private PeriodService periodService;
@Autowired
private DataApprovalService approvalService;
@Autowired
private BatchHandlerFactory batchHandlerFactory;
@Autowired
private CompleteDataSetRegistrationService registrationService;
@Autowired
private CurrentUserService currentUserService;
@Autowired
private DataValueSetStore dataValueSetStore;
@Autowired
private SystemSettingManager systemSettingManager;
@Autowired
private LockExceptionStore lockExceptionStore;
@Autowired
private I18nManager i18nManager;
@Autowired
private Notifier notifier;
@Autowired
protected InputUtils inputUtils;
@Autowired
private CalendarService calendarService;
@Autowired
private DataValueService dataValueService;
@Autowired
private FileResourceService fileResourceService;
@Autowired
private AclService aclService;
@Autowired
private AggregateAccessManager accessManager;
// Set methods for test purposes

/**
 * Overrides the injected batch handler factory; intended for tests.
 */
public void setBatchHandlerFactory( BatchHandlerFactory batchHandlerFactory )
{
    this.batchHandlerFactory = batchHandlerFactory;
}
/**
 * Overrides the injected current user service; intended for tests.
 */
public void setCurrentUserService( CurrentUserService currentUserService )
{
    this.currentUserService = currentUserService;
}
// DataValueSet implementation

/**
 * Builds a {@link DataExportParams} instance from URL-style request
 * parameters. All identifier sets are resolved through the identifiable
 * object manager using UIDs. Periods take precedence over a start/end
 * date range; null inputs are simply skipped.
 */
@Override
public DataExportParams getFromUrl( Set<String> dataSets, Set<String> dataElementGroups, Set<String> periods, Date startDate, Date endDate,
    Set<String> organisationUnits, boolean includeChildren, Set<String> organisationUnitGroups, Set<String> attributeOptionCombos,
    boolean includeDeleted, Date lastUpdated, String lastUpdatedDuration, Integer limit, IdSchemes outputIdSchemes )
{
    DataExportParams exportParams = new DataExportParams();

    if ( dataSets != null )
    {
        exportParams.getDataSets().addAll(
            identifiableObjectManager.getObjects( DataSet.class, IdentifiableProperty.UID, dataSets ) );
    }

    if ( dataElementGroups != null )
    {
        exportParams.getDataElementGroups().addAll(
            identifiableObjectManager.getObjects( DataElementGroup.class, IdentifiableProperty.UID, dataElementGroups ) );
    }

    // ISO periods win over an explicit date range.
    if ( periods != null && !periods.isEmpty() )
    {
        exportParams.getPeriods().addAll( periodService.reloadIsoPeriods( new ArrayList<>( periods ) ) );
    }
    else if ( startDate != null && endDate != null )
    {
        exportParams.setStartDate( startDate ).setEndDate( endDate );
    }

    if ( organisationUnits != null )
    {
        exportParams.getOrganisationUnits().addAll(
            identifiableObjectManager.getObjects( OrganisationUnit.class, IdentifiableProperty.UID, organisationUnits ) );
    }

    if ( organisationUnitGroups != null )
    {
        exportParams.getOrganisationUnitGroups().addAll(
            identifiableObjectManager.getObjects( OrganisationUnitGroup.class, IdentifiableProperty.UID, organisationUnitGroups ) );
    }

    if ( attributeOptionCombos != null )
    {
        exportParams.getAttributeOptionCombos().addAll(
            identifiableObjectManager.getObjects( CategoryOptionCombo.class, IdentifiableProperty.UID, attributeOptionCombos ) );
    }

    return exportParams
        .setIncludeChildren( includeChildren )
        .setIncludeDeleted( includeDeleted )
        .setLastUpdated( lastUpdated )
        .setLastUpdatedDuration( lastUpdatedDuration )
        .setLimit( limit )
        .setOutputIdSchemes( outputIdSchemes );
}
/**
 * Validates the given export parameters, throwing
 * {@link IllegalQueryException} for the violated rule. Note that the
 * checks run in sequence and a later failing rule overwrites an earlier
 * one, so only the last violation is reported (original behavior kept).
 *
 * @throws IllegalArgumentException when params is null
 * @throws IllegalQueryException when any validation rule fails
 */
@Override
public void validate( DataExportParams params )
{
    if ( params == null )
    {
        throw new IllegalArgumentException( "Params cannot be null" );
    }

    String error = null;

    if ( params.getDataElements().isEmpty() && params.getDataSets().isEmpty() && params.getDataElementGroups().isEmpty() )
    {
        error = "At least one valid data set or data element group must be specified";
    }

    if ( !params.hasPeriods() && !params.hasStartEndDate() && !params.hasLastUpdated() && !params.hasLastUpdatedDuration() )
    {
        error = "At least one valid period, start/end dates, last updated or last updated duration must be specified";
    }

    if ( params.hasPeriods() && params.hasStartEndDate() )
    {
        error = "Both periods and start/end date cannot be specified";
    }

    if ( params.hasStartEndDate() && params.getStartDate().after( params.getEndDate() ) )
    {
        error = "Start date must be before end date";
    }

    if ( params.hasLastUpdatedDuration() && DateUtils.getDuration( params.getLastUpdatedDuration() ) == null )
    {
        error = "Duration is not valid: " + params.getLastUpdatedDuration();
    }

    if ( !params.hasOrganisationUnits() && !params.hasOrganisationUnitGroups() )
    {
        error = "At least one valid organisation unit or organisation unit group must be specified";
    }

    if ( params.isIncludeChildren() && params.hasOrganisationUnitGroups() )
    {
        error = "Children cannot be included for organisation unit groups";
    }

    if ( params.isIncludeChildren() && !params.hasOrganisationUnits() )
    {
        error = "At least one valid organisation unit must be specified when children is included";
    }

    if ( params.hasLimit() && params.getLimit() < 0 )
    {
        error = "Limit cannot be less than zero: " + params.getLimit();
    }

    if ( error != null )
    {
        log.warn( "Validation failed: " + error );
        throw new IllegalQueryException( error );
    }
}
/**
 * Verifies that every requested organisation unit lies inside the current
 * user's organisation unit hierarchy.
 *
 * @throws IllegalQueryException for the first org unit outside the hierarchy
 */
@Override
public void decideAccess( DataExportParams params )
{
    params.getOrganisationUnits().stream()
        .filter( unit -> !organisationUnitService.isInUserHierarchy( unit ) )
        .findFirst()
        .ifPresent( unit -> {
            throw new IllegalQueryException( "User is not allowed to view org unit: " + unit.getUid() );
        } );
}
// Write

/**
 * Writes the data value set matching the given parameters as XML to the
 * output stream, after access control and parameter validation.
 */
@Override
public void writeDataValueSetXml( DataExportParams params, OutputStream out )
{
    decideAccess( params );
    validate( params );
    dataValueSetStore.writeDataValueSetXml( params, getCompleteDate( params ), out );
}
/**
 * Writes the data value set matching the given parameters as JSON to the
 * output stream, after access control and parameter validation.
 */
@Override
public void writeDataValueSetJson( DataExportParams params, OutputStream out )
{
    decideAccess( params );
    validate( params );
    dataValueSetStore.writeDataValueSetJson( params, getCompleteDate( params ), out );
}
/**
 * Writes all data values updated since the given date as JSON. Note: no
 * access control or validation is applied on this path.
 */
@Override
public void writeDataValueSetJson( Date lastUpdated, OutputStream outputStream, IdSchemes idSchemes )
{
    dataValueSetStore.writeDataValueSetJson( lastUpdated, outputStream, idSchemes );
}
/**
 * Paged variant of the last-updated JSON export. Note: no access control
 * or validation is applied on this path.
 */
@Override
public void writeDataValueSetJson( Date lastUpdated, OutputStream outputStream, IdSchemes idSchemes, int pageSize, int page )
{
    dataValueSetStore.writeDataValueSetJson( lastUpdated, outputStream, idSchemes, pageSize, page );
}
/**
 * Writes the data value set matching the given parameters as CSV to the
 * writer, after access control and parameter validation.
 */
@Override
public void writeDataValueSetCsv( DataExportParams params, Writer writer )
{
    decideAccess( params );
    validate( params );
    dataValueSetStore.writeDataValueSetCsv( params, getCompleteDate( params ), writer );
}
/**
 * Returns the completion date of the single requested data set / period /
 * org unit combination, or null when the export does not target a single
 * data value set or no completion registration exists.
 */
private Date getCompleteDate( DataExportParams params )
{
    if ( !params.isSingleDataValueSet() )
    {
        return null;
    }

    CategoryOptionCombo optionCombo = categoryService.getDefaultCategoryOptionCombo(); //TODO

    CompleteDataSetRegistration registration = registrationService.getCompleteDataSetRegistration(
        params.getFirstDataSet(), params.getFirstPeriod(), params.getFirstOrganisationUnit(), optionCombo );

    return registration == null ? null : registration.getDate();
}
// Template

/**
 * Builds an empty data value set template for the given data set: one
 * entry per data element (and category option combo), optionally per
 * organisation unit. Unknown org unit identifiers are skipped silently.
 */
@Override
public RootNode getDataValueSetTemplate( DataSet dataSet, Period period, List<String> orgUnits,
    boolean writeComments, String ouScheme, String deScheme )
{
    RootNode root = new RootNode( "dataValueSet" );
    root.setNamespace( DxfNamespaces.DXF_2_0 );
    root.setComment( "Data set: " + dataSet.getDisplayName() + " (" + dataSet.getUid() + ")" );

    CollectionNode values = root.addChild( new CollectionNode( "dataValues" ) );
    values.setWrapping( false );

    if ( orgUnits.isEmpty() )
    {
        // No org units requested: emit a single template set without org unit.
        appendDataValueTemplates( values, dataSet, deScheme, null, ouScheme, period, writeComments );
    }
    else
    {
        for ( String orgUnit : orgUnits )
        {
            OrganisationUnit organisationUnit = identifiableObjectManager.search( OrganisationUnit.class, orgUnit );

            if ( organisationUnit != null )
            {
                appendDataValueTemplates( values, dataSet, deScheme, organisationUnit, ouScheme, period, writeComments );
            }
        }
    }

    return root;
}

/**
 * Appends one template entry per data element of the data set to the target node.
 */
private void appendDataValueTemplates( CollectionNode target, DataSet dataSet, String deScheme,
    OrganisationUnit organisationUnit, String ouScheme, Period period, boolean writeComments )
{
    for ( DataElement dataElement : dataSet.getDataElements() )
    {
        target.addChildren(
            getDataValueTemplate( dataElement, deScheme, organisationUnit, ouScheme, period, writeComments ).getChildren() );
    }
}
/**
 * Builds template "dataValue" nodes for a data element: one entry per
 * sorted category option combo, with empty value attributes and the
 * identifiers rendered according to the requested id schemes ("code" or
 * default UID).
 *
 * @param dataElement data element to template
 * @param deScheme id scheme name for the data element ("code" selects codes, anything else UIDs)
 * @param organisationUnit org unit to reference, may be null to omit the orgUnit attribute
 * @param ouScheme id scheme name for the org unit
 * @param period period for the entries, may be null (rendered as empty string)
 * @param comment whether to emit a descriptive comment per entry
 */
private CollectionNode getDataValueTemplate( DataElement dataElement, String deScheme,
    OrganisationUnit organisationUnit, String ouScheme, Period period, boolean comment )
{
    CollectionNode collectionNode = new CollectionNode( "dataValues" );
    collectionNode.setWrapping( false );

    // Fix: replaced the duplicated if/else branches and locale-sensitive
    // toLowerCase() comparisons with equalsIgnoreCase, which also avoids an
    // NPE when a scheme argument is null.
    boolean deByCode = IdentifiableProperty.CODE.toString().equalsIgnoreCase( deScheme );
    boolean ouByCode = IdentifiableProperty.CODE.toString().equalsIgnoreCase( ouScheme );

    for ( CategoryOptionCombo categoryOptionCombo : dataElement.getSortedCategoryOptionCombos() )
    {
        ComplexNode complexNode = collectionNode.addChild( new ComplexNode( "dataValue" ) );

        String label = dataElement.getDisplayName();

        if ( !categoryOptionCombo.isDefault() )
        {
            label += " " + categoryOptionCombo.getDisplayName();
        }

        if ( comment )
        {
            complexNode.setComment( "Data element: " + label );
        }

        SimpleNode simpleNode = complexNode.addChild(
            new SimpleNode( "dataElement", deByCode ? dataElement.getCode() : dataElement.getUid() ) );
        simpleNode.setAttribute( true );

        simpleNode = complexNode.addChild( new SimpleNode( "categoryOptionCombo", categoryOptionCombo.getUid() ) );
        simpleNode.setAttribute( true );

        simpleNode = complexNode.addChild( new SimpleNode( "period", period != null ? period.getIsoDate() : "" ) );
        simpleNode.setAttribute( true );

        if ( organisationUnit != null )
        {
            // Null identifiers are rendered as empty strings, as before.
            String ouId = ouByCode ? organisationUnit.getCode() : organisationUnit.getUid();
            simpleNode = complexNode.addChild( new SimpleNode( "orgUnit", ouId == null ? "" : ouId ) );
            simpleNode.setAttribute( true );
        }

        simpleNode = complexNode.addChild( new SimpleNode( "value", "" ) );
        simpleNode.setAttribute( true );
    }

    return collectionNode;
}
// Save

/**
 * Saves an XML data value set from the stream using default import
 * options and no job id.
 */
@Override
public ImportSummary saveDataValueSet( InputStream in )
{
    return saveDataValueSet( in, ImportOptions.getDefaultImportOptions(), null );
}
/**
 * Saves a JSON data value set from the stream using default import
 * options and no job id.
 */
@Override
public ImportSummary saveDataValueSetJson( InputStream in )
{
    return saveDataValueSetJson( in, ImportOptions.getDefaultImportOptions(), null );
}
/**
 * Saves an XML data value set from the stream with the given import
 * options and no job id.
 */
@Override
public ImportSummary saveDataValueSet( InputStream in, ImportOptions importOptions )
{
    return saveDataValueSet( in, importOptions, null );
}
/**
 * Saves a JSON data value set from the stream with the given import
 * options and no job id.
 */
@Override
public ImportSummary saveDataValueSetJson( InputStream in, ImportOptions importOptions )
{
    return saveDataValueSetJson( in, importOptions, null );
}
/**
 * Saves a CSV data value set from the stream with the given import
 * options and no job id.
 */
@Override
public ImportSummary saveDataValueSetCsv( InputStream in, ImportOptions importOptions )
{
    return saveDataValueSetCsv( in, importOptions, null );
}
/**
 * Saves an XML data value set from the (possibly compressed) stream. On
 * failure, logs the stack trace, notifies the job and returns an error
 * summary instead of throwing.
 */
@Override
public ImportSummary saveDataValueSet( InputStream in, ImportOptions importOptions, JobConfiguration id )
{
    try
    {
        in = StreamUtils.wrapAndCheckCompressionFormat( in );
        DataValueSet dataValueSet = new StreamingXmlDataValueSet( XMLFactory.getXMLReader( in ) );
        return saveDataValueSet( importOptions, id, dataValueSet );
    }
    catch ( Exception ex )
    {
        log.error( DebugUtils.getStackTrace( ex ) );
        // Consistency fix: clear stale notifications for this job before
        // reporting the failure, matching the CSV and PDF variants.
        notifier.clear( id ).notify( id, ERROR, "Process failed: " + ex.getMessage(), true );
        return new ImportSummary( ImportStatus.ERROR, "The import process failed: " + ex.getMessage() );
    }
}
/**
 * Saves a JSON data value set from the (possibly compressed) stream. On
 * failure, logs the stack trace, notifies the job and returns an error
 * summary instead of throwing.
 */
@Override
public ImportSummary saveDataValueSetJson( InputStream in, ImportOptions importOptions, JobConfiguration id )
{
    try
    {
        in = StreamUtils.wrapAndCheckCompressionFormat( in );
        DataValueSet dataValueSet = DefaultRenderService.getJsonMapper().readValue( in, DataValueSet.class );
        return saveDataValueSet( importOptions, id, dataValueSet );
    }
    catch ( Exception ex )
    {
        log.error( DebugUtils.getStackTrace( ex ) );
        // Consistency fix: clear stale notifications for this job before
        // reporting the failure, matching the CSV and PDF variants.
        notifier.clear( id ).notify( id, ERROR, "Process failed: " + ex.getMessage(), true );
        return new ImportSummary( ImportStatus.ERROR, "The import process failed: " + ex.getMessage() );
    }
}
/**
 * Saves a CSV data value set from the (possibly compressed) stream,
 * decoded as UTF-8. On failure, logs the stack trace, clears and notifies
 * the job, and returns an error summary instead of throwing.
 */
@Override
public ImportSummary saveDataValueSetCsv( InputStream in, ImportOptions importOptions, JobConfiguration id )
{
    try
    {
        in = StreamUtils.wrapAndCheckCompressionFormat( in );
        DataValueSet dataValueSet = new StreamingCsvDataValueSet( new CsvReader( in, Charset.forName( "UTF-8" ) ) );
        return saveDataValueSet( importOptions, id, dataValueSet );
    }
    catch ( Exception ex )
    {
        log.error( DebugUtils.getStackTrace( ex ) );
        notifier.clear( id ).notify( id, ERROR, "Process failed: " + ex.getMessage(), true );
        return new ImportSummary( ImportStatus.ERROR, "The import process failed: " + ex.getMessage() );
    }
}
/**
 * Saves a data value set extracted from a PDF data entry form. On
 * failure, logs the stack trace, clears and notifies the job, and returns
 * an error summary instead of throwing.
 */
@Override
public ImportSummary saveDataValueSetPdf( InputStream in, ImportOptions importOptions, JobConfiguration id )
{
    try
    {
        DataValueSet dataValueSet = PdfDataEntryFormUtil.getDataValueSet( in );
        return saveDataValueSet( importOptions, id, dataValueSet );
    }
    // Note: unlike the XML/JSON/CSV variants, only RuntimeException is caught
    // here; checked exceptions would propagate to the caller.
    catch ( RuntimeException ex )
    {
        log.error( DebugUtils.getStackTrace( ex ) );
        notifier.clear( id ).notify( id, ERROR, "Process failed: " + ex.getMessage(), true );
        return new ImportSummary( ImportStatus.ERROR, "The import process failed: " + ex.getMessage() );
    }
}
/**
* There are specific id schemes for data elements and organisation units and
* a generic id scheme for all objects. The specific id schemes will take
* precedence over the generic id scheme. The generic id scheme also applies
* to data set and category option combo.
* <p>
* The id schemes uses the following order of precedence:
* <p>
* <ul>
* <li>Id scheme from the data value set</li>
* <li>Id scheme from the import options</li>
* <li>Default id scheme which is UID</li>
* <ul>
* <p>
* If id scheme is specific in the data value set, any id schemes in the import
* options will be ignored.
*
* @param importOptions
* @param id
* @param dataValueSet
* @return
*/
private ImportSummary saveDataValueSet( ImportOptions importOptions, JobConfiguration id, DataValueSet dataValueSet )
{
importOptions = ObjectUtils.firstNonNull( importOptions, ImportOptions.getDefaultImportOptions() );
Clock clock = new Clock( log ).startClock().logTime( "Starting data value import, options: " + importOptions );
NotificationLevel notificationLevel = importOptions.getNotificationLevel( INFO );
notifier.clear( id ).notify( id, notificationLevel, "Process started" );
ImportSummary summary = new ImportSummary()
.setImportOptions( importOptions );
boolean isIso8601 = calendarService.getSystemCalendar().isIso8601();
boolean skipLockExceptionCheck = !lockExceptionStore.anyExists();
log.info( String.format( "Is ISO calendar: %b, skip lock exception check: %b", isIso8601, skipLockExceptionCheck ) );
I18n i18n = i18nManager.getI18n();
final User currentUser = currentUserService.getCurrentUser();
final String currentUserName = currentUser.getUsername();
// Get import options
log.info( "Import options: " + importOptions );
IdScheme dvSetIdScheme = IdScheme.from( dataValueSet.getIdSchemeProperty() );
IdScheme dvSetDataElementIdScheme = IdScheme.from( dataValueSet.getDataElementIdSchemeProperty() );
IdScheme dvSetOrgUnitIdScheme = IdScheme.from( dataValueSet.getOrgUnitIdSchemeProperty() );
IdScheme dvSetCategoryOptComboIdScheme = IdScheme.from( dataValueSet.getCategoryOptionComboIdSchemeProperty() );
IdScheme dvSetDataSetIdScheme = IdScheme.from( dataValueSet.getDataSetIdSchemeProperty() );
log.info( "Data value set identifier scheme: " + dvSetIdScheme + ", data element: " + dvSetDataElementIdScheme +
", org unit: " + dvSetOrgUnitIdScheme + ", category option combo: " + dvSetCategoryOptComboIdScheme + ", data set: " + dvSetDataSetIdScheme );
IdScheme idScheme = dvSetIdScheme.isNotNull() ? dvSetIdScheme : importOptions.getIdSchemes().getIdScheme();
IdScheme dataElementIdScheme = dvSetDataElementIdScheme.isNotNull() ? dvSetDataElementIdScheme : importOptions.getIdSchemes().getDataElementIdScheme();
IdScheme orgUnitIdScheme = dvSetOrgUnitIdScheme.isNotNull() ? dvSetOrgUnitIdScheme : importOptions.getIdSchemes().getOrgUnitIdScheme();
IdScheme categoryOptComboIdScheme = dvSetCategoryOptComboIdScheme.isNotNull() ? dvSetCategoryOptComboIdScheme : importOptions.getIdSchemes().getCategoryOptionComboIdScheme();
IdScheme dataSetIdScheme = dvSetDataSetIdScheme.isNotNull() ? dvSetDataSetIdScheme : importOptions.getIdSchemes().getDataSetIdScheme();
log.info( "Identifier scheme: " + idScheme + ", data element: " + dataElementIdScheme +
", org unit: " + orgUnitIdScheme + ", category option combo: " + categoryOptComboIdScheme + ", data set: " + dataSetIdScheme );
ImportStrategy strategy = dataValueSet.getStrategy() != null ?
ImportStrategy.valueOf( dataValueSet.getStrategy() ) : importOptions.getImportStrategy();
boolean dryRun = dataValueSet.getDryRun() != null ? dataValueSet.getDryRun() : importOptions.isDryRun();
boolean skipExistingCheck = importOptions.isSkipExistingCheck();
boolean strictPeriods = importOptions.isStrictPeriods() || (Boolean) systemSettingManager.getSystemSetting( SettingKey.DATA_IMPORT_STRICT_PERIODS );
boolean strictDataElements = importOptions.isStrictDataElements() || (Boolean) systemSettingManager.getSystemSetting( SettingKey.DATA_IMPORT_STRICT_DATA_ELEMENTS );
boolean strictCategoryOptionCombos = importOptions.isStrictCategoryOptionCombos() || (Boolean) systemSettingManager.getSystemSetting( SettingKey.DATA_IMPORT_STRICT_CATEGORY_OPTION_COMBOS );
boolean strictAttrOptionCombos = importOptions.isStrictAttributeOptionCombos() || (Boolean) systemSettingManager.getSystemSetting( SettingKey.DATA_IMPORT_STRICT_ATTRIBUTE_OPTION_COMBOS );
boolean strictOrgUnits = importOptions.isStrictOrganisationUnits() || (Boolean) systemSettingManager.getSystemSetting( SettingKey.DATA_IMPORT_STRICT_ORGANISATION_UNITS );
boolean requireCategoryOptionCombo = importOptions.isRequireCategoryOptionCombo() || (Boolean) systemSettingManager.getSystemSetting( SettingKey.DATA_IMPORT_REQUIRE_CATEGORY_OPTION_COMBO );
boolean requireAttrOptionCombo = importOptions.isRequireAttributeOptionCombo() || (Boolean) systemSettingManager.getSystemSetting( SettingKey.DATA_IMPORT_REQUIRE_ATTRIBUTE_OPTION_COMBO );
boolean forceDataInput = inputUtils.canForceDataInput( currentUser, importOptions.isForce() );
// Create meta-data maps
CachingMap<String, DataElement> dataElementMap = new CachingMap<>();
CachingMap<String, OrganisationUnit> orgUnitMap = new CachingMap<>();
CachingMap<String, CategoryOptionCombo> optionComboMap = new CachingMap<>();
CachingMap<String, DataSet> dataElementDataSetMap = new CachingMap<>();
CachingMap<String, Period> periodMap = new CachingMap<>();
CachingMap<String, Set<PeriodType>> dataElementPeriodTypesMap = new CachingMap<>();
CachingMap<String, Set<CategoryOptionCombo>> dataElementCategoryOptionComboMap = new CachingMap<>();
CachingMap<String, Set<CategoryOptionCombo>> dataElementAttrOptionComboMap = new CachingMap<>();
CachingMap<String, Boolean> dataElementOrgUnitMap = new CachingMap<>();
CachingMap<String, Boolean> dataSetLockedMap = new CachingMap<>();
CachingMap<String, Period> dataElementLatestFuturePeriodMap = new CachingMap<>();
CachingMap<String, Boolean> orgUnitInHierarchyMap = new CachingMap<>();
CachingMap<String, DateRange> attrOptionComboDateRangeMap = new CachingMap<>();
CachingMap<String, Boolean> attrOptionComboOrgUnitMap = new CachingMap<>();
CachingMap<String, Optional<Set<String>>> dataElementOptionsMap = new CachingMap<>();
CachingMap<String, Boolean> approvalMap = new CachingMap<>();
CachingMap<String, Boolean> lowestApprovalLevelMap = new CachingMap<>();
CachingMap<String, Boolean> periodOpenForDataElement = new CachingMap<>();
// Get meta-data maps
IdentifiableObjectCallable<DataElement> dataElementCallable = new IdentifiableObjectCallable<>(
identifiableObjectManager, DataElement.class, dataElementIdScheme, null );
IdentifiableObjectCallable<OrganisationUnit> orgUnitCallable = new IdentifiableObjectCallable<>(
identifiableObjectManager, OrganisationUnit.class, orgUnitIdScheme, trimToNull( dataValueSet.getOrgUnit() ) );
IdentifiableObjectCallable<CategoryOptionCombo> categoryOptionComboCallable = new CategoryOptionComboAclCallable(
categoryService, categoryOptComboIdScheme, null );
IdentifiableObjectCallable<CategoryOptionCombo> attributeOptionComboCallable = new CategoryOptionComboAclCallable(
categoryService, categoryOptComboIdScheme, null );
IdentifiableObjectCallable<Period> periodCallable = new PeriodCallable( periodService, null, trimToNull( dataValueSet.getPeriod() ) );
// Heat caches
if ( importOptions.isPreheatCacheDefaultFalse() )
{
dataElementMap.load( identifiableObjectManager.getAll( DataElement.class ), o -> o.getPropertyValue( dataElementIdScheme ) );
orgUnitMap.load( identifiableObjectManager.getAll( OrganisationUnit.class ), o -> o.getPropertyValue( orgUnitIdScheme ) );
optionComboMap.load( identifiableObjectManager.getAll( CategoryOptionCombo.class ), o -> o.getPropertyValue( categoryOptComboIdScheme ) );
}
// Get outer meta-data
DataSet dataSet = dataValueSet.getDataSet() != null ? identifiableObjectManager.getObject( DataSet.class, dataSetIdScheme, dataValueSet.getDataSet() ) : null;
Date completeDate = parseDate( dataValueSet.getCompleteDate() );
Period outerPeriod = periodMap.get( trimToNull( dataValueSet.getPeriod() ), periodCallable );
OrganisationUnit outerOrgUnit = orgUnitMap.get( trimToNull( dataValueSet.getOrgUnit() ), orgUnitCallable );
CategoryOptionCombo fallbackCategoryOptionCombo = categoryService.getDefaultCategoryOptionCombo();
CategoryOptionCombo outerAttrOptionCombo = null;
Set<DataElement> dataSetDataElements = dataSet != null ? dataSet.getDataElements() : new HashSet<>();
if ( dataValueSet.getAttributeOptionCombo() != null )
{
outerAttrOptionCombo = optionComboMap.get( trimToNull( dataValueSet.getAttributeOptionCombo() ), attributeOptionComboCallable.setId( trimToNull( dataValueSet.getAttributeOptionCombo() ) ) );
}
else if ( dataValueSet.getAttributeCategoryOptions() != null )
{
outerAttrOptionCombo = inputUtils.getAttributeOptionCombo( dataSet.getCategoryCombo(), new HashSet<String>( dataValueSet.getAttributeCategoryOptions() ), idScheme );
}
// Validation
if ( dataSet == null && trimToNull( dataValueSet.getDataSet() ) != null )
{
summary.getConflicts().add( new ImportConflict( dataValueSet.getDataSet(), "Data set not found or not accessible" ) );
summary.setStatus( ImportStatus.ERROR );
}
if ( dataSet != null && !aclService.canDataWrite( currentUser, dataSet ) )
{
summary.getConflicts().add( new ImportConflict( dataValueSet.getDataSet(), "User does not have write access for DataSet: " + dataSet.getUid() ) );
summary.setStatus( ImportStatus.ERROR );
}
if ( dataSet == null && strictDataElements )
{
summary.getConflicts().add( new ImportConflict( "DATA_IMPORT_STRICT_DATA_ELEMENTS", "A valid datset is required" ) );
summary.setStatus( ImportStatus.ERROR );
}
if ( outerOrgUnit == null && trimToNull( dataValueSet.getOrgUnit() ) != null )
{
summary.getConflicts().add( new ImportConflict( dataValueSet.getOrgUnit(), "Org unit not found or not accessible" ) );
summary.setStatus( ImportStatus.ERROR );
}
if ( outerAttrOptionCombo == null && trimToNull( dataValueSet.getAttributeOptionCombo() ) != null )
{
summary.getConflicts().add( new ImportConflict( dataValueSet.getAttributeOptionCombo(), "Attribute option combo not found or not accessible" ) );
summary.setStatus( ImportStatus.ERROR );
}
if ( ImportStatus.ERROR.equals( summary.getStatus() ) )
{
summary.setDescription( "Import process was aborted" );
notifier.notify( id, WARN, "Import process aborted", true ).addJobSummary( id, summary, ImportSummary.class );
dataValueSet.close();
return summary;
}
if ( dataSet != null && completeDate != null )
{
notifier.notify( id, notificationLevel, "Completing data set" );
handleComplete( dataSet, completeDate, outerPeriod, outerOrgUnit, fallbackCategoryOptionCombo, summary ); //TODO
}
else
{
summary.setDataSetComplete( Boolean.FALSE.toString() );
}
final Set<OrganisationUnit> currentOrgUnits = currentUserService.getCurrentUserOrganisationUnits();
BatchHandler<DataValue> dataValueBatchHandler = batchHandlerFactory.createBatchHandler( DataValueBatchHandler.class ).init();
BatchHandler<DataValueAudit> auditBatchHandler = batchHandlerFactory.createBatchHandler( DataValueAuditBatchHandler.class ).init();
int importCount = 0;
int updateCount = 0;
int deleteCount = 0;
int totalCount = 0;
// Data values
Date now = new Date();
clock.logTime( "Validated outer meta-data" );
notifier.notify( id, notificationLevel, "Importing data values" );
while ( dataValueSet.hasNextDataValue() )
{
org.hisp.dhis.dxf2.datavalue.DataValue dataValue = dataValueSet.getNextDataValue();
totalCount++;
final DataElement dataElement =
dataElementMap.get( trimToNull( dataValue.getDataElement() ), dataElementCallable.setId( trimToNull( dataValue.getDataElement() ) ) );
final Period period = outerPeriod != null ? outerPeriod :
periodMap.get( trimToNull( dataValue.getPeriod() ), periodCallable.setId( trimToNull( dataValue.getPeriod() ) ) );
final OrganisationUnit orgUnit = outerOrgUnit != null ? outerOrgUnit :
orgUnitMap.get( trimToNull( dataValue.getOrgUnit() ), orgUnitCallable.setId( trimToNull( dataValue.getOrgUnit() ) ) );
CategoryOptionCombo categoryOptionCombo =
optionComboMap.get( trimToNull( dataValue.getCategoryOptionCombo() ), categoryOptionComboCallable.setId( trimToNull( dataValue.getCategoryOptionCombo() ) ) );
CategoryOptionCombo attrOptionCombo = outerAttrOptionCombo != null ? outerAttrOptionCombo :
optionComboMap.get( trimToNull( dataValue.getAttributeOptionCombo() ), attributeOptionComboCallable.setId( trimToNull( dataValue.getAttributeOptionCombo() ) ) );
// Potentially heat caches
if ( !dataElementMap.isCacheLoaded() && dataElementMap.getCacheMissCount() > CACHE_MISS_THRESHOLD )
{
dataElementMap.load( identifiableObjectManager.getAll( DataElement.class ), o -> o.getPropertyValue( dataElementIdScheme ) );
log.info( "Data element cache heated after cache miss threshold reached" );
}
if ( !orgUnitMap.isCacheLoaded() && orgUnitMap.getCacheMissCount() > CACHE_MISS_THRESHOLD )
{
orgUnitMap.load( identifiableObjectManager.getAll( OrganisationUnit.class ), o -> o.getPropertyValue( orgUnitIdScheme ) );
log.info( "Org unit cache heated after cache miss threshold reached" );
}
if ( !optionComboMap.isCacheLoaded() && optionComboMap.getCacheMissCount() > CACHE_MISS_THRESHOLD )
{
optionComboMap.load( identifiableObjectManager.getAll( CategoryOptionCombo.class ), o -> o.getPropertyValue(
categoryOptComboIdScheme ) );
log.info( "Category Option Combo cache heated after cache miss threshold reached" );
}
// Validation
if ( dataElement == null )
{
summary.getConflicts().add( new ImportConflict( dataValue.getDataElement(), "Data element not found or not accessible" ) );
continue;
}
if ( period == null )
{
summary.getConflicts().add( new ImportConflict( dataValue.getPeriod(), "Period not valid" ) );
continue;
}
if ( orgUnit == null )
{
summary.getConflicts().add( new ImportConflict( dataValue.getOrgUnit(), "Organisation unit not found or not accessible" ) );
continue;
}
if ( categoryOptionCombo == null && trimToNull( dataValue.getCategoryOptionCombo() ) != null )
{
summary.getConflicts().add( new ImportConflict( dataValue.getCategoryOptionCombo(), "Category option combo not found or not accessible for writing data" ) );
continue;
}
if ( categoryOptionCombo != null )
{
List<String> errors = accessManager.canWrite( currentUser, categoryOptionCombo );
if ( !errors.isEmpty() )
{
summary.getConflicts().addAll( errors.stream().map( s -> new ImportConflict( "dataValueSet", s ) ).collect( Collectors.toList() ) );
continue;
}
}
if ( attrOptionCombo == null && trimToNull( dataValue.getAttributeOptionCombo() ) != null )
{
summary.getConflicts().add( new ImportConflict( dataValue.getAttributeOptionCombo(), "Attribute option combo not found or not accessible for writing data" ) );
continue;
}
if ( attrOptionCombo != null )
{
List<String> errors = accessManager.canWrite( currentUser, attrOptionCombo );
if ( !errors.isEmpty() )
{
summary.getConflicts().addAll( errors.stream().map( s -> new ImportConflict( "dataValueSet", s ) ).collect( Collectors.toList() ) );
continue;
}
}
boolean inUserHierarchy = orgUnitInHierarchyMap.get( orgUnit.getUid(), () -> orgUnit.isDescendant( currentOrgUnits ) );
if ( !inUserHierarchy )
{
summary.getConflicts().add( new ImportConflict( orgUnit.getUid(), "Organisation unit not in hierarchy of current user: " + currentUserName ) );
continue;
}
if ( dataValue.isNullValue() && !dataValue.isDeletedValue() )
{
summary.getConflicts().add( new ImportConflict( "Value", "Data value or comment not specified for data element: " + dataElement.getUid() ) );
continue;
}
dataValue.setValueForced(
ValidationUtils.normalizeBoolean( dataValue.getValue(), dataElement.getValueType() ) );
String valueValid = ValidationUtils.dataValueIsValid( dataValue.getValue(), dataElement );
if ( valueValid != null )
{
summary.getConflicts().add( new ImportConflict( dataValue.getValue(), i18n.getString( valueValid ) + ", must match data element type: " + dataElement.getUid() ) );
continue;
}
String commentValid = ValidationUtils.commentIsValid( dataValue.getComment() );
if ( commentValid != null )
{
summary.getConflicts().add( new ImportConflict( "Comment", i18n.getString( commentValid ) ) );
continue;
}
Optional<Set<String>> optionCodes = dataElementOptionsMap.get( dataElement.getUid(), () -> dataElement.hasOptionSet() ?
Optional.of( dataElement.getOptionSet().getOptionCodesAsSet() ) : Optional.empty() );
if ( optionCodes.isPresent() && !optionCodes.get().contains( dataValue.getValue() ) )
{
summary.getConflicts().add( new ImportConflict( dataValue.getValue(), "Data value is not a valid option of the data element option set: " + dataElement.getUid() ) );
continue;
}
// Constraints
if ( categoryOptionCombo == null )
{
if ( requireCategoryOptionCombo )
{
summary.getConflicts().add( new ImportConflict( dataValue.getValue(), "Category option combo is required but is not specified" ) );
continue;
}
else
{
categoryOptionCombo = fallbackCategoryOptionCombo;
}
}
if ( attrOptionCombo == null )
{
if ( requireAttrOptionCombo )
{
summary.getConflicts().add( new ImportConflict( dataValue.getValue(), "Attribute option combo is required but is not specified" ) );
continue;
}
else
{
attrOptionCombo = fallbackCategoryOptionCombo;
}
}
if ( strictPeriods && !dataElementPeriodTypesMap.get( dataElement.getUid(),
() -> dataElement.getPeriodTypes() ).contains( period.getPeriodType() ) )
{
summary.getConflicts().add( new ImportConflict( dataValue.getPeriod(),
"Period type of period: " + period.getIsoDate() + " not valid for data element: " + dataElement.getUid() ) );
continue;
}
if ( strictDataElements && !dataSetDataElements.contains( dataElement ) )
{
summary.getConflicts().add( new ImportConflict( "DATA_IMPORT_STRICT_DATA_ELEMENTS",
"Data element: " + dataValue.getDataElement() + " is not part of dataset: " + dataSet.getUid() ) );
continue;
}
if ( strictCategoryOptionCombos && !dataElementCategoryOptionComboMap.get( dataElement.getUid(),
() -> dataElement.getCategoryOptionCombos() ).contains( categoryOptionCombo ) )
{
summary.getConflicts().add( new ImportConflict( categoryOptionCombo.getUid(),
"Category option combo: " + categoryOptionCombo.getUid() + " must be part of category combo of data element: " + dataElement.getUid() ) );
continue;
}
if ( strictAttrOptionCombos && !dataElementAttrOptionComboMap.get( dataElement.getUid(),
() -> dataElement.getDataSetCategoryOptionCombos() ).contains( attrOptionCombo ) )
{
summary.getConflicts().add( new ImportConflict( attrOptionCombo.getUid(),
"Attribute option combo: " + attrOptionCombo.getUid() + " must be part of category combo of data sets of data element: " + dataElement.getUid() ) );
continue;
}
if ( strictOrgUnits && BooleanUtils.isFalse( dataElementOrgUnitMap.get( dataElement.getUid() + orgUnit.getUid(),
() -> orgUnit.hasDataElement( dataElement ) ) ) )
{
summary.getConflicts().add( new ImportConflict( orgUnit.getUid(),
"Data element: " + dataElement.getUid() + " must be assigned through data sets to organisation unit: " + orgUnit.getUid() ) );
continue;
}
boolean zeroInsignificant = ValidationUtils.dataValueIsZeroAndInsignificant( dataValue.getValue(), dataElement );
if ( zeroInsignificant )
{
summary.getConflicts().add( new ImportConflict( dataValue.getValue(), "Value is zero and not significant, must match data element: " + dataElement.getUid() ) );
continue;
}
String storedByValid = ValidationUtils.storedByIsValid( dataValue.getStoredBy() );
if ( storedByValid != null )
{
summary.getConflicts().add( new ImportConflict( dataValue.getStoredBy(), i18n.getString( storedByValid ) ) );
continue;
}
String storedBy = dataValue.getStoredBy() == null || dataValue.getStoredBy().trim().isEmpty() ? currentUserName : dataValue.getStoredBy();
final CategoryOptionCombo aoc = attrOptionCombo;
DateRange aocDateRange = attrOptionComboDateRangeMap.get( attrOptionCombo.getUid(), () -> aoc.getDateRange() );
if ( (aocDateRange.getStartDate() != null && aocDateRange.getStartDate().compareTo( period.getStartDate() ) > 0)
|| (aocDateRange.getEndDate() != null && aocDateRange.getEndDate().compareTo( period.getEndDate() ) < 0) )
{
summary.getConflicts().add( new ImportConflict( orgUnit.getUid(),
"Period: " + period.getIsoDate() + " is not within date range of attribute option combo: " + attrOptionCombo.getUid() ) );
continue;
}
if ( !attrOptionComboOrgUnitMap.get( attrOptionCombo.getUid() + orgUnit.getUid(), () ->
{
Set<OrganisationUnit> aocOrgUnits = aoc.getOrganisationUnits();
return aocOrgUnits == null || orgUnit.isDescendant( aocOrgUnits );
} ) )
{
summary.getConflicts().add( new ImportConflict( orgUnit.getUid(),
"Organisation unit: " + orgUnit.getUid() + " is not valid for attribute option combo: " + attrOptionCombo.getUid() ) );
continue;
}
final DataSet approvalDataSet = dataSet != null ? dataSet : dataElementDataSetMap.get( dataElement.getUid(),
() -> dataElement.getApprovalDataSet() );
if ( approvalDataSet != null && !forceDataInput ) // Data element is assigned to at least one data set
{
if ( dataSetLockedMap.get( approvalDataSet.getUid() + period.getUid() + orgUnit.getUid(),
() -> isLocked( currentUser, approvalDataSet, period, orgUnit, skipLockExceptionCheck ) ) )
{
summary.getConflicts().add( new ImportConflict( period.getIsoDate(), "Current date is past expiry days for period " +
period.getIsoDate() + " and data set: " + approvalDataSet.getUid() ) );
continue;
}
Period latestFuturePeriod = dataElementLatestFuturePeriodMap.get( dataElement.getUid(), () -> dataElement.getLatestOpenFuturePeriod() );
if ( period.isAfter( latestFuturePeriod ) && isIso8601 )
{
summary.getConflicts().add( new ImportConflict( period.getIsoDate(), "Period: " +
period.getIsoDate() + " is after latest open future period: " + latestFuturePeriod.getIsoDate() + " for data element: " + dataElement.getUid() ) );
continue;
}
DataApprovalWorkflow workflow = approvalDataSet.getWorkflow();
if ( workflow != null )
{
final String workflowPeriodAoc = workflow.getUid() + period.getUid() + attrOptionCombo.getUid();
if ( approvalMap.get( orgUnit.getUid() + workflowPeriodAoc, () ->
{
DataApproval lowestApproval = DataApproval.getLowestApproval( new DataApproval( null, workflow, period, orgUnit, aoc ) );
return lowestApproval != null && lowestApprovalLevelMap.get( lowestApproval.getDataApprovalLevel().getUid() + lowestApproval.getOrganisationUnit().getUid() + workflowPeriodAoc,
() -> approvalService.getDataApproval( lowestApproval ) != null );
} ) )
{
summary.getConflicts().add( new ImportConflict( orgUnit.getUid(),
"Data is already approved for data set: " + approvalDataSet.getUid() + " period: " + period.getIsoDate()
+ " organisation unit: " + orgUnit.getUid() + " attribute option combo: " + attrOptionCombo.getUid() ) );
continue;
}
}
}
if ( approvalDataSet != null && !forceDataInput && !approvalDataSet.isDataInputPeriodAndDateAllowed( period, new Date() ) )
{
summary.getConflicts().add( new ImportConflict( orgUnit.getUid(),
"Period: " + period.getIsoDate() + " is not open for this data set at this time: " + approvalDataSet.getUid() ) );
continue;
}
if ( !forceDataInput && !periodOpenForDataElement.get( dataElement.getUid() + period.getIsoDate(), () -> dataElement.isDataInputAllowedForPeriodAndDate( period, new Date() ) ) )
{
summary.getConflicts().add( new ImportConflict( orgUnit.getUid(), "Period " + period.getName() + " does not conform to the open periods of associated data sets" ) );
continue;
}
DataValue actualDataValue = null;
if ( strategy.isDelete() && dataElement.isFileType() )
{
actualDataValue = dataValueService.getDataValue( dataElement, period, orgUnit, categoryOptionCombo, attrOptionCombo );
if ( actualDataValue == null )
{
summary.getConflicts().add( new ImportConflict( dataElement.getUid(), "No data value for file resource exist for the given combination" ) );
continue;
}
}
// Create data value
DataValue internalValue = new DataValue();
internalValue.setDataElement( dataElement );
internalValue.setPeriod( period );
internalValue.setSource( orgUnit );
internalValue.setCategoryOptionCombo( categoryOptionCombo );
internalValue.setAttributeOptionCombo( attrOptionCombo );
internalValue.setValue( trimToNull( dataValue.getValue() ) );
internalValue.setStoredBy( storedBy );
internalValue.setCreated( dataValue.hasCreated() ? parseDate( dataValue.getCreated() ) : now );
internalValue.setLastUpdated( dataValue.hasLastUpdated() ? parseDate( dataValue.getLastUpdated() ) : now );
internalValue.setComment( trimToNull( dataValue.getComment() ) );
internalValue.setFollowup( dataValue.getFollowup() );
internalValue.setDeleted( BooleanUtils.isTrue( dataValue.getDeleted() ) );
// Save, update or delete data value
DataValue existingValue = !skipExistingCheck ? dataValueBatchHandler.findObject( internalValue ) : null;
// Check soft deleted data values on update and import
if ( !skipExistingCheck && existingValue != null && !existingValue.isDeleted() )
{
if ( strategy.isCreateAndUpdate() || strategy.isUpdate() )
{
DataValueAudit auditValue = new DataValueAudit( internalValue, existingValue.getValue(), storedBy, AuditType.UPDATE );
if ( internalValue.isNullValue() || internalValue.isDeleted() )
{
internalValue.setDeleted( true );
auditValue.setAuditType( AuditType.DELETE );
deleteCount++;
}
else
{
updateCount++;
}
if ( !dryRun )
{
dataValueBatchHandler.updateObject( internalValue );
auditBatchHandler.addObject( auditValue );
if ( dataElement.isFileType() )
{
FileResource fr = fileResourceService.getFileResource( internalValue.getValue() );
fr.setAssigned( true );
fileResourceService.updateFileResource( fr );
}
}
}
else if ( strategy.isDelete() )
{
DataValueAudit auditValue = new DataValueAudit( internalValue, existingValue.getValue(), storedBy, AuditType.DELETE );
internalValue.setDeleted( true );
deleteCount++;
if ( !dryRun )
{
if ( dataElement.isFileType() )
{
FileResource fr = fileResourceService.getFileResource( actualDataValue.getValue() );
fr.setAssigned( false );
fileResourceService.updateFileResource( fr );
}
dataValueBatchHandler.updateObject( internalValue );
auditBatchHandler.addObject( auditValue );
}
}
}
else
{
if ( strategy.isCreateAndUpdate() || strategy.isCreate() )
{
if ( !internalValue.isNullValue() ) // Ignore null values
{
if ( existingValue != null && existingValue.isDeleted() )
{
importCount++;
if ( !dryRun )
{
dataValueBatchHandler.updateObject( internalValue );
if ( dataElement.isFileType() )
{
FileResource fr = fileResourceService.getFileResource( internalValue.getValue() );
fr.setAssigned( true );
fileResourceService.updateFileResource( fr );
}
}
}
else
{
boolean added = false;
if ( !dryRun )
{
added = dataValueBatchHandler.addObject( internalValue );
if ( added && dataElement.isFileType() )
{
FileResource fr = fileResourceService.getFileResource( internalValue.getValue() );
fr.setAssigned( true );
fileResourceService.updateFileResource( fr );
}
}
if ( dryRun || added )
{
importCount++;
}
}
}
}
}
}
dataValueBatchHandler.flush();
auditBatchHandler.flush();
int ignores = totalCount - importCount - updateCount - deleteCount;
summary.setImportCount( new ImportCount( importCount, updateCount, ignores, deleteCount ) );
summary.setStatus( summary.getConflicts().isEmpty() ? ImportStatus.SUCCESS : ImportStatus.WARNING );
summary.setDescription( "Import process completed successfully" );
clock.logTime( "Data value import done, total: " + totalCount + ", import: " + importCount + ", update: " + updateCount + ", delete: " + deleteCount );
notifier.notify( id, notificationLevel, "Import done", true ).addJobSummary( id, notificationLevel, summary, ImportSummary.class );
dataValueSet.close();
return summary;
}
// Supportive methods

/**
 * Registers the data set as complete for the given period, organisation unit
 * and attribute option combo, or updates the existing registration if one is
 * already present. Validation failures are reported as conflicts on the given
 * import summary.
 *
 * @param dataSet the data set being completed.
 * @param completeDate the date of completion.
 * @param period the period for which the data set is completed; a conflict is
 *        added and the method returns if null.
 * @param orgUnit the organisation unit; a conflict is added and the method
 *        returns if null.
 * @param attributeOptionCombo the attribute option combo of the registration.
 * @param summary the import summary to record conflicts and completion on.
 */
private void handleComplete( DataSet dataSet, Date completeDate, Period period, OrganisationUnit orgUnit,
    CategoryOptionCombo attributeOptionCombo, ImportSummary summary )
{
    if ( orgUnit == null )
    {
        summary.getConflicts().add( new ImportConflict( OrganisationUnit.class.getSimpleName(), ERROR_OBJECT_NEEDED_TO_COMPLETE ) );
        return;
    }

    if ( period == null )
    {
        summary.getConflicts().add( new ImportConflict( Period.class.getSimpleName(), ERROR_OBJECT_NEEDED_TO_COMPLETE ) );
        return;
    }

    // Reload to obtain a persistent period instance before querying
    period = periodService.reloadPeriod( period );

    CompleteDataSetRegistration completeAlready = registrationService
        .getCompleteDataSetRegistration( dataSet, period, orgUnit, attributeOptionCombo );

    String username = currentUserService.getCurrentUsername();

    if ( completeAlready != null )
    {
        // Update the existing registration in place
        completeAlready.setStoredBy( username );
        completeAlready.setDate( completeDate );

        registrationService.updateCompleteDataSetRegistration( completeAlready );
    }
    else
    {
        // Bug fix: the previous code dereferenced completeAlready here
        // (completeAlready.getLastUpdatedBy() / completeAlready.getCompleted()),
        // which is guaranteed null in this branch and threw a
        // NullPointerException on every first-time completion. Use the current
        // username as last-updated-by and mark the registration as completed.
        CompleteDataSetRegistration registration = new CompleteDataSetRegistration( dataSet, period, orgUnit,
            attributeOptionCombo, completeDate, username, username, completeDate, true );

        registrationService.saveCompleteDataSetRegistration( registration );
    }

    summary.setDataSetComplete( DateUtils.getMediumDateString( completeDate ) );
}
/**
 * Checks whether data entry for the given data set, period and organisation
 * unit is locked for the given user. When the lock exception check is not
 * skipped, an existing lock exception for the combination overrides the lock.
 *
 * @param user the user for whom to evaluate the lock.
 * @param dataSet the data set.
 * @param period the period.
 * @param organisationUnit the organisation unit.
 * @param skipLockExceptionCheck whether to skip lock exception check.
 * @return true if the data set is locked for the user and, unless the check is
 *         skipped, no lock exception exists for the combination.
 */
private boolean isLocked( User user, DataSet dataSet, Period period, OrganisationUnit organisationUnit, boolean skipLockExceptionCheck )
{
    return dataSet.isLocked( user, period, null ) && (skipLockExceptionCheck || lockExceptionStore.getCount( dataSet, period, organisationUnit ) == 0L);
}
}
|
package eu.ydp.empiria.player.client.components;
import java.util.ArrayList;
import java.util.List;
import com.google.gwt.dom.client.Document;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Panel;
import com.google.gwt.user.client.ui.PopupPanel;
import com.google.gwt.user.client.ui.PushButton;
import com.google.gwt.user.client.ui.Widget;
/**
 * A custom drop-down ("list box") widget. The collapsed state shows the
 * currently selected option's base body; clicking it opens a popup listing the
 * popup bodies of all options. Appearance is driven entirely by the
 * qp-exlistbox-* CSS class names.
 */
public class ExListBox extends FlowPanel {

    /** Margin in pixels kept between the popup and the window edges. */
    private static final int MARGIN = 8;

    // Popup (expanded) structure: container > outer > {header, inner > contents, footer}
    protected PopupPanel popupContainer;
    protected Panel popupOuterContainer;
    protected Panel popupOuterHeader;
    protected Panel popupOuterFooter;
    protected Panel popupInnerContainer;
    protected Panel popupContents;

    // Base (collapsed) structure: container > {contents, button}
    protected Panel baseContainer;
    protected Panel baseContents;
    protected PushButton baseButton;

    protected List<Widget> popupBodies;
    protected List<Widget> baseBodies;
    protected List<ExListBoxOption> options;

    protected int selectedIndex = 0;
    protected boolean enabled = true;
    protected ExListBoxChangeListener listener;

    public ExListBox(){
        super();
        setStyleName("qp-exlistbox");

        popupContents = new FlowPanel();
        popupContents.setStyleName("qp-exlistbox-popup-contents");

        popupOuterHeader = new FlowPanel();
        popupOuterHeader.setStyleName("qp-exlistbox-popup-outer-header");

        popupOuterFooter = new FlowPanel();
        popupOuterFooter.setStyleName("qp-exlistbox-popup-outer-footer");

        popupInnerContainer = new FlowPanel();
        popupInnerContainer.setStyleName("qp-exlistbox-popup-inner-container");
        popupInnerContainer.add(popupContents);

        popupOuterContainer = new FlowPanel();
        popupOuterContainer.setStyleName("qp-exlistbox-popup-outer-container");
        popupOuterContainer.add(popupOuterHeader);
        popupOuterContainer.add(popupInnerContainer);
        popupOuterContainer.add(popupOuterFooter);

        // auto-hide popup: clicking outside closes it
        popupContainer = new PopupPanel(true);
        popupContainer.setStyleName("qp-exlistbox-popup-container");
        popupContainer.add(popupOuterContainer);

        baseButton = new PushButton(" ");
        baseButton.setStyleName("qp-exlistbox-base-button");

        baseContents = new FlowPanel();
        baseContents.setStyleName("qp-exlistbox-base-contents");

        baseContainer = new FlowPanel();
        baseContainer.setStyleName("qp-exlistbox-base-container");
        baseContainer.addDomHandler(new ClickHandler() {
            @Override
            public void onClick(ClickEvent arg0) {
                if (enabled){
                    updateOptionButtonsSelection();
                    // Show first, then position: the popup must be attached to
                    // the DOM for its offset size to be measurable.
                    popupContainer.show();
                    updatePosition();
                }
            }
        }, ClickEvent.getType());
        baseContainer.add(baseContents);
        baseContainer.add(baseButton);

        add(baseContainer);

        options = new ArrayList<ExListBoxOption>();
        baseBodies = new ArrayList<Widget>();
        popupBodies = new ArrayList<Widget>();
    }

    /**
     * Adds an option with the given widgets for the collapsed (base) and
     * expanded (popup) representations. Clicking the popup body selects the
     * option, closes the popup and notifies the change listener.
     */
    public void addOption(Widget baseBody, Widget popupBody){
        baseBodies.add(baseBody);
        popupBodies.add(popupBody);
        final ExListBoxOption currOption = new ExListBoxOption(baseBody, popupBody);
        options.add(currOption);
        popupContents.add(currOption.getPopupBody());
        currOption.getPopupBody().addDomHandler(new ClickHandler() {
            @Override
            public void onClick(ClickEvent arg0) {
                selectedIndex = options.indexOf(currOption);
                setSelectedBaseBody();
                // Bug fix: previously listener.onChange() was called without a
                // null check, throwing a NullPointerException when no change
                // listener had been registered via setChangeListener().
                if (listener != null){
                    listener.onChange();
                }
            }
        }, ClickEvent.getType());
    }

    /** Shows the selected option's base body in the collapsed view and hides the popup. */
    protected void setSelectedBaseBody(){
        baseContents.clear();
        if (selectedIndex >= 0 && selectedIndex < options.size()){
            baseContents.add(options.get(selectedIndex).getBaseBody());
        }
        popupContainer.hide();
    }

    /**
     * Positions the popup horizontally centered above the base container,
     * clamped so it stays within the visible window area with a small margin.
     */
    protected void updatePosition(){
        int mountingPointX = baseContainer.getAbsoluteLeft() + baseContainer.getOffsetWidth()/2 - popupContainer.getOffsetWidth()/2;
        int mountingPointY = baseContainer.getAbsoluteTop() - popupContainer.getOffsetHeight();
        if (mountingPointX < Window.getScrollLeft() + MARGIN){
            mountingPointX = Window.getScrollLeft() + MARGIN;
        } else if (mountingPointX + popupContainer.getOffsetWidth() > Window.getClientWidth() + Window.getScrollLeft() + MARGIN){
            mountingPointX = Window.getClientWidth() + Window.getScrollLeft() + MARGIN - popupContainer.getOffsetWidth();
        }
        if (mountingPointY < Window.getScrollTop() + MARGIN){
            mountingPointY = Window.getScrollTop() + MARGIN;
        } else if (mountingPointY + popupContainer.getOffsetHeight() > Window.getClientHeight() + Window.getScrollTop() + MARGIN){
            mountingPointY = Window.getClientHeight() + Window.getScrollTop() + MARGIN - popupContainer.getOffsetHeight();
        }
        popupContainer.setPopupPosition(mountingPointX, mountingPointY);
    }

    /** Marks only the currently selected option as selected in the popup list. */
    protected void updateOptionButtonsSelection(){
        for (int i = 0; i < options.size(); i++){
            options.get(i).setSelected(i == selectedIndex);
        }
    }

    /** Registers the listener notified when the user picks an option. */
    public void setChangeListener(ExListBoxChangeListener listener){
        this.listener = listener;
    }

    /** @return index of the currently selected option (0 by default). */
    public int getSelectedIndex(){
        return selectedIndex;
    }

    /** Programmatically selects the option at the given index; out-of-range indices are ignored. */
    public void setSelectedIndex(int index){
        if (index >= 0 && index < options.size()){
            selectedIndex = index;
            setSelectedBaseBody();
        }
    }

    /** Enables/disables the widget; disabled state only blocks opening the popup and toggles a CSS class. */
    public void setEnabled(boolean enabled){
        this.enabled = enabled;
        if (this.enabled){
            removeStyleName("qp-exlistbox-disabled");
        } else {
            addStyleName("qp-exlistbox-disabled");
        }
    }

    /** @return whether the widget is currently enabled. */
    public boolean getEnabled(){
        return this.enabled;
    }
}
|
package org.modeshape.connector.store.jpa.util;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import org.hibernate.ejb.Ejb3Configuration;
import org.modeshape.common.util.CheckArg;
import org.modeshape.connector.store.jpa.JpaSource;
import org.modeshape.connector.store.jpa.Model;
/**
* An option for the store. This is typically used to save store-specific values.
* <p>
* This JPA entity is always added to the {@link Ejb3Configuration} in the {@link JpaSource#getConnection() JpaSource}, and
* therefore should not be {@link Model#configure(Ejb3Configuration) added to the configuration} by a {@link Model}.
* </p>
*/
@Entity( name = "MODE_OPTIONS" )
@NamedQueries( {@NamedQuery( name = "StoreOptionEntity.findAll", query = "SELECT option FROM MODE_OPTIONS AS option" )} )
public class StoreOptionEntity {

    @Id
    @Column( name = "NAME", nullable = false, length = 128 )
    private String name;

    @Column( name = "VALUE", nullable = false, length = 512 )
    private String value;

    /** No-arg constructor required by JPA; not for application use. */
    protected StoreOptionEntity() {
    }

    /**
     * @param name the name of the option; may not be null or empty
     * @param value the value of the option; may be null
     */
    public StoreOptionEntity( String name,
                              String value ) {
        CheckArg.isNotEmpty(name, "name");
        setName(name);
        setValue(value);
    }

    /**
     * @param name the name of the option; may not be null or empty
     */
    public StoreOptionEntity( String name ) {
        CheckArg.isNotEmpty(name, "name");
        setName(name);
    }

    /**
     * @return name
     */
    public String getName() {
        return name;
    }

    /**
     * @param name Sets name to the specified value.
     */
    public void setName( String name ) {
        this.name = name;
    }

    /**
     * @return value
     */
    public String getValue() {
        return value;
    }

    /**
     * @param value Sets value to the specified value.
     */
    public void setValue( String value ) {
        this.value = value;
    }

    /**
     * {@inheritDoc}
     *
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode() {
        // Hash on name only; equals() compares name and value, which still
        // satisfies the contract (equal objects have equal names, hence equal hashes).
        return getName().hashCode();
    }

    /**
     * {@inheritDoc}
     *
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals( Object obj ) {
        if (obj == this) return true;
        if (obj instanceof StoreOptionEntity) {
            StoreOptionEntity that = (StoreOptionEntity)obj;
            if (!this.getName().equals(that.getName())) return false;
            // Bug fix: value may be null (see the single-argument constructor),
            // so the previous this.getValue().equals(...) could throw a
            // NullPointerException. Compare null-safely instead.
            String thisValue = this.getValue();
            String thatValue = that.getValue();
            return thisValue == null ? thatValue == null : thisValue.equals(thatValue);
        }
        return false;
    }

    /**
     * {@inheritDoc}
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        return "Option " + getName() + " = \"" + getValue() + "\"";
    }
}
|
package com.breadwallet.core.ethereum;
import com.breadwallet.core.BRCoreJniReference;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Map;
import static com.breadwallet.core.ethereum.BREthereumToken.jniGetTokenBRD;
import static com.breadwallet.core.ethereum.BREthereumToken.jniTokenAll;
public class BREthereumLightNode extends BRCoreJniReference {
// Client

/**
 * Callback interface through which the native light node asks its host
 * application to perform network requests (balance lookups, gas estimation,
 * transaction submission, ...). Each method mirrors a C function-pointer
 * typedef declared in the native layer (see the typedef comments below);
 * results are delivered back to the core via the corresponding announce*
 * method on BREthereumLightNode, correlated by the request id {@code rid}.
 */
public interface Client {
    // typedef void (*BREthereumClientHandlerGetBalance) (BREthereumClientContext context,
    //                                                    BREthereumLightNode node,
    //                                                    BREthereumWalletId wid,
    //                                                    const char *address,
    //                                                    int rid);
    /** Requests the balance of {@code address} for wallet {@code wid}. */
    void getBalance(int wid, String address, int rid);

    // typedef void (*BREthereumClientHandlerGetGasPrice) (BREthereumClientContext context,
    //                                                     BREthereumLightNode node,
    //                                                     BREthereumWalletId wid,
    //                                                     int rid);
    /** Requests the current gas price for wallet {@code wid}. */
    void getGasPrice(int wid, int rid);

    // typedef void (*BREthereumClientHandlerEstimateGas) (BREthereumClientContext context,
    //                                                     BREthereumLightNode node,
    //                                                     BREthereumWalletId wid,
    //                                                     BREthereumTransactionId tid,
    //                                                     const char *to,
    //                                                     const char *amount,
    //                                                     const char *data,
    //                                                     int rid);
    /** Requests a gas estimate for transaction {@code tid} in wallet {@code wid}. */
    void getGasEstimate(int wid, int tid, String to, String amount, String data, int rid);

    // typedef void (*BREthereumClientHandlerSubmitTransaction) (BREthereumClientContext context,
    //                                                           BREthereumLightNode node,
    //                                                           BREthereumWalletId wid,
    //                                                           BREthereumTransactionId tid,
    //                                                           const char *transaction,
    //                                                           int rid);
    /** Submits the raw (signed, serialized) transaction to the network. */
    void submitTransaction(int wid, int tid, String rawTransaction, int rid);

    // typedef void (*BREthereumClientHandlerGetTransactions) (BREthereumClientContext context,
    //                                                         BREthereumLightNode node,
    //                                                         const char *address,
    //                                                         int rid);
    /** Requests the transaction history for {@code address}. */
    void getTransactions(String address, int rid);

    // typedef void (*BREthereumClientHandlerGetLogs) (BREthereumClientContext context,
    //                                                 BREthereumLightNode node,
    //                                                 const char *contract,
    //                                                 const char *address,
    //                                                 const char *event,
    //                                                 int rid);
    /** Requests contract event logs matching the given contract/address/event filter. */
    void getLogs (String contract, String address, String event, int rid);

    // typedef void (*BREthereumClientHandlerGetBlockNumber) (BREthereumClientContext context,
    //                                                        BREthereumLightNode node,
    //                                                        int rid);
    /** Requests the current block number. */
    void getBlockNumber (int rid);

    // typedef void (*BREthereumClientHandlerGetNonce) (BREthereumClientContext context,
    //                                                  BREthereumLightNode node,
    //                                                  const char *address,
    //                                                  int rid);
    /** Requests the next nonce for {@code address}. */
    void getNonce (String address, int rid);
}
// Client Announcers
//
// These methods hand results obtained by the Client callbacks back to the
// native core via JNI, correlated by the request id {@code rid}.

/** Delivers the balance fetched for wallet {@code wid} back to the core. */
public void announceBalance(int wid, String balance, int rid) {
    jniAnnounceBalance(wid, balance, rid);
}

/** Delivers the current gas price for wallet {@code wid} back to the core. */
public void announceGasPrice(int wid, String gasPrice, int rid) {
    jniAnnounceGasPrice(wid, gasPrice, rid);
}

/** Delivers the gas estimate for transaction {@code tid} in wallet {@code wid} back to the core. */
public void announceGasEstimate(int wid, int tid, String gasEstimate, int rid) {
    jniAnnounceGasEstimate(wid, tid, gasEstimate, rid);
}

/** Reports the hash of a submitted transaction back to the core. */
public void announceSubmitTransaction(int wid, int tid, String hash, int rid) {
    jniAnnounceSubmitTransaction(wid, tid, hash, rid);
}
/**
 * Delivers one transaction (fetched by Client.getTransactions) back to the
 * core. All numeric fields are passed as strings and parsed natively.
 * Validates the from/to addresses — and the contract address when non-empty —
 * before crossing the JNI boundary (ensureValidAddress presumably throws or
 * aborts on malformed input — TODO confirm against its definition).
 */
public void announceTransaction(int id,
                                String hash,
                                String from,
                                String to,
                                String contract,
                                String amount, // value
                                String gasLimit,
                                String gasPrice,
                                String data,
                                String nonce,
                                String gasUsed,
                                String blockNumber,
                                String blockHash,
                                String blockConfirmations,
                                String blockTransactionIndex,
                                String blockTimestamp,
                                // cumulative gas used,
                                // confirmations
                                // txreceipt_status
                                String isError) {
    ensureValidAddress(from);
    ensureValidAddress(to);
    // Contract address is optional; validate only when present.
    if (null != contract && !contract.isEmpty()) ensureValidAddress(contract);
    jniAnnounceTransaction(id, hash, from, to, contract, amount, gasLimit, gasPrice, data, nonce, gasUsed,
                           blockNumber, blockHash, blockConfirmations, blockTransactionIndex, blockTimestamp,
                           isError);
}
/**
 * Delivers one contract event log (fetched by Client.getLogs) back to the
 * core. Validates the contract address before crossing the JNI boundary;
 * numeric fields are passed as strings and parsed natively.
 */
public void announceLog(int id,
                        String hash,
                        String contract,
                        String[] topics,
                        String data,
                        String gasPrice,
                        String gasUsed,
                        String logIndex,
                        String blockNumber,
                        String blockTransactionIndex,
                        String blockTimestamp) {
    ensureValidAddress(contract);
    jniAnnounceLog(id, hash, contract, topics, data, gasPrice, gasUsed, logIndex,
                   blockNumber, blockTransactionIndex, blockTimestamp);
}
    /** Announces the current block number in response to request `rid`. */
    public void announceBlockNumber (String blockNumber, int rid) {
        jniAnnounceBlockNumber (blockNumber, rid);
    }
    /**
     * Announces the nonce for `address` in response to request `rid`.
     *
     * @throws RuntimeException if `address` is not a valid Ethereum address
     */
    public void announceNonce (String address, String nonce, int rid) {
        ensureValidAddress(address);
        jniAnnounceNonce(address, nonce, rid);
    }
    /**
     * Announces a token to the Core and marks the Java-side token tables as
     * stale so the next {@link #initializeTokens} call rebuilds them and
     * picks up the new token.
     *
     * @throws RuntimeException if `address` is not a valid Ethereum address
     */
    public void announceToken (String address,
                               String symbol,
                               String name,
                               String description,
                               int decimals,
                               String defaultGasLimit,
                               String defaultGasPrice,
                               int rid) {
        ensureValidAddress(address);
        jniAnnounceToken(address, symbol, name, description, decimals,
                defaultGasLimit, defaultGasPrice,
                rid);
        // Force a rebuild of tokens/tokensByAddress/tokensByReference on next access.
        tokensNeeded = true;
    }
// Listener
// In the following the Event enumerations *must* match the corresponding declarations in
// BREthereumLightNode.h - the enumerations values/indices must be identical.
    public interface Listener {
        // Status reported with every event.  The values/indices *must* match
        // the corresponding C enumeration in BREthereumLightNode.h - do not
        // reorder or insert values.
        enum Status {
            SUCCESS,
            // Reference access
            ERROR_UNKNOWN_NODE,
            ERROR_UNKNOWN_TRANSACTION,
            ERROR_UNKNOWN_ACCOUNT,
            ERROR_UNKNOWN_WALLET,
            ERROR_UNKNOWN_BLOCK,
            ERROR_UNKNOWN_LISTENER,
            // Node
            ERROR_NODE_NOT_CONNECTED,
            // Transaction
            ERROR_TRANSACTION_X,
            // Account
            // Wallet
            // Block
            // Listener
            // Numeric
            ERROR_NUMERIC_PARSE,
        }
        // Count of Status values; used to range-check ordinals arriving from JNI.
        int NUMBER_OF_STATUS_EVENTS = 10;
        // Wallet
        // Must match the C wallet-event enumeration; order is significant.
        enum WalletEvent {
            CREATED,
            BALANCE_UPDATED,
            DEFAULT_GAS_LIMIT_UPDATED,
            DEFAULT_GAS_PRICE_UPDATED,
            DELETED
        }
        int NUMBER_OF_WALLET_EVENTS = 5;
        /** Invoked (on a JNI thread) for every wallet event. */
        void handleWalletEvent(BREthereumWallet wallet, WalletEvent event,
                               Status status,
                               String errorDescription);
        // Block
        // Must match the C block-event enumeration; order is significant.
        enum BlockEvent {
            CREATED,
            DELETED
        }
        int NUMBER_OF_BLOCK_EVENT = 2;
        /** Invoked (on a JNI thread) for every block event. */
        void handleBlockEvent(BREthereumBlock block, BlockEvent event,
                              Status status,
                              String errorDescription);
        // Transaction
        // Must match the C transaction-event enumeration; order is significant.
        enum TransactionEvent {
            ADDED,
            REMOVED,
            CREATED,
            SIGNED,
            SUBMITTED,
            BLOCKED, // aka confirmed
            ERRORED,
            GAS_ESTIMATE_UPDATED,
            BLOCK_CONFIRMATIONS_UPDATED
        }
        int NUMBER_OF_TRANSACTION_EVENTS = 9;
        /** Invoked (on a JNI thread) for every transaction event. */
        void handleTransactionEvent(BREthereumWallet wallet,
                                    BREthereumTransaction transaction,
                                    TransactionEvent event,
                                    Status status,
                                    String errorDescription);
    }
    // Light Node
    // The Client and Listener are owned by the application; the node holds
    // them weakly so it never keeps them alive.
    // NOTE(review): the trampolines below dereference `client.get()` without a
    // null check - confirm the application keeps the Client strongly referenced
    // for the node's lifetime.
    WeakReference<Client> client;
    WeakReference<Listener> listener;
    // Network
    BREthereumNetwork network;
    /** Returns the Ethereum network this node operates on. */
    public BREthereumNetwork getNetwork () {
        return network;
    }
    // Account
    BREthereumAccount account;
    /** Returns the single account managed by this node. */
    public BREthereumAccount getAccount() {
        return account;
    }
    /** Returns the primary address of this node's account. */
    public String getAddress () {
        return account.getPrimaryAddress();
    }
    /** Returns the public-key bytes of the account's primary address. */
    public byte[] getAddressPublicKey () {
        return account.getPrimaryAddressPublicKey();
    }
// Wallet
// We hold a mapping, from identifier to wallet, for all wallets held/managed by this node.
// The Core already holds wallets and thus we don't actually need to 'duplicate' that
// functionality; however, to not hold wallets here would mean that every getWallet(), every
// event handler would need to create another Wallet (feeling like a 'value type'). We don't
// do that - but we could, and might some day.
// We could hold a WeakReference (and we probably should) - but, at least with the current
// Android app, we witnessed wallets being reclaimed between each event update. The consequence
// was that we re-created the wallets each time; only to have them reclaimed. Now, that is
// actually not that big a deal and it should disappear completely when the Android app holds
// on to wallets that have transactions.
// Of course, if the wallet shows up, then it is in Core Ethereum, and it shouldn't be
    // a WeakReference() - since it clearly exists in Core. We'll leave this as a strong
    // reference and explicitly delete wallets on a 'DELETE' event.
protected Map<Long, BREthereumWallet> wallets = new HashMap<>();
protected synchronized BREthereumWallet walletLookupOrCreate(long wid, BREthereumToken token) {
BREthereumWallet wallet = wallets.get(wid);
// If we never had a wallet, then create one.
if (null == wallet) {
// If `token` is null, then lookup the token for wallet.
if (null == token) {
long tokenRef = jniLightNodeWalletGetToken(wid);
if (0 != tokenRef)
token = lookupTokenByReference (tokenRef);
}
wallet = (null == token
? new BREthereumWallet(this, wid, account, network)
: new BREthereumWallet(this, wid, account, network, token));
wallets.put(wid, wallet);
}
return wallet;
}
    /** Returns the node's primary (Ether) wallet, creating it if needed. */
    public BREthereumWallet getWallet () {
        long wid = jniLightNodeGetWallet();
        return walletLookupOrCreate (wid, null);
    }
    /** Returns the wallet holding `token`, creating it if needed. */
    public BREthereumWallet getWallet(BREthereumToken token) {
        long wid = jniLightNodeGetWalletToken(token.getIdentifier());
        return walletLookupOrCreate(wid, token);
    }
    // TODO: Remove once 'client callbacks' are LightNode trampolines
    /** Returns the wallet with Core identifier `wid`, creating it if needed. */
    public BREthereumWallet getWalletByIdentifier (long wid) {
        return walletLookupOrCreate(wid, null);
    }
    // Transaction
    // We'll hold a mapping, from identifier to transaction, for all transactions.
    // Values are weak: a transaction may be reclaimed and is then lazily recreated.
    protected Map<Long, WeakReference<BREthereumTransaction>> transactions = new HashMap<>();
protected synchronized BREthereumTransaction transactionLookupOrCreate(long tid) {
WeakReference<BREthereumTransaction> transactionRef = transactions.get(tid);
if (null == transactionRef || null == transactionRef.get()) {
long tokenReference = jniTransactionGetToken(tid);
transactionRef = new WeakReference<>(
new BREthereumTransaction(this, tid,
(0 == tokenReference
? BREthereumAmount.Unit.ETHER_ETHER
: BREthereumAmount.Unit.TOKEN_DECIMAL)));
transactions.put(tid, transactionRef);
}
return transactionRef.get();
}
    // Block
    // Block identifier -> block; entries are held strongly and never evicted.
    protected Map<Long, BREthereumBlock> blocks = new HashMap<>();
protected synchronized BREthereumBlock blockLookupOrCreate (long bid) {
BREthereumBlock block = blocks.get(bid);
if (null == block) {
block = new BREthereumBlock(this, bid);
blocks.put (bid, block);
}
return block;
}
    /** Returns the Core's current block height. */
    public long getBlockHeight () {
        return jniLightNodeGetBlockHeight();
    }
    // Tokens
    // Lookup tables keyed by lower-cased contract address and by the Core's
    // token reference, respectively.  Populated by initializeTokens().
    protected final HashMap<String, BREthereumToken> tokensByAddress = new HashMap<>();
    protected final HashMap<Long, BREthereumToken> tokensByReference = new HashMap<>();
    // All known tokens, in Core order; null until initializeTokens() runs.
    protected BREthereumToken[] tokens = null;
    // The BRD token, resolved during initialization.
    protected BREthereumToken tokenBRD;
    // When true, the token tables must be (re)built on next access.
    private boolean tokensNeeded = true;
protected synchronized void initializeTokens () {
if (tokensNeeded) {
tokensNeeded = false;
long[] references = jniTokenAll();
tokens = new BREthereumToken[references.length];
for (int i = 0; i < references.length; i++)
tokens[i] = new BREthereumToken(references[i]);
tokensByReference.clear();
tokensByAddress.clear();
for (BREthereumToken token : tokens) {
System.err.println("Token: " + token.getSymbol());
tokensByReference.put(token.getIdentifier(), token);
tokensByAddress.put(token.getAddress().toLowerCase(), token);
}
tokenBRD = lookupTokenByReference(jniGetTokenBRD());
}
}
    /** Returns all known tokens, building the token tables if necessary. */
    public BREthereumToken[] getTokens () {
        initializeTokens();
        return tokens;
    }
    /** Returns the BRD token, building the token tables if necessary. */
    public BREthereumToken getBRDToken () {
        initializeTokens();
        return tokenBRD;
    }
public BREthereumToken lookupToken (String address) {
ensureValidAddress(address);
initializeTokens();;
return tokensByAddress.get(address.toLowerCase());
}
    /** Returns the token with the given Core reference, or null if unknown. */
    protected BREthereumToken lookupTokenByReference (long reference) {
        initializeTokens();
        return tokensByReference.get(reference);
    }
    // Constructor
    /** Creates a node whose account is derived from a BIP-39 paper key. */
    public BREthereumLightNode(Client client, BREthereumNetwork network, String paperKey, String[] wordList) {
        this(BREthereumLightNode.jniCreateLightNode(client, network.getIdentifier(), paperKey, wordList),
                client, network);
    }
    /** Creates a (watch-only) node whose account is derived from a public key. */
    public BREthereumLightNode(Client client, BREthereumNetwork network, byte[] publicKey) {
        this(BREthereumLightNode.jniCreateLightNode_PublicKey(client, network.getIdentifier(), publicKey),
                client, network);
    }
    /** Common constructor: wires up the JNI listener, client, network and account. */
    private BREthereumLightNode(long identifier, Client client, BREthereumNetwork network) {
        super(identifier);
        // `this` is the JNI listener, using the `trampoline` functions to invoke
        // the installed `Listener`.
        jniAddListener(null);
        this.client = new WeakReference<>(client);
        this.network = network;
        this.account = new BREthereumAccount(this, jniLightNodeGetAccount());
        initializeTokens ();
    }
    /** Installs the listener; held weakly, so the caller must keep a reference. */
    public void addListener (Listener listener) {
        this.listener = new WeakReference<>(listener);
    }
    /** Returns the installed listener, or null if none or already reclaimed. */
    protected Listener getListener () {
        return null == listener ? null : listener.get();
    }
    // Connect // Disconnect
    /** Connects the Core node; returns true on success. */
    public boolean connect () {
        return jniLightNodeConnect ();
    }
    /** Disconnects the Core node; returns true on success. */
    public boolean disconnect () {
        return jniLightNodeDisconnect ();
    }
// Callback Announcements
// In the JNI Code, we had a problem directly accessing the Listener methods for the provided
// listener (see addListener()). So instead we'll access these methods below and then 'bounce'
// to method calls for the listener.
// These methods also give us a chance to convert the `event`, as a `long`, to the Event.
protected void trampolineWalletEvent (int wid, int event, int status, String errorDescription) {
Listener l = getListener();
if (null == l) return;
// TODO: Resolve Bug
if (event < 0 || event >= Listener.NUMBER_OF_WALLET_EVENTS) return;
if (status < 0 || status >= Listener.NUMBER_OF_STATUS_EVENTS) return;
// Lookup the wallet - this will create the wallet if it doesn't exist. Thus, if the
// `event` is `create`, we get a wallet; and even, if the `event` is `delete`, we get a
// wallet too.
BREthereumWallet wallet = walletLookupOrCreate(wid, null);
// Invoke handler
l.handleWalletEvent(wallet,
Listener.WalletEvent.values()[(int) event],
Listener.Status.values()[(int) status],
errorDescription);
}
protected void trampolineBlockEvent (int bid, int event, int status, String errorDescription) {
Listener l = getListener();
if (null == l) return;
// TODO: Resolve Bug
if (event < 0 || event >= Listener.NUMBER_OF_BLOCK_EVENT) return;
if (status < 0 || status >= Listener.NUMBER_OF_STATUS_EVENTS) return;
// Nothing, at this point
BREthereumBlock block = blockLookupOrCreate(bid);
l.handleBlockEvent (block,
Listener.BlockEvent.values()[(int) event],
Listener.Status.values()[(int) status],
errorDescription);
}
protected void trampolineTransactionEvent (int wid, int tid, int event, int status, String errorDescription) {
Listener l = getListener();
if (null == l) return;
// TODO: Resolve Bug
if (event < 0 || event >= Listener.NUMBER_OF_TRANSACTION_EVENTS) return;
if (status < 0 || status >= Listener.NUMBER_OF_STATUS_EVENTS) return;
BREthereumWallet wallet = walletLookupOrCreate(wid, null);
BREthereumTransaction transaction = transactionLookupOrCreate (tid);
l.handleTransactionEvent(wallet, transaction,
Listener.TransactionEvent.values()[(int) event],
Listener.Status.values()[(int) status],
errorDescription);
}
    // Client trampolines: JNI -> Client bounces.  Each forwards a Core request
    // to the installed Client, which must eventually respond through the
    // matching announce*() method with the same `rid`.
    // NOTE(review): `client.get()` returns null once the Client has been
    // garbage collected; these would then NPE on a JNI thread - confirm the
    // application keeps the Client strongly referenced.
    protected void trampolineGetBalance(int wid, String address, int rid) {
        client.get().getBalance(wid, address, rid);
    }
    protected void trampolineGetGasPrice(int wid, int rid) {
        client.get().getGasPrice(wid, rid);
    }
    protected void trampolineGetGasEstimate(int wid, int tid, String to, String amount, String data, int rid) {
        client.get().getGasEstimate(wid, tid, to, amount, data, rid);
    }
    protected void trampolineSubmitTransaction(int wid, int tid, String rawTransaction, int rid) {
        client.get().submitTransaction(wid, tid, rawTransaction, rid);
    }
    protected void trampolineGetTransactions(String address, int rid) {
        client.get().getTransactions(address, rid);
    }
    protected void trampolineGetLogs (String contract, String address, String event, int rid) {
        client.get().getLogs(contract, address, event, rid);
    }
    protected void trampolineGetBlockNumber (int rid) {
        client.get().getBlockNumber(rid);
    }
    protected void trampolineGetNonce (String address, int rid) {
        client.get().getNonce(address, rid);
    }
public static boolean addressIsValid (String address) {
assert (null != address);
return jniAddressIsValid(address);
}
    /**
     * Validates `address`, throwing rather than returning a flag.
     *
     * @throws RuntimeException if `address` is not a valid Ethereum address
     */
    static void ensureValidAddress (String address) {
        if (!addressIsValid(address))
            throw new RuntimeException ("Invalid Ethereum Address");
    }
    // Native bindings into the Core Ethereum library.  Signatures here must
    // stay in exact sync with the C-side JNI registrations - do not rename or
    // reorder parameters without updating the native code.
    // JNI: Constructors
    protected static native long jniCreateLightNode(Client client, long network, String paperKey, String[] wordList);
    protected static native long jniCreateLightNode_PublicKey(Client client, long network, byte[] publicKey);
    protected static native boolean jniAddressIsValid (String address);
    protected native void jniAddListener (Listener listener);
    // JNI: Announcements
    protected native void jniAnnounceTransaction(int id,
                                                 String hash,
                                                 String from,
                                                 String to,
                                                 String contract,
                                                 String amount, // value
                                                 String gasLimit,
                                                 String gasPrice,
                                                 String data,
                                                 String nonce,
                                                 String gasUsed,
                                                 String blockNumber,
                                                 String blockHash,
                                                 String blockConfirmations,
                                                 String blockTransactionIndex,
                                                 String blockTimestamp,
                                                 // cumulative gas used,
                                                 // confirmations
                                                 // txreceipt_status
                                                 String isError);
    protected native void jniAnnounceLog(int id,
                                         String hash,
                                         String contract,
                                         String[] topics,
                                         String data,
                                         String gasPrice,
                                         String gasUsed,
                                         String logIndex,
                                         String blockNumber,
                                         String blockTransactionIndex,
                                         String blockTimestamp);
    protected native void jniAnnounceBalance (int wid, String balance, int rid);
    protected native void jniAnnounceGasPrice (int wid, String gasPrice, int rid);
    protected native void jniAnnounceGasEstimate (int wid, int tid, String gasEstimate, int rid);
    protected native void jniAnnounceSubmitTransaction (int wid, int tid, String hash, int rid);
    protected native void jniAnnounceBlockNumber (String blockNumber, int rid);
    protected native void jniAnnounceNonce (String address, String nonce, int rid);
    protected native void jniAnnounceToken (String address,
                                            String symbol,
                                            String name,
                                            String description,
                                            int decimals,
                                            String defaultGasLimit,
                                            String defaultGasPrice,
                                            int rid);
    // JNI: Account & Address
    protected native long jniLightNodeGetAccount();
    protected native String jniGetAccountPrimaryAddress(long accountId);
    protected native byte[] jniGetAccountPrimaryAddressPublicKey(long accountId);
    protected native byte[] jniGetAccountPrimaryAddressPrivateKey(long accountId, String paperKey);
    // JNI: Wallet
    protected native long jniLightNodeGetWallet();
    protected native long jniLightNodeGetWalletToken (long tokenId);
    protected native long jniLightNodeCreateWalletToken(long tokenId);
    protected native long jniLightNodeWalletGetToken (long wid);
    protected native String jniGetWalletBalance (long walletId, long unit);
    protected native void jniEstimateWalletGasPrice (long walletId);
    protected native void jniForceWalletBalanceUpdate(long wallet);
    protected native long jniWalletGetDefaultGasPrice (long wallet);
    protected native void jniWalletSetDefaultGasPrice (long wallet, long value);
    protected native long jniWalletGetDefaultGasLimit (long wallet);
    protected native void jniWalletSetDefaultGasLimit (long wallet, long value);
    // JNI: Wallet Transactions
    protected native long jniCreateTransaction (long walletId,
                                                String to,
                                                String amount,
                                                long amountUnit);
    protected native long jniCreateTransactionGeneric(long walletId,
                                                      String to,
                                                      String amount,
                                                      long amountUnit,
                                                      String gasPrice,
                                                      long gasPriceUnit,
                                                      String gasLimit,
                                                      String data);
    protected native void jniSignTransaction (long walletId,
                                              long transactionId,
                                              String paperKey);
    protected native void jniSignTransactionWithPrivateKey(long walletId,
                                                           long transactionId,
                                                           byte[] privateKey);
    protected native void jniSubmitTransaction (long walletId,
                                                long transactionId);
    protected native long[] jniGetTransactions (long walletId);
    protected native void jniTransactionEstimateGas(long walletId,
                                                    long transactionId);
    protected native String jniTransactionEstimateFee (long walletId,
                                                       String amount,
                                                       long amountUnit,
                                                       long resultUnit);
    // JNI: Transactions
    protected native boolean jniTransactionHasToken (long transactionId);
    protected native String jniTransactionGetAmount(long transactionId, long unit);
    protected native String jniTransactionGetFee (long transactionId, long unit);
    protected native String jniTransactionSourceAddress (long transactionId);
    protected native String jniTransactionTargetAddress (long transactionId);
    protected native String jniTransactionGetHash (long transactionId);
    protected native String jniTransactionGetGasPrice (long transactionId, long unit);
    protected native long jniTransactionGetGasLimit (long transactionId);
    protected native long jniTransactionGetGasUsed (long transactionId);
    protected native long jniTransactionGetNonce (long transactionId);
    protected native long jniTransactionGetBlockNumber (long transactionId);
    protected native long jniTransactionGetBlockTimestamp (long transactionId);
    protected native long jniTransactionGetBlockConfirmations (long transactionId);
    protected native long jniTransactionGetToken (long transactionId);
    protected native boolean jniTransactionIsConfirmed (long transactionId);
    protected native boolean jniTransactionIsSubmitted (long transactionId);
    // JNI: Tokens
    // protected native String jniTokenGetAddress (long tokenId);
    // JNI: Block
    protected native long jniLightNodeGetBlockHeight ();
    protected native long jniBlockGetNumber (long bid);
    protected native long jniBlockGetTimestamp (long bid);
    protected native String jniBlockGetHash (long bid);
    // JNI: Connect & Disconnect
    protected native boolean jniLightNodeConnect ();
    protected native boolean jniLightNodeDisconnect ();
    // JNI: Initialize
    protected static native void initializeNative();
    // Register the native method table as soon as the class loads.
    static {
        initializeNative();
    }
    // Support
    // Reference
    /**
     * Pairs a weak reference to the owning node with a Core identifier; the
     * base class for Java-side proxies of Core objects.
     */
    static class Reference {
        WeakReference<BREthereumLightNode> node;
        long identifier;
        Reference(BREthereumLightNode node, long identifier) {
            this.node = new WeakReference<>(node);
            this.identifier = identifier;
        }
    }
    // Reference With Default Unit
    /**
     * A {@link Reference} that additionally carries a default amount unit.
     * Whether the unit is a token unit is fixed at construction; later calls
     * to {@link #setDefaultUnit} must stay within the same family
     * (Ether units vs. token units).
     */
    static class ReferenceWithDefaultUnit extends Reference {
        protected BREthereumAmount.Unit defaultUnit;
        // Fixed at construction; constrains which units setDefaultUnit accepts.
        protected boolean defaultUnitUsesToken = false;
        public BREthereumAmount.Unit getDefaultUnit() {
            return defaultUnit;
        }
        /** @throws IllegalArgumentException if `unit` is from the wrong family */
        public void setDefaultUnit(BREthereumAmount.Unit unit) {
            validUnitOrException(unit);
            this.defaultUnit = unit;
        }
        // Constructor
        protected ReferenceWithDefaultUnit (BREthereumLightNode node,
                                            long identifier,
                                            BREthereumAmount.Unit unit) {
            super(node, identifier);
            this.defaultUnit = unit;
            this.defaultUnitUsesToken = unit.isTokenUnit();
        }
        // Support
        /** Returns true if `unit` belongs to the same family as the default unit. */
        protected boolean validUnit(BREthereumAmount.Unit unit) {
            return (!defaultUnitUsesToken
                    ? (unit == BREthereumAmount.Unit.ETHER_WEI || unit == BREthereumAmount.Unit.ETHER_GWEI || unit == BREthereumAmount.Unit.ETHER_ETHER)
                    : (unit == BREthereumAmount.Unit.TOKEN_DECIMAL || unit == BREthereumAmount.Unit.TOKEN_INTEGER));
        }
        /** @throws IllegalArgumentException if `unit` is from the wrong family */
        protected void validUnitOrException (BREthereumAmount.Unit unit) {
            if (!validUnit(unit))
                throw new IllegalArgumentException("Invalid Unit for instance type: " + unit.toString());
        }
    }
}
|
package org.eclipse.hawkbit.repository.jpa;
import static org.fest.assertions.api.Assertions.assertThat;
import java.io.ByteArrayInputStream;
import java.util.List;
import java.util.Random;
import org.eclipse.hawkbit.repository.jpa.model.JpaSoftwareModule;
import org.eclipse.hawkbit.repository.model.DistributionSet;
import org.eclipse.hawkbit.repository.model.Target;
import org.eclipse.hawkbit.repository.model.TenantMetaData;
import org.eclipse.hawkbit.repository.report.model.TenantUsage;
import org.eclipse.hawkbit.repository.test.util.WithSpringAuthorityRule;
import org.junit.Test;
import ru.yandex.qatools.allure.annotations.Description;
import ru.yandex.qatools.allure.annotations.Features;
import ru.yandex.qatools.allure.annotations.Stories;
@Features("Component Tests - Repository")
@Stories("System Management")
public class SystemManagementTest extends AbstractJpaIntegrationTestWithMongoDB {
    @Test
    @Description("Ensures that you can create a tenant without setting the necessary security context which holds a current tenant")
    public void createInitialTenantWithoutSecurityContext() {
        securityRule.clear();
        // Requesting metadata for an unknown tenant implicitly creates it.
        final TenantMetaData metaData = systemManagement.getTenantMetadata("newTenantToCreate");
        assertThat(metaData).isNotNull();
    }
    @Test
    @Description("Ensures that findTenants returns all tenants and not only restricted to the tenant which currently is logged in")
    public void findTenantsReturnsAllTenantsNotOnlyWhichLoggedIn() throws Exception {
        // Only the default tenant exists initially.
        assertThat(systemManagement.findTenants()).hasSize(1);
        createTestTenantsForSystemStatistics(2, 0, 0, 0);
        // Both new tenants are visible in addition to the default one.
        assertThat(systemManagement.findTenants()).hasSize(3);
    }
    @Test
    @Description("Checks that the system report calculates correctly the artifact size of all tenants in the system. It ignores deleted software modules with their artifacts.")
    public void systemUsageReportCollectsArtifactsOfAllTenants() throws Exception {
        createTestTenantsForSystemStatistics(2, 1234, 0, 0);
        // Overall numbers: one non-deleted artifact of 1234 bytes per tenant.
        assertThat(systemManagement.getSystemUsageStatistics().getOverallArtifacts()).isEqualTo(2);
        assertThat(systemManagement.getSystemUsageStatistics().getOverallArtifactVolumeInBytes()).isEqualTo(1234 * 2);
        // Per-tenant breakdown.
        final List<TenantUsage> usages = systemManagement.getSystemUsageStatistics().getTenants();
        assertThat(usages).hasSize(3);
        assertThat(usages).containsOnly(new TenantUsage("default"),
                new TenantUsage("tenant0").setArtifacts(1).setOverallArtifactVolumeInBytes(1234),
                new TenantUsage("tenant1").setArtifacts(1).setOverallArtifactVolumeInBytes(1234));
    }
    @Test
    @Description("Checks that the system report calculates correctly the targets size of all tenants in the system")
    public void systemUsageReportCollectsTargetsOfAllTenants() throws Exception {
        createTestTenantsForSystemStatistics(2, 0, 100, 0);
        // Overall numbers: 100 targets per new tenant, no actions.
        assertThat(systemManagement.getSystemUsageStatistics().getOverallTargets()).isEqualTo(200);
        assertThat(systemManagement.getSystemUsageStatistics().getOverallActions()).isEqualTo(0);
        // Per-tenant breakdown.
        final List<TenantUsage> usages = systemManagement.getSystemUsageStatistics().getTenants();
        assertThat(usages).hasSize(3);
        assertThat(usages).containsOnly(new TenantUsage("default"), new TenantUsage("tenant0").setTargets(100),
                new TenantUsage("tenant1").setTargets(100));
    }
    @Test
    @Description("Checks that the system report calculates correctly the actions size of all tenants in the system")
    public void systemUsageReportCollectsActionsOfAllTenants() throws Exception {
        createTestTenantsForSystemStatistics(2, 0, 100, 2);
        // 2 tenants, 100 targets each, 2 deployments per target => 400
        assertThat(systemManagement.getSystemUsageStatistics().getOverallActions()).isEqualTo(400);
        // Per-tenant breakdown.
        final List<TenantUsage> usages = systemManagement.getSystemUsageStatistics().getTenants();
        assertThat(usages).hasSize(3);
        assertThat(usages).containsOnly(new TenantUsage("default"),
                new TenantUsage("tenant0").setTargets(100).setActions(200),
                new TenantUsage("tenant1").setTargets(100).setActions(200));
    }
    /**
     * Creates `tenants` tenants and, per tenant, optionally one live plus one
     * deleted artifact of `artifactSize` random bytes, `targets` targets and
     * `updates` distribution-set assignments per target.
     *
     * @return the random artifact content that was used
     */
    private byte[] createTestTenantsForSystemStatistics(final int tenants, final int artifactSize, final int targets,
            final int updates) throws Exception {
        final byte[] artifactBytes = new byte[artifactSize];
        new Random().nextBytes(artifactBytes);
        for (int tenantIndex = 0; tenantIndex < tenants; tenantIndex++) {
            final String tenantName = "tenant" + tenantIndex;
            securityRule.runAs(WithSpringAuthorityRule.withUserAndTenant("bumlux", tenantName), () -> {
                // Touching the metadata creates the tenant.
                systemManagement.getTenantMetadata(tenantName);
                if (artifactSize > 0) {
                    createTestArtifact(artifactBytes);
                    createDeletedTestArtifact(artifactBytes);
                }
                if (targets > 0) {
                    final List<Target> createdTargets = createTestTargets(targets);
                    for (int update = 0; update < updates; update++) {
                        final DistributionSet ds = testdataFactory.createDistributionSet("to be deployed" + update,
                                true);
                        deploymentManagement.assignDistributionSet(ds, createdTargets);
                    }
                }
                return null;
            });
        }
        return artifactBytes;
    }
    /** Creates `targets` test targets for the current tenant. */
    private List<Target> createTestTargets(final int targets) {
        return targetManagement
                .createTargets(testdataFactory.generateTargets(targets, "testTargetOfTenant", "testTargetOfTenant"));
    }
    /** Persists one software module with a single local artifact of the given content. */
    private void createTestArtifact(final byte[] random) {
        JpaSoftwareModule module = new JpaSoftwareModule(softwareManagement.findSoftwareModuleTypeByKey("os"), "name 1",
                "version 1", null, null);
        module = softwareModuleRepository.save(module);
        artifactManagement.createLocalArtifact(new ByteArrayInputStream(random), module.getId(), "file1", false);
    }
    /** Creates artifacts and immediately deletes their modules (must be ignored by reports). */
    private void createDeletedTestArtifact(final byte[] random) {
        final DistributionSet garbage = testdataFactory.createDistributionSet("deleted garbage", true);
        garbage.getModules().forEach(module -> {
            artifactManagement.createLocalArtifact(new ByteArrayInputStream(random), module.getId(), "file1", false);
            softwareManagement.deleteSoftwareModule(module);
        });
    }
}
|
// $Id: DObject.java,v 1.76 2004/07/07 04:08:41 mdb Exp $
package com.threerings.presents.dobj;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import com.samskivert.util.ListUtil;
import com.samskivert.util.StringUtil;
import com.threerings.io.Streamable;
import com.threerings.presents.Log;
/**
* The distributed object forms the foundation of the Presents system. All
* information shared among users of the system is done via distributed
* objects. A distributed object has a set of listeners. These listeners
* have access to the object or a proxy of the object and therefore have
* access to the data stored in the object's members at all times.
*
 * <p> Additionally, an object has a set of subscribers. Subscribers manage
* the lifespan of the object; while a subscriber is subscribed, the
* listeners registered with an object will be notified of events. When
* the subscriber unsubscribes, the object becomes non-live and the
* listeners are no longer notified. <em>Note:</em> on the server, object
* subscription is merely a formality as all objects remain live all the
* time, so <em>do not</em> depend on event notifications ceasing when a
* subscriber has relinquished its subscription. Always unregister all
* listeners when they no longer need to hear from an object.
*
 * <p> When there is any change to the object's field data, an event
* is generated which is dispatched to all listeners of the object,
* notifying them of that change and effecting that change to the copy of
 * the object maintained at each client. In this way, both a repository
 * of shared information and a mechanism for asynchronous notification are
 * made available as fundamental application building blocks.
*
* <p> To define what information is shared, an application creates a
* distributed object declaration which is much like a class declaration
* except that it is transformed into a proper derived class of
* <code>DObject</code> by a script. A declaration looks something like
* this:
*
* <pre>
* public dclass RoomObject
* {
* public String description;
* public int[] occupants;
* }
* </pre>
*
* which is converted into an actual Java class that looks like this:
*
* <pre>
* public class RoomObject extends DObject
* {
* public String getDescription ()
* {
* // ...
* }
*
* public void setDescription (String description)
* {
* // ...
* }
*
* public int[] getOccupants ()
* {
* // ...
* }
*
* public void setOccupants (int[] occupants)
* {
* // ...
* }
*
* public void setOccupantsAt (int index, int value)
* {
* // ...
* }
* }
* </pre>
*
* These method calls on the actual distributed object will result in the
* proper attribute change events being generated and dispatched.
*
* <p> Note that distributed object fields can be any of the following set
* of primitive types:
*
* <code><pre>
* boolean, byte, short, int, long, float, double
* Boolean, Byte, Short, Integer, Long, Float, Double, String
* boolean[], byte[], short[], int[], long[], float[], double[], String[]
* </pre></code>
*
* Fields of type {@link Streamable} can also be used.
*/
public class DObject implements Streamable
{
    public DObject ()
    {
        // Cache the sorted public-field array per concrete class so the
        // reflection and sort happen only once per DObject subclass.
        // NOTE(review): _ftable appears to be a shared map accessed without
        // synchronization - confirm construction is confined to one thread.
        _fields = (Field[])_ftable.get(getClass());
        if (_fields == null) {
            _fields = getClass().getFields();
            Arrays.sort(_fields, FIELD_COMP);
            _ftable.put(getClass(), _fields);
        }
    }
    /**
     * Returns the object id of this object. All objects in the system
     * have a unique object id.
     */
    public int getOid ()
    {
        return _oid;
    }

    /**
     * Returns the dobject manager under the auspices of which this object
     * operates. This could be <code>null</code> if the object is not
     * active.
     */
    public DObjectManager getManager ()
    {
        return _omgr;
    }
/**
* Don't call this function! Go through the distributed object manager
* instead to ensure that everything is done on the proper thread.
* This function can only safely be called directly when you know you
* are operating on the omgr thread (you are in the middle of a call
* to <code>objectAvailable</code> or to a listener callback).
*
* @see DObjectManager#subscribeToObject
*/
public void addSubscriber (Subscriber sub)
{
// only add the subscriber if they're not already there
Object[] subs = ListUtil.testAndAdd(_subs, sub);
if (subs != null) {
// Log.info("Adding subscriber " + which() + ": " + sub + ".");
_subs = subs;
_scount++;
} else {
Log.warning("Refusing subscriber that's already in the list " +
"[dobj=" + which() + ", subscriber=" + sub + "]");
Thread.dumpStack();
}
}
/**
* Don't call this function! Go through the distributed object manager
* instead to ensure that everything is done on the proper thread.
* This function can only safely be called directly when you know you
* are operating on the omgr thread (you are in the middle of a call
* to <code>objectAvailable</code> or to a listener callback).
*
* @see DObjectManager#unsubscribeFromObject
*/
public void removeSubscriber (Subscriber sub)
{
if (ListUtil.clear(_subs, sub) != null) {
// if we removed something, check to see if we just removed
// the last subscriber from our list; we also want to be sure
// that we're still active otherwise there's no need to notify
// our objmgr because we don't have one
if (--_scount == 0 && _omgr != null) {
_omgr.removedLastSubscriber(this, _deathWish);
}
}
}
    /**
     * Instructs this object to request to have a fork stuck in it when
     * its last subscriber is removed.
     *
     * @param deathWish if true, the object is destroyed when the last
     * subscriber unsubscribes (see {@link #removeSubscriber}).
     */
    public void setDestroyOnLastSubscriberRemoved (boolean deathWish)
    {
        _deathWish = deathWish;
    }
/**
* Adds an event listener to this object. The listener will be
* notified when any events are dispatched on this object that match
* their particular listener interface.
*
* <p> Note that the entity adding itself as a listener should have
* obtained the object reference by subscribing to it or should be
* acting on behalf of some other entity that subscribed to the
* object, <em>and</em> that it must be sure to remove itself from the
* listener list (via {@link #removeListener}) when it is done because
* unsubscribing from the object (done by whatever entity subscribed
* in the first place) is not guaranteed to result in the listeners
* added through that subscription being automatically removed (in
* most cases, they definitely will not be removed).
*
* @param listener the listener to be added.
*
* @see EventListener
* @see AttributeChangeListener
* @see SetListener
* @see OidListListener
*/
public void addListener (ChangeListener listener)
{
// only add the listener if they're not already there
Object[] els = ListUtil.testAndAdd(_listeners, listener);
if (els != null) {
_listeners = els;
} else {
Log.warning("Refusing repeat listener registration " +
"[dobj=" + which() + ", list=" + listener + "]");
Thread.dumpStack();
}
}
    /**
     * Removes an event listener from this object. The listener will no
     * longer be notified when events are dispatched on this object.
     *
     * @param listener the listener to be removed.
     */
    public void removeListener (ChangeListener listener)
    {
        ListUtil.clear(_listeners, listener);
    }

    /**
     * Provides this object with an entity that can be used to validate
     * subscription requests and events before they are processed. The
     * access controller is handy for ensuring that clients are behaving
     * as expected and for preventing impermissible modifications or event
     * dispatches on a distributed object.
     */
    public void setAccessController (AccessController controller)
    {
        _controller = controller;
    }

    /**
     * Returns a reference to the access controller in use by this object
     * or null if none has been configured.
     */
    public AccessController getAccessController ()
    {
        return _controller;
    }
/**
* Get the DSet with the specified name.
*/
public final DSet getSet (String setName)
{
try {
return (DSet) getField(setName).get(this);
} catch (Exception e) {
throw new IllegalArgumentException("No such set: " + setName);
}
}
/**
 * Request to have the specified item added to the specified DSet.
 *
 * @param setName the name of the set to which to add the entry.
 * @param entry the entry to add.
 * @throws IllegalArgumentException if this object has no set with the
 * supplied name (thrown by {@link #getSet}).
 */
public void addToSet (String setName, DSet.Entry entry)
{
    getSet(setName); // validate the set
    requestEntryAdd(setName, entry);
}
/**
 * Request to have the specified item updated in the specified DSet.
 *
 * @param setName the name of the set containing the entry.
 * @param entry the replacement entry.
 * @throws IllegalArgumentException if this object has no set with the
 * supplied name (thrown by {@link #getSet}).
 */
public void updateSet (String setName, DSet.Entry entry)
{
    getSet(setName); // validate the set
    requestEntryUpdate(setName, entry);
}
/**
 * Request to have the specified key removed from the specified DSet.
 *
 * @param setName the name of the set from which to remove the entry.
 * @param key the key identifying the entry to remove.
 * @throws IllegalArgumentException if this object has no set with the
 * supplied name (thrown by {@link #getSet}).
 */
public void removeFromSet (String setName, Comparable key)
{
    getSet(setName); // validate the set
    requestEntryRemove(setName, key);
}
/**
 * Acquires a named lock on this object. The lock is held until
 * {@link #releaseLock} is called with the same name; because release
 * is effected by queueing an event, the lock is not actually released
 * until every event generated before the release has been processed.
 * This lets server entities ensure their queued events have been
 * applied before a service is undertaken again. An object may hold
 * any number of distinctly named locks.
 *
 * @param name the name of the lock to acquire.
 *
 * @return true if the lock was acquired, false if it is still held
 * from a previous acquisition.
 *
 * @see #releaseLock
 */
public boolean acquireLock (String name)
{
    // testAndAddEqual yields null when the lock name is already present
    Object[] updated = ListUtil.testAndAddEqual(_locks, name);
    if (updated == null) {
        return false;
    }
    // the name was added; retain the (possibly reallocated) array
    _locks = updated;
    return true;
}
/**
 * Queues up an event that when processed will release the lock of the
 * specified name.
 *
 * @param name the name of the lock to release.
 *
 * @see #acquireLock
 */
public void releaseLock (String name)
{
    // queue up a release lock event; the lock is only cleared when the
    // event is processed (see clearLock), after all prior events
    postEvent(new ReleaseLockEvent(_oid, name));
}
/**
 * Don't call this function! It is called by a remove lock event when
 * that event is processed and shouldn't be called at any other time.
 * If you mean to release a lock that was acquired with
 * <code>acquireLock</code> you should be using
 * <code>releaseLock</code>.
 *
 * @param name the name of the lock to clear.
 *
 * @see #acquireLock
 * @see #releaseLock
 */
protected void clearLock (String name)
{
    // clear the lock from the list
    if (ListUtil.clearEqual(_locks, name) == null) {
        // complain if we didn't find the lock; this suggests a release
        // without a matching acquire
        Log.info("Unable to clear non-existent lock [lock=" + name +
                 ", dobj=" + this + "].");
    }
}
/**
 * Requests that this distributed object be destroyed. It does so by
 * queueing up an object destroyed event which the server will
 * validate and process.
 */
public void destroy ()
{
    // destruction is asynchronous: it takes effect only when the
    // queued event is processed
    postEvent(new ObjectDestroyedEvent(_oid));
}
/**
 * Checks whether the specified subscriber may subscribe to this
 * object. This is called before a subscription request is satisfied.
 * If an {@link AccessController} has been configured for this object
 * it makes the decision; with no controller, every subscription is
 * allowed.
 *
 * @param sub the subscriber that will subscribe to this object.
 *
 * @return true if the subscriber has access to the object, false if
 * they do not.
 */
public boolean checkPermissions (Subscriber sub)
{
    // no controller configured means unrestricted access
    return (_controller == null) || _controller.allowSubscribe(this, sub);
}
/**
 * Checks whether the specified event may be dispatched on this
 * object. If an {@link AccessController} has been configured for this
 * object it makes the decision; with no controller, every dispatch is
 * allowed.
 *
 * @param event the event that is about to be dispatched.
 *
 * @return true if the dispatch should be allowed, false if not.
 */
public boolean checkPermissions (DEvent event)
{
    // no controller configured means unrestricted dispatch
    return (_controller == null) || _controller.allowDispatch(this, event);
}
/**
 * Called by the distributed object manager after it has applied an
 * event to this object. This dispatches an event notification to all
 * of the listeners registered with this object.
 *
 * @param event the event that was just applied.
 */
public void notifyListeners (DEvent event)
{
    if (_listeners == null) {
        return;
    }
    // the length is captured once, but the array is re-read from the
    // field on each pass; null slots are skipped — presumably this
    // tolerates a listener removing itself mid-notification (TODO
    // confirm against ListUtil semantics)
    int llength = _listeners.length;
    for (int i = 0; i < llength; i++) {
        Object listener = _listeners[i];
        if (listener == null) {
            continue;
        }
        try {
            // do any event specific notifications
            event.notifyListener(listener);
            // and notify them if they are listening for all events
            if (listener instanceof EventListener) {
                ((EventListener)listener).eventReceived(event);
            }
        } catch (Exception e) {
            // a misbehaving listener must not prevent the remaining
            // listeners from being notified
            Log.warning("Listener choked during notification " +
                        "[list=" + listener + ", event=" + event + "].");
            Log.logStackTrace(e);
        }
    }
}
/**
 * Called by the distributed object manager after it has applied an
 * event to this object. This dispatches an event notification to all
 * of the proxy listeners registered with this object.
 *
 * @param event the event that was just applied; events flagged as
 * private are never forwarded to proxies.
 */
public void notifyProxies (DEvent event)
{
    if (_subs == null || event.isPrivate()) {
        return;
    }
    for (int ii = 0, ll = _subs.length; ii < ll; ii++) {
        Object sub = _subs[ii];
        try {
            // only proxy subscribers receive forwarded events; slots
            // may be null or hold other subscriber types
            if (sub != null && sub instanceof ProxySubscriber) {
                ((ProxySubscriber)sub).eventReceived(event);
            }
        } catch (Exception e) {
            // one misbehaving proxy must not starve the others
            Log.warning("Proxy choked during notification " +
                        "[sub=" + sub + ", event=" + event + "].");
            Log.logStackTrace(e);
        }
    }
}
/**
 * Sets the named attribute to the specified value. This is only used
 * by the internals of the event dispatch mechanism and should not be
 * called directly by users. Use the generated attribute setter
 * methods instead.
 *
 * @param name the name of the field to set.
 * @param value the new value; container values (arrays and DSets) are
 * defensively cloned before being stored.
 * @throws ObjectAccessException if the field is unknown or cannot be
 * written (wraps the underlying reflection failure).
 */
public void setAttribute (String name, Object value)
    throws ObjectAccessException
{
    try {
        // for values that contain other values (arrays and DSets), we
        // need to clone them before putting them in the object
        // because otherwise a subsequent event might come along and
        // modify these values before the networking thread has had a
        // chance to propagate this event to the clients
        // i wish i could just call value.clone() but Object declares
        // clone() to be inaccessible, so we must cast the values to
        // their actual types to gain access to the widened clone()
        // methods
        if (value instanceof DSet) {
            value = ((DSet)value).clone();
        } else if (value instanceof int[]) {
            value = ((int[])value).clone();
        } else if (value instanceof String[]) {
            value = ((String[])value).clone();
        } else if (value instanceof byte[]) {
            value = ((byte[])value).clone();
        } else if (value instanceof long[]) {
            value = ((long[])value).clone();
        } else if (value instanceof float[]) {
            value = ((float[])value).clone();
        } else if (value instanceof short[]) {
            value = ((short[])value).clone();
        } else if (value instanceof double[]) {
            value = ((double[])value).clone();
        }
        // now actually set the value
        getField(name).set(this, value);
    } catch (Exception e) {
        // guard against a null value while building the message; the
        // original failure would otherwise be masked by an NPE here
        String vclass = (value == null) ? "null" : value.getClass().getName();
        String errmsg = "Attribute setting failure [name=" + name +
            ", value=" + value +
            ", vclass=" + vclass + "].";
        throw new ObjectAccessException(errmsg, e);
    }
}
/**
 * Looks up the named attribute and returns a reference to it. This
 * should only be used by the internals of the event dispatch
 * mechanism and should not be called directly by users. Use the
 * generated attribute getter methods instead.
 *
 * @param name the name of the attribute to fetch.
 * @return the current value of the named field.
 * @throws ObjectAccessException if the field is unknown or cannot be
 * read (wraps the underlying reflection failure).
 */
public Object getAttribute (String name)
    throws ObjectAccessException
{
    try {
        // getField() returns null for unknown names; the resulting
        // NPE is folded into the catch below
        return getField(name).get(this);
    } catch (Exception e) {
        String errmsg = "Attribute getting failure [name=" + name + "].";
        throw new ObjectAccessException(errmsg, e);
    }
}
/**
 * Posts a message event on this distributed object.
 *
 * @param name the name of the message.
 * @param args the arguments to deliver with the message.
 */
public void postMessage (String name, Object[] args)
{
    postEvent(new MessageEvent(_oid, name, args));
}
/**
 * Posts the specified event either to our dobject manager or to the
 * compound event for which we are currently transacting.
 *
 * @param event the event to post. While a transaction is in progress
 * the event is buffered in the compound event; otherwise it goes
 * straight to the object manager. If the object has no manager
 * (i.e. it is inactive) the event is dropped with a warning.
 */
public void postEvent (DEvent event)
{
    if (_tevent != null) {
        _tevent.postEvent(event);
    } else if (_omgr != null) {
        _omgr.postEvent(event);
    } else {
        Log.warning("Unable to post event, object has no omgr " +
                    "[oid=" + getOid() + ", class=" + getClass().getName() +
                    ", event=" + event + "].");
    }
}
/**
 * Returns true if this object is active and registered with the
 * distributed object system. If an object is created via
 * <code>DObjectManager.createObject</code> it will be active until
 * such time as it is destroyed.
 *
 * @return whether this object currently has an object manager.
 */
public final boolean isActive ()
{
    return _omgr != null;
}
/**
 * Don't call this function! It initializes this distributed object
 * with the supplied distributed object manager. This is called by the
 * distributed object manager when an object is created and registered
 * with the system.
 *
 * @param omgr the manager that now governs this object.
 *
 * @see DObjectManager#createObject
 */
public void setManager (DObjectManager omgr)
{
    _omgr = omgr;
}
/**
 * Don't call this function. It is called by the distributed object
 * manager when an object is created and registered with the system.
 *
 * @param oid the unique object id assigned to this object.
 *
 * @see DObjectManager#createObject
 */
public void setOid (int oid)
{
    _oid = oid;
}
/**
 * Generates a concise string representation of this object.
 *
 * @return a short description built by {@link #which(StringBuffer)}.
 */
public String which ()
{
    StringBuffer buf = new StringBuffer();
    which(buf);
    return buf.toString();
}
/**
 * Used to briefly describe this distributed object. Appends the short
 * class name and oid; derived classes may override to add detail.
 *
 * @param buf the buffer to which the description is appended.
 */
protected void which (StringBuffer buf)
{
    buf.append(StringUtil.shortClassName(this));
    buf.append(":").append(_oid);
}
/**
 * Generates a string representation of this object.
 *
 * @return the representation built by {@link #toString(StringBuffer)}
 * closed with a trailing bracket.
 */
public String toString ()
{
    StringBuffer buf = new StringBuffer();
    toString(buf);
    return buf.append("]").toString();
}
/**
 * Generates a string representation of this object. Produces
 * "[oid=N" followed by a newline-separated field dump; the closing
 * bracket is appended by {@link #toString()}.
 *
 * @param buf the buffer to which the representation is added.
 */
protected void toString (StringBuffer buf)
{
    StringUtil.fieldsToString(buf, this, "\n");
    if (buf.length() > 0) {
        buf.insert(0, "\n");
    }
    // prepend the oid header ahead of the field dump
    buf.insert(0, _oid);
    buf.insert(0, "[oid=");
}
/**
 * Begins a transaction on this distributed object. In some
 * situations, it is desirable to cause multiple changes to
 * distributed object fields in one unified operation. Starting a
 * transaction causes all subsequent field modifications to be stored
 * in a single compound event which can then be committed, dispatching
 * and applying all included events in a single group. Additionally,
 * the events are dispatched over the network in a single unit which
 * can significantly enhance network efficiency.
 *
 * <p> When the transaction is complete, the caller must call {@link
 * #commitTransaction} or {@link CompoundEvent#commit} to commit the
 * transaction and release the object back to its normal
 * non-transacting state. If the caller decides not to commit their
 * transaction, they must call {@link #cancelTransaction} or {@link
 * CompoundEvent#cancel} to cancel the transaction. Failure to do so
 * will cause the pooch to be totally screwed.
 *
 * <p> Note: like all other distributed object operations,
 * transactions are not thread safe. It is expected that a single
 * thread will handle all distributed object operations and that
 * thread will begin and complete a transaction before giving up
 * control to unknown code which might try to operate on the
 * transacting distributed object.
 *
 * <p> Note also: if the object is already engaged in a transaction, a
 * transaction participant count will be incremented to note that an
 * additional call to {@link #commitTransaction} is required before
 * the transaction should actually be committed. Thus <em>every</em>
 * call to {@link #startTransaction} must be accompanied by a call to
 * either {@link #commitTransaction} or {@link
 * #cancelTransaction}. Additionally, if any transaction participant
 * cancels the transaction, the entire transaction is cancelled for
 * all participants, regardless of whether the other participants
 * attempted to commit the transaction.
 *
 * @throws IllegalArgumentException if this object is not active.
 */
public void startTransaction ()
{
    // sanity check
    if (!isActive()) {
        String errmsg = "Refusing to start transaction on inactive " +
            "object [dobj=" + this + "]";
        throw new IllegalArgumentException(errmsg);
    } else if (_tevent != null) {
        // already transacting: bump the participant count rather than
        // creating a nested compound event
        _tcount++;
    } else {
        _tevent = new CompoundEvent(this, _omgr);
    }
}
/**
* Commits the transaction in which this distributed object is
* involved.
*
* @see CompoundEvent#commit
*/
public void commitTransaction ()
{
if (_tevent == null) {
String errmsg = "Cannot commit: not involved in a transaction " +
"[dobj=" + this + "]";
throw new IllegalStateException(errmsg);
}
// if we are nested, we decrement our nesting count rather than
// committing the transaction
if (_tcount > 0) {
_tcount
} else {
// we may actually be doing our final commit after someone
// already cancelled this transaction, so we need to perform
// the appropriate action at this point
if (_tcancelled) {
_tevent.cancel();
} else {
_tevent.commit();
}
}
}
/**
 * Returns true if this object is in the middle of a transaction or
 * false if it is not.
 *
 * @return whether a compound transaction event is currently active.
 */
public boolean inTransaction ()
{
    boolean transacting = (_tevent != null);
    return transacting;
}
/**
* Cancels the transaction in which this distributed object is
* involved.
*
* @see CompoundEvent#cancel
*/
public void cancelTransaction ()
{
if (_tevent == null) {
String errmsg = "Cannot cancel: not involved in a transaction " +
"[dobj=" + this + "]";
throw new IllegalStateException(errmsg);
}
// if we're in a nested transaction, make a note that it is to be
// cancelled when all parties commit and decrement the nest count
if (_tcount > 0) {
_tcancelled = true;
_tcount
} else {
_tevent.cancel();
}
}
/**
 * Removes this object from participation in any transaction in which
 * it might be taking part.
 */
protected void clearTransaction ()
{
    // sanity check: a non-zero nesting count means some participant
    // never committed or cancelled its share of the transaction
    if (_tcount != 0) {
        Log.warning("Transaction cleared with non-zero nesting count " +
                    "[dobj=" + this + "].");
        _tcount = 0;
    }

    // clear our transaction state
    _tevent = null;
    _tcancelled = false;
}
/**
 * Called by derived instances when an attribute setter method was
 * called.
 *
 * @param name the name of the attribute being changed.
 * @param value the new value for the attribute.
 */
protected void requestAttributeChange (String name, Object value)
{
    try {
        // dispatch an attribute changed event carrying both the new
        // value and the current (old) value
        postEvent(new AttributeChangedEvent(
                      _oid, name, value, getAttribute(name)));
    } catch (ObjectAccessException oae) {
        // best-effort: log and drop rather than propagating
        Log.warning("Unable to request attributeChange [name=" + name +
                    ", value=" + value + ", error=" + oae + "].");
    }
}
/**
 * Called by derived instances when an element updater method was
 * called.
 *
 * @param name the name of the array attribute being updated.
 * @param value the new element value.
 * @param index the index of the element being replaced.
 */
protected void requestElementUpdate (String name, Object value, int index)
{
    try {
        // dispatch an attribute changed event; the old element is
        // captured reflectively for inclusion in the event
        Object oldValue = Array.get(getAttribute(name), index);
        postEvent(new ElementUpdatedEvent(
                      _oid, name, value, oldValue, index));
    } catch (ObjectAccessException oae) {
        // best-effort: log and drop rather than propagating
        Log.warning("Unable to request elementUpdate [name=" + name +
                    ", value=" + value + ", index=" + index +
                    ", error=" + oae + "].");
    }
}
/**
 * Called by derived instances when an oid adder method was called.
 *
 * @param name the name of the oid-list attribute.
 * @param oid the object id to add to the list.
 */
protected void requestOidAdd (String name, int oid)
{
    // dispatch an object added event
    postEvent(new ObjectAddedEvent(_oid, name, oid));
}
/**
 * Called by derived instances when an oid remover method was called.
 *
 * @param name the name of the oid-list attribute.
 * @param oid the object id to remove from the list.
 */
protected void requestOidRemove (String name, int oid)
{
    // dispatch an object removed event
    postEvent(new ObjectRemovedEvent(_oid, name, oid));
}
/**
 * Called by derived instances when a set adder method was called.
 *
 * @param name the name of the set attribute.
 * @param entry the entry to add to the set.
 */
protected void requestEntryAdd (String name, DSet.Entry entry)
{
    try {
        DSet set = (DSet)getAttribute(name);
        // if we're on the authoritative server, we update the set
        // immediately
        boolean alreadyApplied = false;
        if (_omgr != null && _omgr.isManager(this)) {
            if (!set.add(entry)) {
                // the add failed (presumably a duplicate entry); dump
                // a stack so the errant caller can be located
                Thread.dumpStack();
            }
            alreadyApplied = true;
        }
        // dispatch an entry added event
        postEvent(new EntryAddedEvent(_oid, name, entry, alreadyApplied));
    } catch (ObjectAccessException oae) {
        // best-effort: log and drop rather than propagating
        Log.warning("Unable to request entryAdd [name=" + name +
                    ", entry=" + entry + ", error=" + oae + "].");
    }
}
/**
 * Called by derived instances when a set remover method was called.
 *
 * @param name the name of the set attribute.
 * @param key the key identifying the entry to remove.
 */
protected void requestEntryRemove (String name, Comparable key)
{
    try {
        DSet set = (DSet)getAttribute(name);
        // if we're on the authoritative server, we update the set
        // immediately, capturing the removed entry for the event
        DSet.Entry oldEntry = null;
        if (_omgr != null && _omgr.isManager(this)) {
            oldEntry = set.get(key);
            set.removeKey(key);
        }
        // dispatch an entry removed event
        postEvent(new EntryRemovedEvent(_oid, name, key, oldEntry));
    } catch (ObjectAccessException oae) {
        // best-effort: log and drop rather than propagating
        Log.warning("Unable to request entryRemove [name=" + name +
                    ", key=" + key + ", error=" + oae + "].");
    }
}
/**
 * Called by derived instances when a set updater method was called.
 *
 * @param name the name of the set attribute.
 * @param entry the replacement entry.
 */
protected void requestEntryUpdate (String name, DSet.Entry entry)
{
    try {
        DSet set = (DSet)getAttribute(name);
        // if we're on the authoritative server, we update the set
        // immediately, capturing the replaced entry for the event
        DSet.Entry oldEntry = null;
        if (_omgr != null && _omgr.isManager(this)) {
            oldEntry = set.get(entry.getKey());
            set.update(entry);
        }
        // dispatch an entry updated event
        postEvent(new EntryUpdatedEvent(_oid, name, entry, oldEntry));
    } catch (ObjectAccessException oae) {
        // best-effort: log and drop rather than propagating
        Log.warning("Unable to request entryUpdate [name=" + name +
                    ", entry=" + entry + ", error=" + oae + "].");
    }
}
/**
 * Returns the {@link Field} with the specified name or null if there
 * is none such.
 *
 * @param name the field name to look up.
 * @return the matching field, or null if this class declares no field
 * by that name.
 */
protected final Field getField (String name)
{
    // binary search over _fields, which is sorted by field name
    // (see FIELD_COMP)
    int low = 0, high = _fields.length-1;
    while (low <= high) {
        // unsigned shift: the canonical overflow-safe midpoint
        int mid = (low + high) >>> 1;
        Field midVal = _fields[mid];
        int cmp = midVal.getName().compareTo(name);
        if (cmp < 0) {
            low = mid + 1;
        } else if (cmp > 0) {
            high = mid - 1;
        } else {
            return midVal; // key found
        }
    }
    return null; // key not found.
}
/** Our object id. */
protected int _oid;

/** An array of our fields, sorted by name for efficient lookup via
 * {@link #getField}. */
protected transient Field[] _fields;

/** A reference to our object manager; null while inactive. */
protected transient DObjectManager _omgr;

/** The entity that tells us if an event or subscription request
 * should be allowed; null means allow everything. */
protected transient AccessController _controller;

/** A list of outstanding locks (lock names). */
protected transient Object[] _locks;

/** Our subscribers list. */
protected transient Object[] _subs;

/** Our event listeners list. */
protected transient Object[] _listeners;

/** Our subscriber count. */
protected transient int _scount;

/** The compound event associated with our transaction, if we're
 * currently in a transaction. */
protected transient CompoundEvent _tevent;

/** The nesting depth of our current transaction. */
protected transient int _tcount;

/** Whether or not our nested transaction has been cancelled. */
protected transient boolean _tcancelled;

/** Indicates whether we want to be destroyed when our last subscriber
 * is removed. */
protected transient boolean _deathWish = false;

/** Maintains a mapping of sorted field arrays for each distributed
 * object class. */
protected static HashMap _ftable = new HashMap();

/** Used to sort and search {@link #_fields} by field name. */
protected static final Comparator FIELD_COMP = new Comparator() {
    public int compare (Object o1, Object o2) {
        return ((Field)o1).getName().compareTo(((Field)o2).getName());
    }
};
}
|
package com.konkerlabs.platform.registry.integration.processors;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.konkerlabs.platform.registry.business.exceptions.BusinessException;
import com.konkerlabs.platform.registry.business.model.Device;
import com.konkerlabs.platform.registry.business.model.Event;
import com.konkerlabs.platform.registry.business.model.Gateway;
import com.konkerlabs.platform.registry.business.services.LocationTreeUtils;
import com.konkerlabs.platform.registry.business.services.api.DeviceRegisterService;
import com.konkerlabs.platform.registry.business.services.api.ServiceResponse;
import com.konkerlabs.platform.registry.data.services.api.DeviceLogEventService;
import com.konkerlabs.platform.registry.data.services.routes.api.EventRouteExecutor;
import com.konkerlabs.platform.utilities.parsers.json.JsonParsingService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import java.text.MessageFormat;
import java.time.Instant;
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
 * Entry point for events published by devices and gateways. Resolves
 * the originating {@link Device}, logs the incoming {@link Event} and
 * hands it to the {@link EventRouteExecutor} for route processing.
 */
@Component
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
public class DeviceEventProcessor {

    /** i18n message codes for the error conditions raised here. */
    public enum Messages {
        APIKEY_MISSING("integration.event_processor.api_key.missing"),
        CHANNEL_MISSING("integration.event_processor.channel.missing"),
        DEVICE_NOT_FOUND("integration.event_processor.channel.not_found"),
        INVALID_GATEWAY_LOCATION("integration.event_processor.gateway.location.invalid"),
        INVALID_PAYLOAD("integration.event_processor.payload.invalid");

        // message bundle key; immutable once constructed
        private final String code;

        public String getCode() {
            return code;
        }

        Messages(String code) {
            this.code = code;
        }
    }

    private static final String EVENT_DROPPED = "Incoming event has been dropped: [Device: {0}] - [Payload: {1}]";

    private static final Logger LOGGER = LoggerFactory.getLogger(DeviceEventProcessor.class);

    // injected collaborators; final because they are assigned exactly
    // once in the constructor
    private final EventRouteExecutor eventRouteExecutor;
    private final DeviceRegisterService deviceRegisterService;
    private final DeviceLogEventService deviceLogEventService;
    private final JsonParsingService jsonParsingService;

    @Autowired
    public DeviceEventProcessor(DeviceLogEventService deviceLogEventService,
                                EventRouteExecutor eventRouteExecutor,
                                DeviceRegisterService deviceRegisterService,
                                JsonParsingService jsonParsingService) {
        this.deviceLogEventService = deviceLogEventService;
        this.eventRouteExecutor = eventRouteExecutor;
        this.deviceRegisterService = deviceRegisterService;
        this.jsonParsingService = jsonParsingService;
    }

    /**
     * Processes a device event stamped with the current instant.
     *
     * @throws BusinessException if the api key is missing or unknown,
     * or downstream validation fails.
     */
    public void process(String apiKey, String channel, String payload) throws BusinessException {
        process(apiKey, channel, payload, Instant.now());
    }

    /**
     * Resolves the device by api key and processes the event.
     *
     * @throws BusinessException APIKEY_MISSING when the key is
     * null/empty; DEVICE_NOT_FOUND when no device matches it.
     */
    public void process(String apiKey, String channel, String payload, Instant timestamp) throws BusinessException {
        Optional.ofNullable(apiKey).filter(s -> !s.isEmpty())
                .orElseThrow(() -> new BusinessException(Messages.APIKEY_MISSING.getCode()));

        Device device = Optional.ofNullable(deviceRegisterService.findByApiKey(apiKey))
                .orElseThrow(() -> new BusinessException(Messages.DEVICE_NOT_FOUND.getCode()));

        process(device, channel, payload, timestamp);
    }

    /**
     * Returns true when the device's location lies within the
     * gateway's location subtree, i.e. the gateway may publish on the
     * device's behalf.
     */
    private boolean isValidAuthority(Gateway gateway, Device device) throws BusinessException {
        // primitive boolean: null is never a meaningful result here
        return LocationTreeUtils.isSublocationOf(gateway.getLocation(), device.getLocation());
    }

    /**
     * Processes a batch of device events relayed by a gateway. The
     * payload is a JSON array of objects carrying "deviceId",
     * "channel" and "payload" entries.
     *
     * <p>NOTE(review): the method name carries a historical typo
     * ("proccess"); it is public API, so it is kept for backward
     * compatibility rather than renamed.
     *
     * @throws BusinessException INVALID_GATEWAY_LOCATION when the
     * gateway is not authoritative for a resolved device.
     * @throws JsonProcessingException if the payload cannot be parsed.
     */
    @SuppressWarnings("unchecked")
    public void proccess(Gateway gateway, String payload) throws BusinessException, JsonProcessingException {
        List<Map<String, Object>> devicesEvent = jsonParsingService.toListMap(payload);

        for (Map<String, Object> map : devicesEvent) {
            // NOTE(review): an entry without a "deviceId" key will NPE
            // here — confirm upstream validation guarantees presence
            ServiceResponse<Device> result = deviceRegisterService.findByDeviceId(
                    gateway.getTenant(),
                    gateway.getApplication(),
                    map.get("deviceId").toString());

            if (result.isOk() && Optional.ofNullable(result.getResult()).isPresent()) {
                Device device = result.getResult();
                if (isValidAuthority(gateway, device)) {
                    process(
                            device,
                            map.get("channel").toString(),
                            jsonParsingService.toJsonString((Map<String, Object>) map.get("payload")),
                            Instant.now());
                } else {
                    throw new BusinessException(Messages.INVALID_GATEWAY_LOCATION.getCode());
                }
            }
            // entries whose device cannot be resolved are silently
            // skipped so one bad entry does not abort the batch
        }
    }

    /**
     * Logs the incoming event for an active device and triggers route
     * execution; events for inactive devices are dropped (debug-logged).
     *
     * @throws BusinessException CHANNEL_MISSING when the channel is
     * null/empty; INVALID_PAYLOAD when the event cannot be logged.
     */
    public void process(Device device, String channel, String payload, Instant timestamp) throws BusinessException {
        Optional.ofNullable(channel).filter(s -> !s.isEmpty())
                .orElseThrow(() -> new BusinessException(Messages.CHANNEL_MISSING.getCode()));

        Event event = Event.builder()
                .incoming(
                        Event.EventActor.builder()
                                .deviceGuid(device.getGuid())
                                .channel(channel)
                                .deviceId(device.getDeviceId())
                                .tenantDomain(Optional.ofNullable(device.getTenant()).isPresent()
                                        ? device.getTenant().getDomainName() : null)
                                .applicationName(Optional.ofNullable(device.getApplication()).isPresent()
                                        ? device.getApplication().getName(): null)
                                .build()
                )
                .creationTimestamp(timestamp)
                .ingestedTimestamp(timestamp)
                .payload(payload)
                .build();

        if (device.isActive()) {
            ServiceResponse<Event> logResponse = deviceLogEventService.logIncomingEvent(device, event);
            if (logResponse.isOk()) {
                eventRouteExecutor.execute(event, device);
            } else {
                // NOTE(review): the formatted message contains no SLF4J
                // "{}" placeholders, so standard SLF4J ignores the
                // trailing URI/log-level arguments — confirm whether a
                // custom appender consumes them
                LOGGER.error(MessageFormat.format("Could not log incoming message. Probably invalid payload.: [Device: {0}] - [Payload: {1}]",
                        device.toURI(),
                        payload),
                        event.getIncoming().toURI(),
                        device.getLogLevel()
                );
                throw new BusinessException(Messages.INVALID_PAYLOAD.getCode());
            }
        } else {
            LOGGER.debug(MessageFormat.format(EVENT_DROPPED,
                    device.toURI(),
                    payload),
                    event.getIncoming().toURI(),
                    device.getLogLevel());
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.