lang
stringclasses 1
value | license
stringclasses 13
values | stderr
stringlengths 0
350
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 7
45.1k
| new_contents
stringlengths 0
1.87M
| new_file
stringlengths 6
292
| old_contents
stringlengths 0
1.87M
| message
stringlengths 6
9.26k
| old_file
stringlengths 6
292
| subject
stringlengths 0
4.45k
|
---|---|---|---|---|---|---|---|---|---|---|---|
Java | apache-2.0 | 7c98ceb86afe1b4929c26b283c56355df5ee9cd7 | 0 | facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.rendercore;
import static com.facebook.rendercore.RenderUnit.RenderType.DRAWABLE;
import static com.facebook.rendercore.RenderUnit.RenderType.VIEW;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.util.AttributeSet;
import android.util.SparseArray;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.core.graphics.drawable.DrawableCompat;
import androidx.core.view.ViewCompat;
import java.util.Arrays;
/** A ViewGroup that can be used as a host for subtrees in a RenderCore tree. */
public class HostView extends Host {
private static final int INITIAL_MOUNT_ITEMS_SIZE = 8;
private final InterleavedDispatchDraw mDispatchDraw = new InterleavedDispatchDraw();
private MountItem[] mMountItems;
private int[] mChildDrawingOrder = new int[0];
private boolean mIsChildDrawingOrderDirty;
private boolean mInLayout;
private @Nullable InterceptTouchHandler mOnInterceptTouchEventHandler;
private @Nullable MountItem[] mScrapMountItemsArray;
private @Nullable Object mViewTag;
private @Nullable SparseArray<Object> mViewTags;
private @Nullable Drawable mForeground;
/**
* {@link ViewGroup#getClipChildren()} was only added in API 18, will need to keep track of this
* flag ourselves on the lower versions
*/
private boolean mClipChildren = true;
public HostView(Context context) {
this(context, null);
}
public HostView(Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
setWillNotDraw(false);
setChildrenDrawingOrderEnabled(true);
mMountItems = new MountItem[INITIAL_MOUNT_ITEMS_SIZE];
}
@Override
public void mount(int index, MountItem mountItem) {
if (mountItem.getRenderUnit().getRenderType() == DRAWABLE) {
mountDrawable(mountItem);
} else {
mountView(mountItem);
}
ensureSize(index);
mMountItems[index] = mountItem;
}
/**
 * Grows {@code mMountItems} so that {@code index} is addressable, doubling the capacity until it
 * fits. Existing items keep their positions; newly added slots are {@code null}. No-op when the
 * array is already large enough.
 *
 * @param index the mount index that must be addressable after this call.
 */
private void ensureSize(int index) {
  if (index >= mMountItems.length) {
    int newLength = mMountItems.length * 2;
    while (index >= newLength) {
      newLength = newLength * 2;
    }
    // Arrays.copyOf allocates the larger array and copies the existing items in one step.
    mMountItems = Arrays.copyOf(mMountItems, newLength);
  }
}
@Override
public void unmount(MountItem item) {
final int index = findItemIndex(item);
unmount(index, item);
}
/**
 * Returns the index at which {@code item} is currently mounted in this host.
 *
 * @param item the mounted item to look up.
 * @return the item's index in {@code mMountItems}.
 * @throws IllegalStateException if the item is not mounted in this host.
 */
private int findItemIndex(MountItem item) {
  for (int i = 0; i < mMountItems.length; i++) {
    if (mMountItems[i] == item) {
      return i;
    }
  }
  // Fixed message: previously read "Mount item <item>Was selected..." (missing space, wrong case).
  throw new IllegalStateException(
      "Mount item "
          + item
          + " was selected for unmount but was not found in the list of mounted items");
}
@Override
public void unmount(int index, MountItem mountItem) {
if (mountItem.getRenderUnit().getRenderType() == DRAWABLE) {
unmountDrawable(mountItem);
} else {
unmountView(mountItem);
mIsChildDrawingOrderDirty = true;
}
MountUtils.removeItem(index, mMountItems, mScrapMountItemsArray);
releaseScrapDataStructuresIfNeeded();
}
/** Returns the number of items (non-null slots) currently mounted in this host. */
@Override
public int getMountItemCount() {
  int count = 0;
  for (MountItem item : mMountItems) {
    if (item != null) {
      count++;
    }
  }
  return count;
}
@Override
public MountItem getMountItemAt(int index) {
return mMountItems[index];
}
/**
 * Moves a mounted item from {@code oldIndex} to {@code newIndex}. If {@code item} is null it is
 * looked up in the scrap array (it may have been displaced there by an earlier move in the same
 * pass). A view item is temporarily detached for the duration of the move so it keeps its state;
 * a drawable item just triggers a redraw.
 */
@Override
public void moveItem(MountItem item, int oldIndex, int newIndex) {
  // The item may have been scrapped at oldIndex by a previous move in this pass.
  if (item == null && mScrapMountItemsArray != null) {
    item = mScrapMountItemsArray[oldIndex];
  }
  if (item == null) {
    return;
  }
  final Object content = item.getContent();
  if (item.getRenderUnit().getRenderType() == DRAWABLE) {
    invalidate();
  } else {
    mIsChildDrawingOrderDirty = true;
    // Detach rather than remove/re-add so the view keeps its state while it moves.
    startTemporaryDetach((View) content);
  }
  ensureSize(newIndex);
  if (mMountItems[newIndex] != null) {
    // The destination slot is occupied: park its occupant in the scrap array so it is not
    // lost before its own move is processed.
    ensureScrapMountItemsArray();
    MountUtils.scrapItemAt(newIndex, mMountItems, mScrapMountItemsArray);
  }
  MountUtils.moveItem(oldIndex, newIndex, mMountItems, mScrapMountItemsArray);
  releaseScrapDataStructuresIfNeeded();
  if (item.getRenderUnit().getRenderType() == VIEW) {
    finishTemporaryDetach((View) content);
  }
}
/**
* Sets an InterceptTouchHandler that will be invoked when {@link HostView#onInterceptTouchEvent}
* is called.
*
* @param interceptTouchEventHandler the handler to be set on this host.
*/
public void setInterceptTouchEventHandler(
@Nullable InterceptTouchHandler interceptTouchEventHandler) {
mOnInterceptTouchEventHandler = interceptTouchEventHandler;
}
@Override
public boolean onInterceptTouchEvent(MotionEvent ev) {
if (mOnInterceptTouchEventHandler != null) {
return mOnInterceptTouchEventHandler.onInterceptTouchEvent(this, ev);
}
return super.onInterceptTouchEvent(ev);
}
/**
 * Adds the mount item's view content as a child of this host, using the layout-safe add variant
 * when called from within a layout pass.
 */
private void mountView(MountItem mountItem) {
  final View view = (View) mountItem.getContent();
  mIsChildDrawingOrderDirty = true;
  // A host has been recycled and is already attached.
  if (view instanceof HostView && view.getParent() == this) {
    finishTemporaryDetach(view);
    view.setVisibility(VISIBLE);
    return;
  }
  LayoutParams lp = view.getLayoutParams();
  if (lp == null) {
    lp = generateDefaultLayoutParams();
    view.setLayoutParams(lp);
  }
  if (mInLayout) {
    // addView() during a layout pass would request a nested layout; use the in-layout variant.
    super.addViewInLayout(view, -1, view.getLayoutParams(), true);
  } else {
    super.addView(view, -1, view.getLayoutParams());
  }
}
private void unmountView(MountItem mountItem) {
final View view = (View) mountItem.getContent();
mIsChildDrawingOrderDirty = true;
// Sometime a view is not getting it's 'pressed' state reset before unmount, causing that state
// to not be cleared and carried to next reuse, therefore applying wrong drawable state.
// Particular case where this might happen is when view is unmounted as soon as click event
// is triggered.
if (view.isPressed()) {
view.setPressed(false);
}
if (mInLayout) {
super.removeViewInLayout(view);
} else {
super.removeView(view);
}
}
@Override
public void dispatchDraw(Canvas canvas) {
mDispatchDraw.start(canvas);
super.dispatchDraw(canvas);
// Cover the case where the host has no child views, in which case
// getChildDrawingOrder() will not be called and the draw index will not
// be incremented. This will also cover the case where drawables must be
// painted after the last child view in the host.
if (mDispatchDraw.isRunning()) {
mDispatchDraw.drawNext();
}
mDispatchDraw.end();
}
@Override
protected int getChildDrawingOrder(int childCount, int i) {
updateChildDrawingOrderIfNeeded();
// This method is called in very different contexts within a ViewGroup
// e.g. when handling input events, drawing, etc. We only want to call
// the draw methods if the InterleavedDispatchDraw is active.
if (mDispatchDraw.isRunning()) {
mDispatchDraw.drawNext();
}
return mChildDrawingOrder[i];
}
@Override
public boolean shouldDelayChildPressedState() {
return false;
}
/**
 * Offers touch events to mounted {@link Touchable} drawables before falling back to the default
 * view handling, so drawables can react to touches the way child views do.
 */
@SuppressLint("ClickableViewAccessibility")
@Override
public boolean onTouchEvent(MotionEvent event) {
  boolean handled = false;
  if (isEnabled()) {
    // Iterate drawable from last to first to respect drawing order.
    for (int i = ((mMountItems == null) ? 0 : mMountItems.length) - 1; i >= 0; i--) {
      final MountItem item = mMountItems[i];
      if (item != null
          && item.getRenderUnit().getRenderType() == DRAWABLE
          && item.getContent() instanceof Touchable) {
        final Touchable t = (Touchable) item.getContent();
        // The first touchable that both wants and consumes the event wins.
        if (t.shouldHandleTouchEvent(event) && t.onTouchEvent(event, this)) {
          handled = true;
          break;
        }
      }
    }
  }
  if (!handled) {
    handled = super.onTouchEvent(event);
  }
  return handled;
}
void performLayout(boolean changed, int l, int t, int r, int b) {}
@Override
protected final void onLayout(boolean changed, int l, int t, int r, int b) {
mInLayout = true;
performLayout(changed, l, t, r, b);
mInLayout = false;
}
@Override
public void requestLayout() {
// Don't request a layout if it will be blocked by any parent. Requesting a layout that is
// then ignored by an ancestor means that this host will remain in a state where it thinks that
// it has requested layout, and will therefore ignore future layout requests. This will lead to
// problems if a child (e.g. a ViewPager) requests a layout later on, since the request will be
// wrongly ignored by this host.
ViewParent parent = this;
while (parent instanceof HostView) {
final HostView host = (HostView) parent;
if (!host.shouldRequestLayout()) {
return;
}
parent = parent.getParent();
}
super.requestLayout();
}
protected boolean shouldRequestLayout() {
// Don't bubble during layout.
return !mInLayout;
}
@Override
@SuppressLint("MissingSuperCall")
protected boolean verifyDrawable(Drawable who) {
return true;
}
/**
 * Propagates drawable-state changes (pressed, focused, etc.) to every mounted drawable and to
 * the foreground drawable, since the framework only notifies drawables it knows about.
 */
@Override
protected void drawableStateChanged() {
  super.drawableStateChanged();
  final int count = (mMountItems == null) ? 0 : mMountItems.length;
  for (int i = 0; i < count; i++) {
    final MountItem item = mMountItems[i];
    if (item != null && item.getRenderUnit().getRenderType() == DRAWABLE) {
      MountUtils.maybeSetDrawableState(this, (Drawable) item.getContent());
    }
  }
  if (mForeground != null) {
    mForeground.setState(getDrawableState());
  }
}
/** Jumps every mounted drawable and the foreground drawable to their current state. */
@Override
public void jumpDrawablesToCurrentState() {
  super.jumpDrawablesToCurrentState();
  final int count = (mMountItems == null) ? 0 : mMountItems.length;
  for (int i = 0; i < count; i++) {
    final MountItem item = mMountItems[i];
    if (item == null || item.getRenderUnit().getRenderType() != DRAWABLE) {
      continue;
    }
    DrawableCompat.jumpToCurrentState((Drawable) item.getContent());
  }
  if (mForeground != null) {
    mForeground.jumpToCurrentState();
  }
}
/**
 * Sets visibility on this host and mirrors it onto all mounted drawable content, since the host
 * draws those drawables directly and the framework will not update them itself.
 */
@Override
public void setVisibility(int visibility) {
  super.setVisibility(visibility);
  final boolean isVisible = visibility == View.VISIBLE;
  final int count = (mMountItems == null) ? 0 : mMountItems.length;
  for (int i = 0; i < count; i++) {
    final MountItem item = mMountItems[i];
    if (item != null && item.getRenderUnit().getRenderType() == DRAWABLE) {
      ((Drawable) item.getContent()).setVisible(isVisible, false);
    }
  }
}
/**
* Sets view tag on this host.
*
* @param viewTag the object to set as tag.
*/
public void setViewTag(Object viewTag) {
mViewTag = viewTag;
}
/**
* Sets view tags on this host.
*
* @param viewTags the map containing the tags by id.
*/
public void setViewTags(SparseArray<Object> viewTags) {
mViewTags = viewTags;
}
@Override
public Object getTag() {
if (mViewTag != null) {
return mViewTag;
}
return super.getTag();
}
/**
 * Returns the tag set via {@link #setViewTags(SparseArray)} for the given key, falling back to
 * the framework tag when no custom tag is present.
 */
@Override
public Object getTag(int key) {
  final Object custom = (mViewTags == null) ? null : mViewTags.get(key);
  return (custom != null) ? custom : super.getTag(key);
}
@Override
public boolean getClipChildren() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
// There is no ViewGroup.getClipChildren() method on API < 18
return mClipChildren;
} else {
return super.getClipChildren();
}
}
@Override
public void setClipChildren(boolean clipChildren) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
// There is no ViewGroup.getClipChildren() method on API < 18, will keep track this way
mClipChildren = clipChildren;
}
super.setClipChildren(clipChildren);
}
private static void startTemporaryDetach(View view) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
// Cancel any pending clicks.
view.cancelPendingInputEvents();
}
// The HostView's parent will send an ACTION_CANCEL if it's going to receive
// other motion events for the recycled child.
ViewCompat.dispatchStartTemporaryDetach(view);
}
private static void finishTemporaryDetach(View view) {
ViewCompat.dispatchFinishTemporaryDetach(view);
}
/**
 * Rebuilds {@code mChildDrawingOrder} from the mount order of view items when it has been marked
 * dirty. The array maps "i-th child to draw" to the child's index in this ViewGroup, so children
 * are drawn in mount order rather than add order.
 */
private void updateChildDrawingOrderIfNeeded() {
  if (!mIsChildDrawingOrderDirty) {
    return;
  }
  final int childCount = getChildCount();
  if (mChildDrawingOrder.length < childCount) {
    // Over-allocate a little so small growths don't re-allocate every time.
    mChildDrawingOrder = new int[childCount + 5];
  }
  int index = 0;
  final int mountItemCount = mMountItems == null ? 0 : mMountItems.length;
  for (int i = 0; i < mountItemCount; i++) {
    final MountItem mountItem = mMountItems[i];
    if (mountItem != null && mountItem.getRenderUnit().getRenderType() == VIEW) {
      final View child = (View) mountItem.getContent();
      mChildDrawingOrder[index++] = indexOfChild(child);
    }
  }
  mIsChildDrawingOrderDirty = false;
}
private void ensureScrapMountItemsArray() {
if (mScrapMountItemsArray == null) {
mScrapMountItemsArray = new MountItem[mMountItems.length];
}
}
/** Drops the scrap array once it no longer holds any displaced items. */
private void releaseScrapDataStructuresIfNeeded() {
  final MountItem[] scrap = mScrapMountItemsArray;
  if (scrap != null && isEmpty(scrap)) {
    mScrapMountItemsArray = null;
  }
}
/** Returns whether every slot of the given scrap array is null. */
private static boolean isEmpty(MountItem[] scrapMountItemsArray) {
  for (MountItem item : scrapMountItemsArray) {
    if (item != null) {
      return false;
    }
  }
  return true;
}
/**
 * Mounts a drawable item: hands the drawable to {@code MountUtils.mountDrawable} to attach it to
 * this host, then invalidates the render tree node's bounds so the new content gets drawn.
 */
private void mountDrawable(MountItem mountItem) {
  final Drawable drawable = (Drawable) mountItem.getContent();
  MountUtils.mountDrawable(this, drawable);
  invalidate(mountItem.getRenderTreeNode().getBounds());
}
private void unmountDrawable(MountItem mountItem) {
final Drawable drawable = (Drawable) mountItem.getContent();
drawable.setCallback(null);
invalidate(drawable.getBounds());
}
/**
* Encapsulates the logic for drawing a set of views and drawables respecting their drawing order
* withing the component host i.e. allow interleaved views and drawables to be drawn with the
* correct z-index.
*/
/**
 * Encapsulates the logic for drawing a set of views and drawables respecting their drawing order
 * within the component host, i.e. allows interleaved views and drawables to be drawn with the
 * correct z-index.
 */
private class InterleavedDispatchDraw {
  // Canvas of the in-flight dispatchDraw() pass; null when no draw pass is active.
  private @Nullable Canvas mCanvas;
  // Index into mMountItems of the next item to consider drawing.
  private int mDrawIndex;
  // Snapshot of the mount item count taken at the start of the pass.
  private int mItemsToDraw;
  private InterleavedDispatchDraw() {}
  /** Begins a draw pass: records the canvas and how many items need drawing. */
  private void start(Canvas canvas) {
    mCanvas = canvas;
    mDrawIndex = 0;
    mItemsToDraw = mMountItems == null ? 0 : getMountItemCount();
  }
  /** Whether a draw pass is active and there are still items left to draw. */
  private boolean isRunning() {
    return (mCanvas != null && mDrawIndex < mItemsToDraw);
  }
  /**
   * Draws drawables from the current position up to (but not including) the next view item, then
   * yields so the ViewGroup can draw that child view itself.
   */
  private void drawNext() {
    if (mCanvas == null) {
      return;
    }
    for (int i = mDrawIndex, size = (mMountItems == null) ? 0 : getMountItemCount();
        i < size;
        i++) {
      final MountItem mountItem = mMountItems[i];
      if (mountItem == null) {
        continue;
      }
      // During a ViewGroup's dispatchDraw() call with children drawing order enabled,
      // getChildDrawingOrder() will be called before each child view is drawn. This
      // method will only draw the drawables "between" the child views and then let
      // the host draw its children as usual. This is why views are skipped here.
      if (mountItem.getRenderUnit().getRenderType() == VIEW) {
        mDrawIndex = i + 1;
        return;
      }
      // Unbound items are skipped — presumably not yet ready to draw; confirm with MountItem.
      if (!mountItem.isBound()) {
        continue;
      }
      ((Drawable) mountItem.getContent()).draw(mCanvas);
    }
    mDrawIndex = mItemsToDraw;
  }
  /** Ends the draw pass and releases the canvas reference. */
  private void end() {
    mCanvas = null;
  }
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
if (mForeground != null) {
mForeground.setBounds(0, 0, getRight(), getBottom());
}
}
@Override
public void draw(Canvas canvas) {
super.draw(canvas);
if (mForeground != null) {
mForeground.draw(canvas);
}
}
public void setForegroundCompat(@Nullable Drawable drawable) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
MarshmallowHelper.setForeground(this, drawable);
} else {
setForegroundLollipop(drawable);
}
}
/**
* Copied over from FrameLayout#setForeground from API Version 16 with some differences: supports
* only fill gravity and does not support padded foreground
*/
/**
 * Copied over from FrameLayout#setForeground from API Version 16 with some differences: supports
 * only fill gravity and does not support padded foreground.
 *
 * @param newForeground the drawable to use as the foreground, or null to clear it.
 */
private void setForegroundLollipop(@Nullable Drawable newForeground) {
  if (mForeground != newForeground) {
    if (mForeground != null) {
      // Detach the old foreground so it no longer invalidates/schedules on this view.
      mForeground.setCallback(null);
      unscheduleDrawable(mForeground);
    }
    mForeground = newForeground;
    if (newForeground != null) {
      newForeground.setCallback(this);
      if (newForeground.isStateful()) {
        newForeground.setState(getDrawableState());
      }
    }
    invalidate();
  }
}
static class MarshmallowHelper {
@RequiresApi(api = Build.VERSION_CODES.M)
static void setForeground(HostView hostView, @Nullable Drawable newForeground) {
hostView.setForeground(newForeground);
}
}
/**
 * Recursively re-measures and re-lays-out any child that has requested layout, pinning it to its
 * current size and position so a child-initiated request cannot alter the sizes computed by the
 * component's own layout.
 */
static void performLayoutOnChildrenIfNecessary(HostView host) {
  for (int i = 0, count = host.getChildCount(); i < count; i++) {
    final View child = host.getChildAt(i);
    if (child.isLayoutRequested()) {
      // The hosting view doesn't allow children to change sizes dynamically as
      // this would conflict with the component's own layout calculations.
      child.measure(
          MeasureSpec.makeMeasureSpec(child.getWidth(), MeasureSpec.EXACTLY),
          MeasureSpec.makeMeasureSpec(child.getHeight(), MeasureSpec.EXACTLY));
      child.layout(child.getLeft(), child.getTop(), child.getRight(), child.getBottom());
    }
    if (child instanceof HostView) {
      // Recurse so nested hosts get the same treatment.
      performLayoutOnChildrenIfNecessary((HostView) child);
    }
  }
}
}
| litho-rendercore/src/main/java/com/facebook/rendercore/HostView.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.rendercore;
import static com.facebook.rendercore.RenderUnit.RenderType.DRAWABLE;
import static com.facebook.rendercore.RenderUnit.RenderType.VIEW;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.util.AttributeSet;
import android.util.SparseArray;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.core.graphics.drawable.DrawableCompat;
import androidx.core.view.ViewCompat;
/** A ViewGroup that can be used as a host for subtrees in a RenderCore tree. */
public class HostView extends Host {
private static final int INITIAL_MOUNT_ITEMS_SIZE = 8;
private final InterleavedDispatchDraw mDispatchDraw = new InterleavedDispatchDraw();
private MountItem[] mMountItems;
private int[] mChildDrawingOrder = new int[0];
private boolean mIsChildDrawingOrderDirty;
private boolean mInLayout;
private @Nullable InterceptTouchHandler mOnInterceptTouchEventHandler;
private @Nullable MountItem[] mScrapMountItemsArray;
private @Nullable Object mViewTag;
private @Nullable SparseArray<Object> mViewTags;
private @Nullable Drawable mForeground;
/**
* {@link ViewGroup#getClipChildren()} was only added in API 18, will need to keep track of this
* flag ourselves on the lower versions
*/
private boolean mClipChildren = true;
public HostView(Context context) {
this(context, null);
}
public HostView(Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
setWillNotDraw(false);
setChildrenDrawingOrderEnabled(true);
mMountItems = new MountItem[INITIAL_MOUNT_ITEMS_SIZE];
}
@Override
public void mount(int index, MountItem mountItem) {
if (mountItem.getRenderUnit().getRenderType() == DRAWABLE) {
mountDrawable(mountItem);
} else {
mountView(mountItem);
}
ensureSize(index);
mMountItems[index] = mountItem;
}
private void ensureSize(int index) {
if (index >= mMountItems.length) {
int newLength = mMountItems.length * 2;
while (index >= newLength) {
newLength = newLength * 2;
}
final MountItem[] tmp = new MountItem[newLength];
System.arraycopy(mMountItems, 0, tmp, 0, mMountItems.length);
mMountItems = tmp;
}
}
@Override
public void unmount(MountItem item) {
final int index = findItemIndex(item);
unmount(index, item);
}
private int findItemIndex(MountItem item) {
for (int i = 0; i < mMountItems.length; i++) {
if (mMountItems[i] == item) {
return i;
}
}
throw new IllegalStateException(
"Mount item "
+ item
+ "Was selected for unmount but was not found in the list of mounted items");
}
@Override
public void unmount(int index, MountItem mountItem) {
if (mountItem.getRenderUnit().getRenderType() == DRAWABLE) {
unmountDrawable(mountItem);
} else {
unmountView(mountItem);
mIsChildDrawingOrderDirty = true;
}
MountUtils.removeItem(index, mMountItems, mScrapMountItemsArray);
releaseScrapDataStructuresIfNeeded();
}
@Override
public int getMountItemCount() {
int size = 0;
for (int i = 0; i < mMountItems.length; i++) {
if (mMountItems[i] != null) {
size++;
}
}
return size;
}
@Override
public MountItem getMountItemAt(int index) {
return mMountItems[index];
}
@Override
public void moveItem(MountItem item, int oldIndex, int newIndex) {
if (item == null && mScrapMountItemsArray != null) {
item = mScrapMountItemsArray[oldIndex];
}
if (item == null) {
return;
}
final Object content = item.getContent();
if (item.getRenderUnit().getRenderType() == DRAWABLE) {
invalidate();
} else {
mIsChildDrawingOrderDirty = true;
startTemporaryDetach((View) content);
}
ensureSize(newIndex);
if (mMountItems[newIndex] != null) {
ensureScrapMountItemsArray();
MountUtils.scrapItemAt(newIndex, mMountItems, mScrapMountItemsArray);
}
MountUtils.moveItem(oldIndex, newIndex, mMountItems, mScrapMountItemsArray);
releaseScrapDataStructuresIfNeeded();
if (item.getRenderUnit().getRenderType() == VIEW) {
finishTemporaryDetach((View) content);
}
}
/**
* Sets an InterceptTouchHandler that will be invoked when {@link HostView#onInterceptTouchEvent}
* is called.
*
* @param interceptTouchEventHandler the handler to be set on this host.
*/
public void setInterceptTouchEventHandler(
@Nullable InterceptTouchHandler interceptTouchEventHandler) {
mOnInterceptTouchEventHandler = interceptTouchEventHandler;
}
@Override
public boolean onInterceptTouchEvent(MotionEvent ev) {
if (mOnInterceptTouchEventHandler != null) {
return mOnInterceptTouchEventHandler.onInterceptTouchEvent(this, ev);
}
return super.onInterceptTouchEvent(ev);
}
private void mountView(MountItem mountItem) {
final View view = (View) mountItem.getContent();
mIsChildDrawingOrderDirty = true;
// A host has been recycled and is already attached.
if (view instanceof HostView && view.getParent() == this) {
finishTemporaryDetach(view);
view.setVisibility(VISIBLE);
return;
}
LayoutParams lp = view.getLayoutParams();
if (lp == null) {
lp = generateDefaultLayoutParams();
view.setLayoutParams(lp);
}
if (mInLayout) {
super.addViewInLayout(view, -1, view.getLayoutParams(), true);
} else {
super.addView(view, -1, view.getLayoutParams());
}
}
private void unmountView(MountItem mountItem) {
final View view = (View) mountItem.getContent();
mIsChildDrawingOrderDirty = true;
// Sometime a view is not getting it's 'pressed' state reset before unmount, causing that state
// to not be cleared and carried to next reuse, therefore applying wrong drawable state.
// Particular case where this might happen is when view is unmounted as soon as click event
// is triggered.
if (view.isPressed()) {
view.setPressed(false);
}
if (mInLayout) {
super.removeViewInLayout(view);
} else {
super.removeView(view);
}
}
@Override
public void dispatchDraw(Canvas canvas) {
mDispatchDraw.start(canvas);
super.dispatchDraw(canvas);
// Cover the case where the host has no child views, in which case
// getChildDrawingOrder() will not be called and the draw index will not
// be incremented. This will also cover the case where drawables must be
// painted after the last child view in the host.
if (mDispatchDraw.isRunning()) {
mDispatchDraw.drawNext();
}
mDispatchDraw.end();
}
@Override
protected int getChildDrawingOrder(int childCount, int i) {
updateChildDrawingOrderIfNeeded();
// This method is called in very different contexts within a ViewGroup
// e.g. when handling input events, drawing, etc. We only want to call
// the draw methods if the InterleavedDispatchDraw is active.
if (mDispatchDraw.isRunning()) {
mDispatchDraw.drawNext();
}
return mChildDrawingOrder[i];
}
@Override
public boolean shouldDelayChildPressedState() {
return false;
}
@SuppressLint("ClickableViewAccessibility")
@Override
public boolean onTouchEvent(MotionEvent event) {
boolean handled = false;
if (isEnabled()) {
// Iterate drawable from last to first to respect drawing order.
for (int i = ((mMountItems == null) ? 0 : mMountItems.length) - 1; i >= 0; i--) {
final MountItem item = mMountItems[i];
if (item != null
&& item.getRenderUnit().getRenderType() == DRAWABLE
&& item.getContent() instanceof Touchable) {
final Touchable t = (Touchable) item.getContent();
if (t.shouldHandleTouchEvent(event) && t.onTouchEvent(event, this)) {
handled = true;
break;
}
}
}
}
if (!handled) {
handled = super.onTouchEvent(event);
}
return handled;
}
void performLayout(boolean changed, int l, int t, int r, int b) {}
@Override
protected final void onLayout(boolean changed, int l, int t, int r, int b) {
mInLayout = true;
performLayout(changed, l, t, r, b);
mInLayout = false;
}
@Override
public void requestLayout() {
// Don't request a layout if it will be blocked by any parent. Requesting a layout that is
// then ignored by an ancestor means that this host will remain in a state where it thinks that
// it has requested layout, and will therefore ignore future layout requests. This will lead to
// problems if a child (e.g. a ViewPager) requests a layout later on, since the request will be
// wrongly ignored by this host.
ViewParent parent = this;
while (parent instanceof HostView) {
final HostView host = (HostView) parent;
if (!host.shouldRequestLayout()) {
return;
}
parent = parent.getParent();
}
super.requestLayout();
}
protected boolean shouldRequestLayout() {
// Don't bubble during layout.
return !mInLayout;
}
@Override
@SuppressLint("MissingSuperCall")
protected boolean verifyDrawable(Drawable who) {
return true;
}
@Override
protected void drawableStateChanged() {
super.drawableStateChanged();
for (int i = 0, size = (mMountItems == null) ? 0 : mMountItems.length; i < size; i++) {
final MountItem mountItem = mMountItems[i];
if (mountItem != null && mountItem.getRenderUnit().getRenderType() == DRAWABLE) {
MountUtils.maybeSetDrawableState(this, (Drawable) mountItem.getContent());
}
}
if (mForeground != null) {
mForeground.setState(getDrawableState());
}
}
@Override
public void jumpDrawablesToCurrentState() {
super.jumpDrawablesToCurrentState();
for (int i = 0, size = (mMountItems == null) ? 0 : mMountItems.length; i < size; i++) {
final MountItem mountItem = mMountItems[i];
if (mountItem != null && mountItem.getRenderUnit().getRenderType() == DRAWABLE) {
final Drawable drawable = (Drawable) mountItem.getContent();
DrawableCompat.jumpToCurrentState(drawable);
}
}
if (mForeground != null) {
mForeground.jumpToCurrentState();
}
}
@Override
public void setVisibility(int visibility) {
super.setVisibility(visibility);
for (int i = 0, size = (mMountItems == null) ? 0 : mMountItems.length; i < size; i++) {
MountItem mountItem = mMountItems[i];
if (mountItem != null && mountItem.getRenderUnit().getRenderType() == DRAWABLE) {
final Drawable drawable = (Drawable) mountItem.getContent();
drawable.setVisible(visibility == View.VISIBLE, false);
}
}
}
/**
* Sets view tag on this host.
*
* @param viewTag the object to set as tag.
*/
public void setViewTag(Object viewTag) {
mViewTag = viewTag;
}
/**
* Sets view tags on this host.
*
* @param viewTags the map containing the tags by id.
*/
public void setViewTags(SparseArray<Object> viewTags) {
mViewTags = viewTags;
}
@Override
public Object getTag() {
if (mViewTag != null) {
return mViewTag;
}
return super.getTag();
}
@Override
public Object getTag(int key) {
if (mViewTags != null) {
final Object value = mViewTags.get(key);
if (value != null) {
return value;
}
}
return super.getTag(key);
}
@Override
public boolean getClipChildren() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
// There is no ViewGroup.getClipChildren() method on API < 18
return mClipChildren;
} else {
return super.getClipChildren();
}
}
@Override
public void setClipChildren(boolean clipChildren) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
// There is no ViewGroup.getClipChildren() method on API < 18, will keep track this way
mClipChildren = clipChildren;
}
super.setClipChildren(clipChildren);
}
private static void startTemporaryDetach(View view) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
// Cancel any pending clicks.
view.cancelPendingInputEvents();
}
// The HostView's parent will send an ACTION_CANCEL if it's going to receive
// other motion events for the recycled child.
ViewCompat.dispatchStartTemporaryDetach(view);
}
private static void finishTemporaryDetach(View view) {
ViewCompat.dispatchFinishTemporaryDetach(view);
}
private void updateChildDrawingOrderIfNeeded() {
if (!mIsChildDrawingOrderDirty) {
return;
}
final int childCount = getChildCount();
if (mChildDrawingOrder.length < childCount) {
mChildDrawingOrder = new int[childCount + 5];
}
int index = 0;
final int mountItemCount = mMountItems == null ? 0 : mMountItems.length;
for (int i = 0; i < mountItemCount; i++) {
final MountItem mountItem = mMountItems[i];
if (mountItem != null && mountItem.getRenderUnit().getRenderType() == VIEW) {
final View child = (View) mountItem.getContent();
mChildDrawingOrder[index++] = indexOfChild(child);
}
}
mIsChildDrawingOrderDirty = false;
}
private void ensureScrapMountItemsArray() {
if (mScrapMountItemsArray == null) {
mScrapMountItemsArray = new MountItem[mMountItems.length];
}
}
private void releaseScrapDataStructuresIfNeeded() {
if (mScrapMountItemsArray != null && isEmpty(mScrapMountItemsArray)) {
mScrapMountItemsArray = null;
}
}
private static boolean isEmpty(MountItem[] scrapMountItemsArray) {
for (int i = 0; i < scrapMountItemsArray.length; i++) {
if (scrapMountItemsArray[i] != null) {
return false;
}
}
return true;
}
private void mountDrawable(MountItem mountItem) {
final Drawable drawable = (Drawable) mountItem.getContent();
MountUtils.mountDrawable(this, drawable);
}
private void unmountDrawable(MountItem mountItem) {
final Drawable drawable = (Drawable) mountItem.getContent();
drawable.setCallback(null);
invalidate(drawable.getBounds());
}
/**
 * Encapsulates the logic for drawing a set of views and drawables respecting their drawing
 * order within the component host, i.e. allows interleaved views and drawables to be drawn
 * with the correct z-index. Views are drawn by the ViewGroup as usual; this helper only fills
 * in the drawables that sit between them.
 */
private class InterleavedDispatchDraw {

  private @Nullable Canvas mCanvas;
  private int mDrawIndex;
  private int mItemsToDraw;

  private InterleavedDispatchDraw() {}

  /** Begins a draw pass over the current mount items on the given canvas. */
  private void start(Canvas canvas) {
    mCanvas = canvas;
    mDrawIndex = 0;
    mItemsToDraw = mMountItems == null ? 0 : getMountItemCount();
  }

  /** True while a draw pass is active and items remain to be drawn. */
  private boolean isRunning() {
    return mCanvas != null && mDrawIndex < mItemsToDraw;
  }

  /**
   * Draws drawables up to (but excluding) the next mounted view. During a ViewGroup's
   * dispatchDraw() call with children drawing order enabled, getChildDrawingOrder() is
   * invoked before each child view is drawn; this method only paints the drawables
   * "between" the child views and lets the host draw its children as usual — which is
   * why items of render type VIEW are skipped here.
   */
  private void drawNext() {
    if (mCanvas == null) {
      return;
    }
    final int size = (mMountItems == null) ? 0 : getMountItemCount();
    int position = mDrawIndex;
    while (position < size) {
      final MountItem item = mMountItems[position];
      if (item != null) {
        // Stop just past a view: the ViewGroup will draw it next.
        if (item.getRenderUnit().getRenderType() == VIEW) {
          mDrawIndex = position + 1;
          return;
        }
        // Only bound drawables are painted; unbound ones are silently skipped.
        if (item.isBound()) {
          ((Drawable) item.getContent()).draw(mCanvas);
        }
      }
      position++;
    }
    mDrawIndex = mItemsToDraw;
  }

  /** Ends the draw pass and releases the canvas reference. */
  private void end() {
    mCanvas = null;
  }
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
  super.onSizeChanged(w, h, oldw, oldh);
  if (mForeground != null) {
    // Drawable bounds live in this view's own coordinate space, so they must span
    // (0, 0, width, height). The previous code used getRight()/getBottom(), which are
    // parent-relative and only equal the view's size when it is positioned at (0, 0).
    mForeground.setBounds(0, 0, w, h);
  }
}
/** Draws the view's content first, then the optional foreground on top of everything. */
@Override
public void draw(Canvas canvas) {
  super.draw(canvas);
  final Drawable foreground = mForeground;
  if (foreground != null) {
    foreground.draw(canvas);
  }
}
/**
 * Sets a foreground drawable on any platform level: delegates to the framework
 * implementation on Marshmallow and above, otherwise uses the local backport.
 */
public void setForegroundCompat(@Nullable Drawable drawable) {
  if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
    setForegroundLollipop(drawable);
  } else {
    MarshmallowHelper.setForeground(this, drawable);
  }
}
/**
 * Copied over from FrameLayout#setForeground from API Version 16 with some differences:
 * supports only fill gravity and does not support padded foreground.
 */
private void setForegroundLollipop(@Nullable Drawable newForeground) {
  if (mForeground == newForeground) {
    return;
  }
  final Drawable previous = mForeground;
  if (previous != null) {
    // Detach the old foreground so it no longer invalidates or schedules on this view.
    previous.setCallback(null);
    unscheduleDrawable(previous);
  }
  mForeground = newForeground;
  if (newForeground != null) {
    newForeground.setCallback(this);
    if (newForeground.isStateful()) {
      newForeground.setState(getDrawableState());
    }
  }
  invalidate();
}
/**
 * Isolates the Marshmallow-only {@code View#setForeground} call in its own class so that
 * the reference is only loaded/verified when actually invoked on API >= M devices.
 */
static class MarshmallowHelper {
  @RequiresApi(api = Build.VERSION_CODES.M)
  static void setForeground(HostView hostView, @Nullable Drawable newForeground) {
    hostView.setForeground(newForeground);
  }
}
/**
 * Recursively satisfies pending layout requests of the host's children by re-measuring and
 * re-laying each one out at exactly the frame it already occupies. The hosting view doesn't
 * allow children to change sizes dynamically, as that would conflict with the component's
 * own layout calculations.
 */
static void performLayoutOnChildrenIfNecessary(HostView host) {
  final int childCount = host.getChildCount();
  for (int i = 0; i < childCount; i++) {
    final View child = host.getChildAt(i);
    if (child.isLayoutRequested()) {
      // Force the child back to its current frame with EXACTLY specs.
      child.measure(
          MeasureSpec.makeMeasureSpec(child.getWidth(), MeasureSpec.EXACTLY),
          MeasureSpec.makeMeasureSpec(child.getHeight(), MeasureSpec.EXACTLY));
      child.layout(child.getLeft(), child.getTop(), child.getRight(), child.getBottom());
    }
    if (child instanceof HostView) {
      performLayoutOnChildrenIfNecessary((HostView) child);
    }
  }
}
}
| Invalidates HostView when a drawable in mounted
Summary: Invalidates HostView when a drawable in mounted
Reviewed By: mihaelao
Differential Revision: D24949498
fbshipit-source-id: 6807b46a6674d298042bf1bf177b43aa4476eac6
| litho-rendercore/src/main/java/com/facebook/rendercore/HostView.java | Invalidates HostView when a drawable in mounted |
|
Java | apache-2.0 | 86a9147ebb3f2204fe79554d494a1e2bf172b8e2 | 0 | auricgoldfinger/cgeo,KublaikhanGeek/cgeo,SammysHP/cgeo,brok85/cgeo,marco-dev/c-geo-opensource,superspindel/cgeo,schwabe/cgeo,Bananeweizen/cgeo,rsudev/c-geo-opensource,samueltardieu/cgeo,lewurm/cgeo,kumy/cgeo,yummy222/cgeo,yummy222/cgeo,mucek4/cgeo,mucek4/cgeo,brok85/cgeo,schwabe/cgeo,auricgoldfinger/cgeo,ThibaultR/cgeo,cgeo/cgeo,madankb/cgeo,xiaoyanit/cgeo,lewurm/cgeo,pstorch/cgeo,pstorch/cgeo,ThibaultR/cgeo,matej116/cgeo,matej116/cgeo,tobiasge/cgeo,SammysHP/cgeo,S-Bartfast/cgeo,cgeo/cgeo,KublaikhanGeek/cgeo,xiaoyanit/cgeo,samueltardieu/cgeo,KublaikhanGeek/cgeo,matej116/cgeo,madankb/cgeo,Huertix/cgeo,vishwakulkarni/cgeo,madankb/cgeo,superspindel/cgeo,SammysHP/cgeo,xiaoyanit/cgeo,auricgoldfinger/cgeo,Bananeweizen/cgeo,tobiasge/cgeo,vishwakulkarni/cgeo,marco-dev/c-geo-opensource,ThibaultR/cgeo,kumy/cgeo,kumy/cgeo,brok85/cgeo,schwabe/cgeo,mucek4/cgeo,superspindel/cgeo,rsudev/c-geo-opensource,Bananeweizen/cgeo,pstorch/cgeo,cgeo/cgeo,schwabe/cgeo,tobiasge/cgeo,rsudev/c-geo-opensource,marco-dev/c-geo-opensource,cgeo/cgeo,lewurm/cgeo,S-Bartfast/cgeo,samueltardieu/cgeo,vishwakulkarni/cgeo,Huertix/cgeo,Huertix/cgeo,S-Bartfast/cgeo,yummy222/cgeo | package cgeo.geocaching.maps;
import cgeo.geocaching.maps.google.GoogleMapProvider;
import cgeo.geocaching.maps.interfaces.MapProvider;
import cgeo.geocaching.maps.interfaces.MapSource;
import cgeo.geocaching.maps.mapsforge.MapsforgeMapProvider;

import android.view.Menu;

import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
/** Registry of available map providers and the map sources they contribute. */
public class MapProviderFactory {

    private final static int GOOGLEMAP_BASEID = 30;
    private final static int MFMAP_BASEID = 40;

    /** All available providers; Google is present only when its API is on the classpath. */
    private final static MapProvider[] mapProviders;

    /** Aggregated map sources of all providers, keyed and sorted by numeric source id. */
    private final static SortedMap<Integer, MapSource> mapSources;

    static {
        // add GoogleMapProvider only if google api is available in order to support x86 android emulator
        if (isGoogleMapsInstalled()) {
            mapProviders = new MapProvider[] { new GoogleMapProvider(GOOGLEMAP_BASEID), new MapsforgeMapProvider(MFMAP_BASEID) };
        }
        else {
            mapProviders = new MapProvider[] { new MapsforgeMapProvider(MFMAP_BASEID) };
        }
        mapSources = new TreeMap<Integer, MapSource>();
        for (MapProvider mp : mapProviders) {
            mapSources.putAll(mp.getMapSources());
        }
    }

    /**
     * @return <code>true</code> if the Google Maps API classes can be loaded
     *         (they are absent e.g. on the x86 Android emulator)
     */
    private static boolean isGoogleMapsInstalled() {
        try {
            // return directly instead of tracking the result in a mutable flag
            Class.forName("com.google.android.maps.MapActivity");
            return true;
        } catch (ClassNotFoundException e) {
            return false;
        }
    }

    public static SortedMap<Integer, MapSource> getMapSources() {
        return mapSources;
    }

    public static boolean isValidSourceId(int sourceId) {
        return mapSources.containsKey(sourceId);
    }

    /**
     * @return <code>true</code> if both source ids belong to the same provider and that
     *         provider renders them with the same activity, <code>false</code> otherwise
     */
    public static boolean isSameActivity(int sourceId1, int sourceId2) {
        for (MapProvider mp : mapProviders) {
            if (mp.isMySource(sourceId1) && mp.isMySource(sourceId2)) {
                return mp.isSameActivity(sourceId1, sourceId2);
            }
        }
        return false;
    }

    /** @return the provider owning the given source id, or the first provider as a fallback */
    public static MapProvider getMapProvider(int sourceId) {
        for (MapProvider mp : mapProviders) {
            if (mp.isMySource(sourceId)) {
                return mp;
            }
        }
        return mapProviders[0];
    }

    /** @return the zero-based position of the given source id, or 0 if the id is unknown */
    public static int getSourceOrdinalFromId(int sourceId) {
        int sourceOrdinal = 0;
        for (int key : mapSources.keySet()) {
            if (sourceId == key) {
                return sourceOrdinal;
            }
            sourceOrdinal++;
        }
        return 0;
    }

    /** @return the source id at the given zero-based position, or the first id if out of range */
    public static int getSourceIdFromOrdinal(int sourceOrdinal) {
        int count = 0;
        for (int key : mapSources.keySet()) {
            if (sourceOrdinal == count) {
                return key;
            }
            count++;
        }
        return mapSources.firstKey();
    }

    /** Adds one checkable menu entry per map source, checking the currently selected one. */
    public static void addMapviewMenuItems(Menu parentMenu, int groupId, int currentSource) {
        // iterate over entries instead of looking every key up again via get()
        for (Map.Entry<Integer, MapSource> entry : mapSources.entrySet()) {
            final int sourceId = entry.getKey();
            parentMenu.add(groupId, sourceId, 0, entry.getValue().getName()).setCheckable(true).setChecked(sourceId == currentSource);
        }
    }

    /** Menu item ids are the map source ids themselves. */
    public static int getMapSourceFromMenuId(int menuId) {
        return menuId;
    }

    public static MapSource getMapSource(int sourceId) {
        return mapSources.get(sourceId);
    }
}
| main/src/cgeo/geocaching/maps/MapProviderFactory.java | package cgeo.geocaching.maps;
import cgeo.geocaching.maps.google.GoogleMapProvider;
import cgeo.geocaching.maps.interfaces.MapProvider;
import cgeo.geocaching.maps.interfaces.MapSource;
import cgeo.geocaching.maps.mapsforge.MapsforgeMapProvider;
import android.view.Menu;
import java.util.SortedMap;
import java.util.TreeMap;
/** Registry of available map providers and the map sources they contribute. */
public class MapProviderFactory {

    private final static int GOOGLEMAP_BASEID = 30;
    private final static int MFMAP_BASEID = 40;

    /** All available providers; Google is present only when its API is on the classpath. */
    private final static MapProvider[] mapProviders;

    /** Aggregated map sources of all providers, keyed and sorted by numeric source id. */
    private final static SortedMap<Integer, MapSource> mapSources;

    static {
        // add GoogleMapProvider only if google api is available in order to support x86 android emulator
        if (isGoogleMapsInstalled()) {
            mapProviders = new MapProvider[] { new GoogleMapProvider(GOOGLEMAP_BASEID), new MapsforgeMapProvider(MFMAP_BASEID) };
        }
        else {
            mapProviders = new MapProvider[] { new MapsforgeMapProvider(MFMAP_BASEID) };
        }
        mapSources = new TreeMap<Integer, MapSource>();
        for (MapProvider mp : mapProviders) {
            mapSources.putAll(mp.getMapSources());
        }
    }

    /**
     * @return <code>true</code> if the Google Maps API classes can be loaded
     *         (they are absent e.g. on the x86 Android emulator)
     */
    private static boolean isGoogleMapsInstalled() {
        try {
            // return directly instead of tracking the result in a mutable flag
            Class.forName("com.google.android.maps.MapActivity");
            return true;
        } catch (ClassNotFoundException e) {
            return false;
        }
    }

    public static SortedMap<Integer, MapSource> getMapSources() {
        return mapSources;
    }

    public static boolean isValidSourceId(int sourceId) {
        return mapSources.containsKey(sourceId);
    }

    /**
     * @return <code>true</code> if both source ids belong to the same provider and that
     *         provider renders them with the same activity, <code>false</code> otherwise
     */
    public static boolean isSameActivity(int sourceId1, int sourceId2) {
        for (MapProvider mp : mapProviders) {
            if (mp.isMySource(sourceId1) && mp.isMySource(sourceId2)) {
                return mp.isSameActivity(sourceId1, sourceId2);
            }
        }
        return false;
    }

    /** @return the provider owning the given source id, or the first provider as a fallback */
    public static MapProvider getMapProvider(int sourceId) {
        for (MapProvider mp : mapProviders) {
            if (mp.isMySource(sourceId)) {
                return mp;
            }
        }
        return mapProviders[0];
    }

    /** @return the zero-based position of the given source id, or 0 if the id is unknown */
    public static int getSourceOrdinalFromId(int sourceId) {
        int sourceOrdinal = 0;
        for (int key : mapSources.keySet()) {
            if (sourceId == key) {
                return sourceOrdinal;
            }
            sourceOrdinal++;
        }
        return 0;
    }

    /** @return the source id at the given zero-based position, or the first id if out of range */
    public static int getSourceIdFromOrdinal(int sourceOrdinal) {
        int count = 0;
        for (int key : mapSources.keySet()) {
            if (sourceOrdinal == count) {
                return key;
            }
            count++;
        }
        return mapSources.firstKey();
    }

    /** Adds one checkable menu entry per map source, checking the currently selected one. */
    public static void addMapviewMenuItems(Menu parentMenu, int groupId, int currentSource) {
        for (Integer key : mapSources.keySet()) {
            // auto-unboxing makes the explicit intValue() call unnecessary
            parentMenu.add(groupId, key, 0, mapSources.get(key).getName()).setCheckable(true).setChecked(key == currentSource);
        }
    }

    /** Menu item ids are the map source ids themselves. */
    public static int getMapSourceFromMenuId(int menuId) {
        return menuId;
    }

    public static MapSource getMapSource(int sourceId) {
        // autoboxing makes the explicit Integer.valueOf() call unnecessary
        return mapSources.get(sourceId);
    }
}
| Refactoring: remove unnecessary boxing/unboxing
| main/src/cgeo/geocaching/maps/MapProviderFactory.java | Refactoring: remove unnecessary boxing/unboxing |
|
Java | apache-2.0 | 438b40d7fe375a986300475ca978e8b83ba1a560 | 0 | Polidea/RxAndroidBle,Polidea/RxAndroidBle,Polidea/RxAndroidBle | package com.polidea.rxandroidble;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGattCallback;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import com.polidea.rxandroidble.exceptions.BleDisconnectedException;
import com.polidea.rxandroidble.exceptions.BleGattCallbackTimeoutException;
import com.polidea.rxandroidble.exceptions.BleGattException;
import rx.Observable;
public interface RxBleDevice {

    /**
     * Observe changes to connection state of the device's {@link android.bluetooth.BluetoothGatt}.
     * This Observable will never emit errors.
     *
     * If you would like to have the initial state as well you can use observeConnectionStateChanges().startWith(getConnectionState())
     *
     * NOTE: This is a convenience function for easy state changes monitoring of an individual peripheral that may be useful in the UI.
     * It is not meant to be a trigger for reconnecting a particular device—for this purpose one should react on the errors emitted from
     * {@link #establishConnection(boolean)}
     *
     * @return observable that will emit {@link com.polidea.rxandroidble.RxBleConnection.RxBleConnectionState} changes
     */
    Observable<RxBleConnection.RxBleConnectionState> observeConnectionStateChanges();

    /**
     * Returns the current connection state of the device's {@link android.bluetooth.BluetoothGatt}
     *
     * @return the RxBleConnectionState
     */
    RxBleConnection.RxBleConnectionState getConnectionState();

    /**
     * @param context Android's context.
     * @param autoConnect Flag related to
     *                    {@link android.bluetooth.BluetoothDevice#connectGatt(Context, boolean, BluetoothGattCallback)} autoConnect flag.
     *                    If false, the connection will fail with {@link com.polidea.rxandroidble.exceptions.BleGattException} if
     *                    the device is not in range after a 30-second timeout. If true, the connection will be pending indefinitely.
     *                    Unlike the native Android API, if set to true and the connection is lost there will NOT be any attempt to
     *                    reconnect unless explicitly resubscribed.
     * @return Observable emitting the connection.
     * @throws BleDisconnectedException emitted when the BLE link has been disconnected either when the connection
     *                                  was already established or was in pending connection state. This occurs when the
     *                                  connection was released as a part of expected behavior
     *                                  (with {@link android.bluetooth.BluetoothGatt#GATT_SUCCESS} state).
     * @throws BleGattException emitted when the BLE link has been interrupted as a result of an error.
     *                          The exception contains detailed explanation of the error source (type of operation) and
     *                          the code proxied from the Android system.
     * @throws BleGattCallbackTimeoutException emitted when an internal timeout for connection has been reached. The operation will
     *                                         timeout in direct mode (autoConnect = false) after 35 seconds.
     * @see #establishConnection(boolean). The context is no longer required.
     */
    @Deprecated
    Observable<RxBleConnection> establishConnection(Context context, boolean autoConnect);

    /**
     * Establishes connection with a given BLE device. {@link RxBleConnection} is a handle, used to process BLE operations with a connected
     * device.
     * <p>
     * The connection is automatically disconnected (and released) when the resulting Observable is unsubscribed.
     * On the other hand when the connection is interrupted by the device or the system, the Observable will be unsubscribed as well
     * following BleDisconnectedException or BleGattException emission.
     * <p>
     * During the disconnect process the library automatically handles order and requirement of device disconnect and gatt close operations.
     * <p>
     * Autoconnect concept may be misleading at first glance. In cases when the BLE device is available and it is advertising constantly you
     * won't need to use autoconnect. Use autoconnect for connections where the BLE device is not advertising at
     * the moment of #establishConnection call.
     *
     * @param autoConnect Flag related to
     *                    {@link android.bluetooth.BluetoothDevice#connectGatt(Context, boolean, BluetoothGattCallback)} autoConnect flag.
     *                    If false, the connection will fail with {@link com.polidea.rxandroidble.exceptions.BleGattException} if
     *                    the device is not in range after a 30-second timeout. If true, the connection will be pending indefinitely.
     *                    Unlike the native Android API, if set to true and the connection is lost there will NOT be any attempt to
     *                    reconnect unless explicitly resubscribed.
     * @return Observable emitting the connection.
     * @throws BleDisconnectedException emitted when the BLE link has been disconnected either when the connection
     *                                  was already established or was in pending connection state. This occurs when the
     *                                  connection was released as a part of expected behavior
     *                                  (with {@link android.bluetooth.BluetoothGatt#GATT_SUCCESS} state).
     * @throws BleGattException emitted when the BLE link has been interrupted as a result of an error.
     *                          The exception contains detailed explanation of the error source (type of operation) and
     *                          the code proxied from the Android system.
     * @throws BleGattCallbackTimeoutException emitted when an internal timeout for connection has been reached. The operation will
     *                                         timeout in direct mode (autoConnect = false) after 35 seconds.
     */
    Observable<RxBleConnection> establishConnection(boolean autoConnect);

    /**
     * Establishes connection with a given BLE device. {@link RxBleConnection} is a handle, used to process BLE operations with a connected
     * device.
     * <p>
     * The connection is automatically disconnected (and released) when the resulting Observable is unsubscribed.
     * On the other hand when the connection is interrupted by the device or the system, the Observable will be unsubscribed as well
     * following BleDisconnectedException or BleGattException emission.
     * <p>
     * During the disconnect process the library automatically handles order and requirement of device disconnect and gatt close operations.
     * <p>
     * Autoconnect concept may be misleading at first glance. In cases when the BLE device is available and it is advertising constantly you
     * won't need to use autoconnect. Use autoconnect for connections where the BLE device is not advertising at
     * the moment of #establishConnection call.
     *
     * @param autoConnect      Flag related to
     *                         {@link android.bluetooth.BluetoothDevice#connectGatt(Context, boolean, BluetoothGattCallback)}
     *                         autoConnect flag.
     *                         If false, the connection will fail with {@link com.polidea.rxandroidble.exceptions.BleGattException} if
     *                         the device is not in range after a 30-second timeout. If true, the connection will be pending indefinitely.
     *                         Unlike the native Android API, if set to true and the connection is lost there will NOT be any attempt to
     *                         reconnect unless explicitly resubscribed.
     * @param operationTimeout Timeout configuration after which the operation will be considered as broken. Eventually the operation
     *                         will be canceled and removed from queue. Keep in mind that it will cancel the library's operation
     *                         only and may leave Android's BLE stack in an inconsistent state.
     * @return Observable emitting the connection.
     * @throws BleDisconnectedException emitted when the BLE link has been disconnected either when the connection
     *                                  was already established or was in pending connection state. This occurs when the
     *                                  connection was released as a part of expected behavior
     *                                  (with {@link android.bluetooth.BluetoothGatt#GATT_SUCCESS} state).
     * @throws BleGattException emitted when the BLE link has been interrupted as a result of an error.
     *                          The exception contains detailed explanation of the error source (type of operation) and
     *                          the code proxied from the Android system.
     * @throws BleGattCallbackTimeoutException emitted when an internal timeout for connection has been reached. The operation will
     *                                         timeout in direct mode (autoConnect = false) after 35 seconds.
     */
    Observable<RxBleConnection> establishConnection(boolean autoConnect, @NonNull Timeout operationTimeout);

    /**
     * Name of the device. The name is optional and it is up to the device vendor whether it will be provided.
     *
     * @return The device name or null if the device name is absent.
     */
    @Nullable
    String getName();

    /**
     * MAC address of the corresponding device.
     */
    String getMacAddress();

    /**
     * The underlying android.bluetooth.BluetoothDevice.
     *
     * NOTE: this should be used with caution and knowledge as interaction with the BluetoothDevice may interrupt the flow of this library.
     *
     * @return the BluetoothDevice
     */
    BluetoothDevice getBluetoothDevice();
}
| rxandroidble/src/main/java/com/polidea/rxandroidble/RxBleDevice.java | package com.polidea.rxandroidble;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGattCallback;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import com.polidea.rxandroidble.exceptions.BleDisconnectedException;
import com.polidea.rxandroidble.exceptions.BleGattCallbackTimeoutException;
import com.polidea.rxandroidble.exceptions.BleGattException;
import rx.Observable;
public interface RxBleDevice {
/**
* Observe changes to connection state of the device's {@link android.bluetooth.BluetoothGatt}.
* This Observable will never emit errors.
*
* If you would like to have the initial state as well you can use observeConnectionStateChanges().startWith(getConnectionState())
*
* @return observable that will emit {@link com.polidea.rxandroidble.RxBleConnection.RxBleConnectionState} changes
*/
Observable<RxBleConnection.RxBleConnectionState> observeConnectionStateChanges();
/**
* Returns current connection state of the device's {@link android.bluetooth.BluetoothGatt}
*
* @return the RxBleConnectionState
*/
RxBleConnection.RxBleConnectionState getConnectionState();
/**
* @param context Android's context.
* @param autoConnect Flag related to
* {@link android.bluetooth.BluetoothDevice#connectGatt(Context, boolean, BluetoothGattCallback)} autoConnect flag.
* If false, the connection will fail with {@link com.polidea.rxandroidble.exceptions.BleGattException} if
* the device is not in range after a 30-second timeout. If true, the connection will be pending indefinitely.
* Unlike the native Android API, if set to true and the connection is lost there will NOT be any attempt to
* reconnect unless explicitly resubscribed.
* @return Observable emitting the connection.
* @throws BleDisconnectedException emitted when the BLE link has been disconnected either when the connection
* was already established or was in pending connection state. This occurs when the
* connection was released as a part of expected behavior
* (with {@link android.bluetooth.BluetoothGatt#GATT_SUCCESS} state).
* @throws BleGattException emitted when the BLE link has been interrupted as a result of an error.
* The exception contains detailed explanation of the error source (type of operation) and
* the code proxied from the Android system.
* @throws BleGattCallbackTimeoutException emitted when an internal timeout for connection has been reached. The operation will
* timeout in direct mode (autoConnect = false) after 35 seconds.
* @see #establishConnection(boolean). The context is no longer required.
*/
@Deprecated
Observable<RxBleConnection> establishConnection(Context context, boolean autoConnect);
/**
* Establishes connection with a given BLE device. {@link RxBleConnection} is a handle, used to process BLE operations with a connected
* device.
* <p>
* The connection is automatically disconnected (and released) when resulting Observable is unsubscribed.
* On the other hand when the connections is interrupted by the device or the system, the Observable will be unsubscribed as well
* following BleDisconnectedException or BleGattException emission.
* <p>
* During the disconnect process the library automatically handles order and requirement of device disconnect and gatt close operations.
* <p>
* Autoconnect concept may be misleading at first glance. In cases when the BLE device is available and it is advertising constantly you
* won't need to use autoconnect. Use autoconnect for connections where the BLE device is not advertising at
* the moment of #establishConnection call.
*
* @param autoConnect Flag related to
* {@link android.bluetooth.BluetoothDevice#connectGatt(Context, boolean, BluetoothGattCallback)} autoConnect flag.
* If false, the connection will fail with {@link com.polidea.rxandroidble.exceptions.BleGattException} if
* the device is not in range after a 30-second timeout. If true, the connection will be pending indefinitely.
* Unlike the native Android API, if set to true and the connection is lost there will NOT be any attempt to
* reconnect unless explicitly resubscribed.
* @return Observable emitting the connection.
* @throws BleDisconnectedException emitted when the BLE link has been disconnected either when the connection
* was already established or was in pending connection state. This occurs when the
* connection was released as a part of expected behavior
* (with {@link android.bluetooth.BluetoothGatt#GATT_SUCCESS} state).
* @throws BleGattException emitted when the BLE link has been interrupted as a result of an error.
* The exception contains detailed explanation of the error source (type of operation) and
* the code proxied from the Android system.
* @throws BleGattCallbackTimeoutException emitted when an internal timeout for connection has been reached. The operation will
* timeout in direct mode (autoConnect = false) after 35 seconds.
*/
Observable<RxBleConnection> establishConnection(boolean autoConnect);
/**
* Establishes connection with a given BLE device. {@link RxBleConnection} is a handle, used to process BLE operations with a connected
* device.
* <p>
* The connection is automatically disconnected (and released) when resulting Observable is unsubscribed.
* On the other hand when the connections is interrupted by the device or the system, the Observable will be unsubscribed as well
* following BleDisconnectedException or BleGattException emission.
* <p>
* During the disconnect process the library automatically handles order and requirement of device disconnect and gatt close operations.
* <p>
* Autoconnect concept may be misleading at first glance. In cases when the BLE device is available and it is advertising constantly you
* won't need to use autoconnect. Use autoconnect for connections where the BLE device is not advertising at
* the moment of #establishConnection call.
*
* @param autoConnect Flag related to
* {@link android.bluetooth.BluetoothDevice#connectGatt(Context, boolean, BluetoothGattCallback)}
* autoConnect flag.
* If false, the connection will fail with {@link com.polidea.rxandroidble.exceptions.BleGattException} if
* the device is not in range after a 30-second timeout. If true, the connection will be pending indefinitely.
* Unlike the native Android API, if set to true and the connection is lost there will NOT be any attempt to
* reconnect unless explicitly resubscribed.
* @param operationTimeout Timeout configuration after which the operation will be considered as broken. Eventually the operation
* will be canceled and removed from queue. Keep in mind that it will cancel the library's operation
* only and may leave Android's BLE stack in an inconsistent state.
* @return Observable emitting the connection.
* @throws BleDisconnectedException emitted when the BLE link has been disconnected either when the connection
* was already established or was in pending connection state. This occurs when the
* connection was released as a part of expected behavior
* (with {@link android.bluetooth.BluetoothGatt#GATT_SUCCESS} state).
* @throws BleGattException emitted when the BLE link has been interrupted as a result of an error.
* The exception contains detailed explanation of the error source (type of operation) and
* the code proxied from the Android system.
* @throws BleGattCallbackTimeoutException emitted when an internal timeout for connection has been reached. The operation will
* timeout in direct mode (autoConnect = false) after 35 seconds.
*/
Observable<RxBleConnection> establishConnection(boolean autoConnect, @NonNull Timeout operationTimeout);
    /**
     * Returns the name of the device. The name is optional; it is up to the device vendor
     * whether one is provided.
     *
     * @return the device name, or null if the device name is absent
     */
    @Nullable
    String getName();
    /**
     * Returns the MAC address of the corresponding device.
     *
     * @return the MAC address of this device
     */
    String getMacAddress();
    /**
     * Returns the underlying {@link android.bluetooth.BluetoothDevice}.
     *
     * <p>NOTE: this should be used with caution, as direct interaction with the BluetoothDevice
     * may interrupt the flow of this library.
     *
     * @return the underlying BluetoothDevice
     */
    BluetoothDevice getBluetoothDevice();
}
| Added a usage hint to device’s observe state function (#381)
RxBleDevice.observeConnectionStateChanges() function not meant to be used as a reconnection logic trigger. It is only a convenience function for using in the UI. | rxandroidble/src/main/java/com/polidea/rxandroidble/RxBleDevice.java | Added a usage hint to device’s observe state function (#381) |
|
Java | apache-2.0 | 05d92b9b686889d9246c5471d90c5092fb6afc3b | 0 | vvcephei/elasticsearch,polyfractal/elasticsearch,Chhunlong/elasticsearch,Charlesdong/elasticsearch,vvcephei/elasticsearch,Stacey-Gammon/elasticsearch,palecur/elasticsearch,jeteve/elasticsearch,sdauletau/elasticsearch,ckclark/elasticsearch,thecocce/elasticsearch,Shekharrajak/elasticsearch,markwalkom/elasticsearch,hirdesh2008/elasticsearch,jw0201/elastic,jprante/elasticsearch,kimimj/elasticsearch,javachengwc/elasticsearch,brwe/elasticsearch,Asimov4/elasticsearch,Widen/elasticsearch,socialrank/elasticsearch,nomoa/elasticsearch,peschlowp/elasticsearch,ThiagoGarciaAlves/elasticsearch,geidies/elasticsearch,geidies/elasticsearch,AshishThakur/elasticsearch,yanjunh/elasticsearch,Asimov4/elasticsearch,amaliujia/elasticsearch,myelin/elasticsearch,pranavraman/elasticsearch,wayeast/elasticsearch,petabytedata/elasticsearch,dataduke/elasticsearch,jeteve/elasticsearch,MjAbuz/elasticsearch,nezirus/elasticsearch,slavau/elasticsearch,bestwpw/elasticsearch,sdauletau/elasticsearch,opendatasoft/elasticsearch,MichaelLiZhou/elasticsearch,Fsero/elasticsearch,Widen/elasticsearch,apepper/elasticsearch,djschny/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,gingerwizard/elasticsearch,cwurm/elasticsearch,elasticdog/elasticsearch,nrkkalyan/elasticsearch,Liziyao/elasticsearch,wangtuo/elasticsearch,btiernay/elasticsearch,mapr/elasticsearch,ivansun1010/elasticsearch,YosuaMichael/elasticsearch,beiske/elasticsearch,lmtwga/elasticsearch,MetSystem/elasticsearch,lzo/elasticsearch-1,Liziyao/elasticsearch,obourgain/elasticsearch,Siddartha07/elasticsearch,EasonYi/elasticsearch,girirajsharma/elasticsearch,springning/elasticsearch,luiseduardohdbackup/elasticsearch,loconsolutions/elasticsearch,khiraiwa/elasticsearch,diendt/elasticsearch,likaiwalkman/elasticsearch,Collaborne/elasticsearch,karthikjaps/elasticsearch,JackyMai/elasticsearch,alexshadow007/elasticsearch,vvcephei/elasticsearch,masaruh/elasticsearch,SaiprasadKrishnamurthy/elasticsea
rch,ThiagoGarciaAlves/elasticsearch,andrestc/elasticsearch,skearns64/elasticsearch,hydro2k/elasticsearch,strapdata/elassandra5-rc,micpalmia/elasticsearch,GlenRSmith/elasticsearch,pablocastro/elasticsearch,Brijeshrpatel9/elasticsearch,xuzha/elasticsearch,dylan8902/elasticsearch,geidies/elasticsearch,mortonsykes/elasticsearch,hechunwen/elasticsearch,MetSystem/elasticsearch,MjAbuz/elasticsearch,spiegela/elasticsearch,kingaj/elasticsearch,areek/elasticsearch,vroyer/elassandra,nilabhsagar/elasticsearch,GlenRSmith/elasticsearch,MetSystem/elasticsearch,kunallimaye/elasticsearch,weipinghe/elasticsearch,AshishThakur/elasticsearch,ouyangkongtong/elasticsearch,Microsoft/elasticsearch,markllama/elasticsearch,petmit/elasticsearch,Helen-Zhao/elasticsearch,polyfractal/elasticsearch,jchampion/elasticsearch,palecur/elasticsearch,slavau/elasticsearch,Microsoft/elasticsearch,gingerwizard/elasticsearch,Charlesdong/elasticsearch,acchen97/elasticsearch,jbertouch/elasticsearch,MichaelLiZhou/elasticsearch,Kakakakakku/elasticsearch,spiegela/elasticsearch,phani546/elasticsearch,hydro2k/elasticsearch,scottsom/elasticsearch,ThiagoGarciaAlves/elasticsearch,bestwpw/elasticsearch,pablocastro/elasticsearch,gmarz/elasticsearch,mjhennig/elasticsearch,Collaborne/elasticsearch,jw0201/elastic,hanswang/elasticsearch,mortonsykes/elasticsearch,abibell/elasticsearch,wittyameta/elasticsearch,ckclark/elasticsearch,F0lha/elasticsearch,onegambler/elasticsearch,likaiwalkman/elasticsearch,rento19962/elasticsearch,codebunt/elasticsearch,btiernay/elasticsearch,caengcjd/elasticsearch,dataduke/elasticsearch,HarishAtGitHub/elasticsearch,martinstuga/elasticsearch,wayeast/elasticsearch,lchennup/elasticsearch,xingguang2013/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,tebriel/elasticsearch,yuy168/elasticsearch,golubev/elasticsearch,KimTaehee/elasticsearch,jchampion/elasticsearch,scottsom/elasticsearch,markwalkom/elasticsearch,yuy168/elasticsearch,Siddartha07/elasticsearch,kevinkluge/elasticsearch,rlugojr/elasticsea
rch,Chhunlong/elasticsearch,kingaj/elasticsearch,jimhooker2002/elasticsearch,vvcephei/elasticsearch,smflorentino/elasticsearch,vrkansagara/elasticsearch,gmarz/elasticsearch,kimimj/elasticsearch,acchen97/elasticsearch,alexbrasetvik/elasticsearch,glefloch/elasticsearch,HonzaKral/elasticsearch,linglaiyao1314/elasticsearch,maddin2016/elasticsearch,drewr/elasticsearch,qwerty4030/elasticsearch,feiqitian/elasticsearch,mortonsykes/elasticsearch,masterweb121/elasticsearch,Flipkart/elasticsearch,mute/elasticsearch,fernandozhu/elasticsearch,Clairebi/ElasticsearchClone,elancom/elasticsearch,nellicus/elasticsearch,brwe/elasticsearch,ckclark/elasticsearch,aglne/elasticsearch,markwalkom/elasticsearch,andrejserafim/elasticsearch,MaineC/elasticsearch,mjason3/elasticsearch,dantuffery/elasticsearch,diendt/elasticsearch,gingerwizard/elasticsearch,geidies/elasticsearch,javachengwc/elasticsearch,AndreKR/elasticsearch,C-Bish/elasticsearch,sreeramjayan/elasticsearch,tsohil/elasticsearch,hafkensite/elasticsearch,springning/elasticsearch,avikurapati/elasticsearch,aglne/elasticsearch,vvcephei/elasticsearch,henakamaMSFT/elasticsearch,kenshin233/elasticsearch,luiseduardohdbackup/elasticsearch,dongjoon-hyun/elasticsearch,socialrank/elasticsearch,MaineC/elasticsearch,ThalaivaStars/OrgRepo1,jsgao0/elasticsearch,adrianbk/elasticsearch,overcome/elasticsearch,hydro2k/elasticsearch,sarwarbhuiyan/elasticsearch,pranavraman/elasticsearch,girirajsharma/elasticsearch,ESamir/elasticsearch,vroyer/elassandra,wangyuxue/elasticsearch,hechunwen/elasticsearch,masterweb121/elasticsearch,alexkuk/elasticsearch,shreejay/elasticsearch,ivansun1010/elasticsearch,kingaj/elasticsearch,C-Bish/elasticsearch,uschindler/elasticsearch,tkssharma/elasticsearch,iantruslove/elasticsearch,vietlq/elasticsearch,pablocastro/elasticsearch,naveenhooda2000/elasticsearch,wenpos/elasticsearch,mmaracic/elasticsearch,mute/elasticsearch,franklanganke/elasticsearch,martinstuga/elasticsearch,acchen97/elasticsearch,tsohil/elasticsearch,amit-shar
/elasticsearch,sc0ttkclark/elasticsearch,huypx1292/elasticsearch,Collaborne/elasticsearch,MisterAndersen/elasticsearch,boliza/elasticsearch,JervyShi/elasticsearch,Helen-Zhao/elasticsearch,socialrank/elasticsearch,iantruslove/elasticsearch,jpountz/elasticsearch,likaiwalkman/elasticsearch,mmaracic/elasticsearch,vroyer/elasticassandra,wbowling/elasticsearch,feiqitian/elasticsearch,kalburgimanjunath/elasticsearch,areek/elasticsearch,awislowski/elasticsearch,wbowling/elasticsearch,bestwpw/elasticsearch,gmarz/elasticsearch,huanzhong/elasticsearch,mnylen/elasticsearch,iamjakob/elasticsearch,franklanganke/elasticsearch,iacdingping/elasticsearch,scottsom/elasticsearch,elasticdog/elasticsearch,rajanm/elasticsearch,ESamir/elasticsearch,Kakakakakku/elasticsearch,yuy168/elasticsearch,ZTE-PaaS/elasticsearch,EasonYi/elasticsearch,truemped/elasticsearch,bawse/elasticsearch,AshishThakur/elasticsearch,winstonewert/elasticsearch,truemped/elasticsearch,andrejserafim/elasticsearch,camilojd/elasticsearch,wittyameta/elasticsearch,ZTE-PaaS/elasticsearch,Ansh90/elasticsearch,wenpos/elasticsearch,nrkkalyan/elasticsearch,gfyoung/elasticsearch,MaineC/elasticsearch,ouyangkongtong/elasticsearch,AleksKochev/elasticsearch,wenpos/elasticsearch,MichaelLiZhou/elasticsearch,scorpionvicky/elasticsearch,lchennup/elasticsearch,zeroctu/elasticsearch,tahaemin/elasticsearch,anti-social/elasticsearch,vroyer/elasticassandra,easonC/elasticsearch,huypx1292/elasticsearch,iacdingping/elasticsearch,winstonewert/elasticsearch,Rygbee/elasticsearch,rajanm/elasticsearch,s1monw/elasticsearch,petabytedata/elasticsearch,s1monw/elasticsearch,hydro2k/elasticsearch,JSCooke/elasticsearch,lks21c/elasticsearch,trangvh/elasticsearch,StefanGor/elasticsearch,IanvsPoplicola/elasticsearch,markharwood/elasticsearch,Liziyao/elasticsearch,achow/elasticsearch,dataduke/elasticsearch,geidies/elasticsearch,bawse/elasticsearch,kubum/elasticsearch,a2lin/elasticsearch,martinstuga/elasticsearch,humandb/elasticsearch,sneivandt/elasticsearch,ki
mimj/elasticsearch,thecocce/elasticsearch,nilabhsagar/elasticsearch,jango2015/elasticsearch,codebunt/elasticsearch,ouyangkongtong/elasticsearch,jaynblue/elasticsearch,nazarewk/elasticsearch,tcucchietti/elasticsearch,xuzha/elasticsearch,henakamaMSFT/elasticsearch,codebunt/elasticsearch,dantuffery/elasticsearch,lightslife/elasticsearch,ivansun1010/elasticsearch,xingguang2013/elasticsearch,combinatorist/elasticsearch,cnfire/elasticsearch-1,lydonchandra/elasticsearch,njlawton/elasticsearch,koxa29/elasticsearch,lightslife/elasticsearch,pranavraman/elasticsearch,zhiqinghuang/elasticsearch,infusionsoft/elasticsearch,huypx1292/elasticsearch,mgalushka/elasticsearch,NBSW/elasticsearch,Asimov4/elasticsearch,VukDukic/elasticsearch,fforbeck/elasticsearch,rhoml/elasticsearch,ricardocerq/elasticsearch,rhoml/elasticsearch,jango2015/elasticsearch,wayeast/elasticsearch,ricardocerq/elasticsearch,pranavraman/elasticsearch,ydsakyclguozi/elasticsearch,jimczi/elasticsearch,beiske/elasticsearch,skearns64/elasticsearch,petmit/elasticsearch,Clairebi/ElasticsearchClone,lks21c/elasticsearch,ajhalani/elasticsearch,achow/elasticsearch,infusionsoft/elasticsearch,episerver/elasticsearch,hafkensite/elasticsearch,xuzha/elasticsearch,opendatasoft/elasticsearch,artnowo/elasticsearch,wayeast/elasticsearch,nellicus/elasticsearch,xpandan/elasticsearch,markharwood/elasticsearch,Shekharrajak/elasticsearch,HonzaKral/elasticsearch,slavau/elasticsearch,nezirus/elasticsearch,Fsero/elasticsearch,achow/elasticsearch,markllama/elasticsearch,NBSW/elasticsearch,xingguang2013/elasticsearch,mnylen/elasticsearch,JackyMai/elasticsearch,khiraiwa/elasticsearch,Shekharrajak/elasticsearch,kubum/elasticsearch,girirajsharma/elasticsearch,avikurapati/elasticsearch,qwerty4030/elasticsearch,zkidkid/elasticsearch,springning/elasticsearch,kunallimaye/elasticsearch,davidvgalbraith/elasticsearch,Ansh90/elasticsearch,humandb/elasticsearch,linglaiyao1314/elasticsearch,kimimj/elasticsearch,ThiagoGarciaAlves/elasticsearch,peschlowp/ela
sticsearch,LeoYao/elasticsearch,mbrukman/elasticsearch,naveenhooda2000/elasticsearch,truemped/elasticsearch,GlenRSmith/elasticsearch,njlawton/elasticsearch,wbowling/elasticsearch,pritishppai/elasticsearch,loconsolutions/elasticsearch,boliza/elasticsearch,fred84/elasticsearch,rento19962/elasticsearch,Stacey-Gammon/elasticsearch,LeoYao/elasticsearch,i-am-Nathan/elasticsearch,sjohnr/elasticsearch,YosuaMichael/elasticsearch,sauravmondallive/elasticsearch,sneivandt/elasticsearch,zhiqinghuang/elasticsearch,AndreKR/elasticsearch,tahaemin/elasticsearch,18098924759/elasticsearch,pritishppai/elasticsearch,hanst/elasticsearch,F0lha/elasticsearch,franklanganke/elasticsearch,jw0201/elastic,lydonchandra/elasticsearch,humandb/elasticsearch,socialrank/elasticsearch,Ansh90/elasticsearch,tkssharma/elasticsearch,chirilo/elasticsearch,karthikjaps/elasticsearch,sposam/elasticsearch,acchen97/elasticsearch,YosuaMichael/elasticsearch,MetSystem/elasticsearch,xingguang2013/elasticsearch,abibell/elasticsearch,scottsom/elasticsearch,kevinkluge/elasticsearch,jaynblue/elasticsearch,weipinghe/elasticsearch,mkis-/elasticsearch,maddin2016/elasticsearch,Fsero/elasticsearch,amaliujia/elasticsearch,maddin2016/elasticsearch,dongjoon-hyun/elasticsearch,robin13/elasticsearch,lchennup/elasticsearch,mjhennig/elasticsearch,JSCooke/elasticsearch,karthikjaps/elasticsearch,areek/elasticsearch,himanshuag/elasticsearch,kalburgimanjunath/elasticsearch,PhaedrusTheGreek/elasticsearch,i-am-Nathan/elasticsearch,hanst/elasticsearch,skearns64/elasticsearch,markllama/elasticsearch,iantruslove/elasticsearch,jimhooker2002/elasticsearch,kcompher/elasticsearch,SergVro/elasticsearch,mrorii/elasticsearch,ckclark/elasticsearch,xuzha/elasticsearch,javachengwc/elasticsearch,tahaemin/elasticsearch,snikch/elasticsearch,dylan8902/elasticsearch,trangvh/elasticsearch,markwalkom/elasticsearch,caengcjd/elasticsearch,EasonYi/elasticsearch,NBSW/elasticsearch,yuy168/elasticsearch,henakamaMSFT/elasticsearch,naveenhooda2000/elasticsearch,Ka
kakakakku/elasticsearch,tsohil/elasticsearch,vvcephei/elasticsearch,kalburgimanjunath/elasticsearch,mcku/elasticsearch,alexshadow007/elasticsearch,alexkuk/elasticsearch,nilabhsagar/elasticsearch,sposam/elasticsearch,sjohnr/elasticsearch,andrejserafim/elasticsearch,mm0/elasticsearch,PhaedrusTheGreek/elasticsearch,amaliujia/elasticsearch,kevinkluge/elasticsearch,kubum/elasticsearch,mrorii/elasticsearch,likaiwalkman/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,jw0201/elastic,fekaputra/elasticsearch,franklanganke/elasticsearch,markharwood/elasticsearch,elasticdog/elasticsearch,mgalushka/elasticsearch,hanst/elasticsearch,brandonkearby/elasticsearch,episerver/elasticsearch,cwurm/elasticsearch,hafkensite/elasticsearch,sreeramjayan/elasticsearch,AleksKochev/elasticsearch,chrismwendt/elasticsearch,easonC/elasticsearch,onegambler/elasticsearch,amaliujia/elasticsearch,hafkensite/elasticsearch,scorpionvicky/elasticsearch,VukDukic/elasticsearch,kalimatas/elasticsearch,apepper/elasticsearch,adrianbk/elasticsearch,koxa29/elasticsearch,wittyameta/elasticsearch,markharwood/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,chrismwendt/elasticsearch,ydsakyclguozi/elasticsearch,pritishppai/elasticsearch,hirdesh2008/elasticsearch,Kakakakakku/elasticsearch,avikurapati/elasticsearch,franklanganke/elasticsearch,mute/elasticsearch,mapr/elasticsearch,lchennup/elasticsearch,springning/elasticsearch,jbertouch/elasticsearch,geidies/elasticsearch,trangvh/elasticsearch,awislowski/elasticsearch,franklanganke/elasticsearch,JSCooke/elasticsearch,s1monw/elasticsearch,ImpressTV/elasticsearch,kimimj/elasticsearch,cwurm/elasticsearch,jbertouch/elasticsearch,wangyuxue/elasticsearch,rlugojr/elasticsearch,codebunt/elasticsearch,kubum/elasticsearch,micpalmia/elasticsearch,vingupta3/elasticsearch,xingguang2013/elasticsearch,jaynblue/elasticsearch,likaiwalkman/elasticsearch,nknize/elasticsearch,kenshin233/elasticsearch,mmaracic/elasticsearch,zkidkid/elasticsearch,sneivandt/elasticsearch,tebriel/elastic
search,yongminxia/elasticsearch,infusionsoft/elasticsearch,weipinghe/elasticsearch,knight1128/elasticsearch,huypx1292/elasticsearch,a2lin/elasticsearch,mikemccand/elasticsearch,andrestc/elasticsearch,Widen/elasticsearch,KimTaehee/elasticsearch,masterweb121/elasticsearch,coding0011/elasticsearch,mohit/elasticsearch,fforbeck/elasticsearch,JervyShi/elasticsearch,zkidkid/elasticsearch,nellicus/elasticsearch,pritishppai/elasticsearch,caengcjd/elasticsearch,kkirsche/elasticsearch,apepper/elasticsearch,Kakakakakku/elasticsearch,golubev/elasticsearch,sauravmondallive/elasticsearch,xpandan/elasticsearch,sscarduzio/elasticsearch,sauravmondallive/elasticsearch,mrorii/elasticsearch,jimhooker2002/elasticsearch,anti-social/elasticsearch,rhoml/elasticsearch,HarishAtGitHub/elasticsearch,Liziyao/elasticsearch,acchen97/elasticsearch,djschny/elasticsearch,fooljohnny/elasticsearch,Ansh90/elasticsearch,lzo/elasticsearch-1,pritishppai/elasticsearch,phani546/elasticsearch,Flipkart/elasticsearch,elancom/elasticsearch,markllama/elasticsearch,lks21c/elasticsearch,sc0ttkclark/elasticsearch,ThiagoGarciaAlves/elasticsearch,khiraiwa/elasticsearch,martinstuga/elasticsearch,lzo/elasticsearch-1,girirajsharma/elasticsearch,ImpressTV/elasticsearch,Fsero/elasticsearch,rhoml/elasticsearch,lzo/elasticsearch-1,yongminxia/elasticsearch,khiraiwa/elasticsearch,Flipkart/elasticsearch,combinatorist/elasticsearch,markllama/elasticsearch,golubev/elasticsearch,nknize/elasticsearch,jpountz/elasticsearch,wimvds/elasticsearch,jeteve/elasticsearch,fooljohnny/elasticsearch,queirozfcom/elasticsearch,mbrukman/elasticsearch,martinstuga/elasticsearch,alexshadow007/elasticsearch,Charlesdong/elasticsearch,weipinghe/elasticsearch,hanswang/elasticsearch,maddin2016/elasticsearch,rajanm/elasticsearch,ThalaivaStars/OrgRepo1,codebunt/elasticsearch,kunallimaye/elasticsearch,iantruslove/elasticsearch,dylan8902/elasticsearch,djschny/elasticsearch,knight1128/elasticsearch,jsgao0/elasticsearch,huanzhong/elasticsearch,masaruh/elastics
earch,drewr/elasticsearch,chrismwendt/elasticsearch,Widen/elasticsearch,milodky/elasticsearch,glefloch/elasticsearch,LewayneNaidoo/elasticsearch,ivansun1010/elasticsearch,queirozfcom/elasticsearch,gingerwizard/elasticsearch,nknize/elasticsearch,wbowling/elasticsearch,Liziyao/elasticsearch,wuranbo/elasticsearch,overcome/elasticsearch,fernandozhu/elasticsearch,ulkas/elasticsearch,wimvds/elasticsearch,camilojd/elasticsearch,opendatasoft/elasticsearch,myelin/elasticsearch,HarishAtGitHub/elasticsearch,strapdata/elassandra5-rc,combinatorist/elasticsearch,Brijeshrpatel9/elasticsearch,camilojd/elasticsearch,ESamir/elasticsearch,yanjunh/elasticsearch,yanjunh/elasticsearch,scottsom/elasticsearch,tkssharma/elasticsearch,wenpos/elasticsearch,shreejay/elasticsearch,Asimov4/elasticsearch,Helen-Zhao/elasticsearch,vietlq/elasticsearch,hydro2k/elasticsearch,easonC/elasticsearch,nazarewk/elasticsearch,caengcjd/elasticsearch,kalburgimanjunath/elasticsearch,mbrukman/elasticsearch,luiseduardohdbackup/elasticsearch,sposam/elasticsearch,ESamir/elasticsearch,rmuir/elasticsearch,karthikjaps/elasticsearch,jeteve/elasticsearch,a2lin/elasticsearch,lydonchandra/elasticsearch,C-Bish/elasticsearch,qwerty4030/elasticsearch,rento19962/elasticsearch,xingguang2013/elasticsearch,linglaiyao1314/elasticsearch,robin13/elasticsearch,boliza/elasticsearch,AndreKR/elasticsearch,szroland/elasticsearch,mohit/elasticsearch,mute/elasticsearch,jprante/elasticsearch,naveenhooda2000/elasticsearch,scorpionvicky/elasticsearch,clintongormley/elasticsearch,umeshdangat/elasticsearch,skearns64/elasticsearch,HarishAtGitHub/elasticsearch,clintongormley/elasticsearch,dantuffery/elasticsearch,Microsoft/elasticsearch,pozhidaevak/elasticsearch,mute/elasticsearch,MjAbuz/elasticsearch,iamjakob/elasticsearch,Helen-Zhao/elasticsearch,elancom/elasticsearch,naveenhooda2000/elasticsearch,coding0011/elasticsearch,hechunwen/elasticsearch,elancom/elasticsearch,myelin/elasticsearch,javachengwc/elasticsearch,polyfractal/elasticsearch,yuy1
68/elasticsearch,sarwarbhuiyan/elasticsearch,golubev/elasticsearch,fooljohnny/elasticsearch,LeoYao/elasticsearch,phani546/elasticsearch,NBSW/elasticsearch,wittyameta/elasticsearch,petmit/elasticsearch,sjohnr/elasticsearch,Uiho/elasticsearch,TonyChai24/ESSource,luiseduardohdbackup/elasticsearch,cwurm/elasticsearch,gmarz/elasticsearch,mbrukman/elasticsearch,vingupta3/elasticsearch,truemped/elasticsearch,onegambler/elasticsearch,peschlowp/elasticsearch,strapdata/elassandra,huanzhong/elasticsearch,strapdata/elassandra-test,karthikjaps/elasticsearch,njlawton/elasticsearch,tkssharma/elasticsearch,tebriel/elasticsearch,KimTaehee/elasticsearch,Shepard1212/elasticsearch,jeteve/elasticsearch,wbowling/elasticsearch,AshishThakur/elasticsearch,Helen-Zhao/elasticsearch,jeteve/elasticsearch,ThalaivaStars/OrgRepo1,kcompher/elasticsearch,kimimj/elasticsearch,alexshadow007/elasticsearch,hirdesh2008/elasticsearch,ouyangkongtong/elasticsearch,yynil/elasticsearch,NBSW/elasticsearch,snikch/elasticsearch,mortonsykes/elasticsearch,fekaputra/elasticsearch,areek/elasticsearch,wimvds/elasticsearch,achow/elasticsearch,LewayneNaidoo/elasticsearch,fekaputra/elasticsearch,masterweb121/elasticsearch,pranavraman/elasticsearch,petabytedata/elasticsearch,Shepard1212/elasticsearch,YosuaMichael/elasticsearch,btiernay/elasticsearch,djschny/elasticsearch,masterweb121/elasticsearch,adrianbk/elasticsearch,mbrukman/elasticsearch,huanzhong/elasticsearch,Flipkart/elasticsearch,himanshuag/elasticsearch,aglne/elasticsearch,PhaedrusTheGreek/elasticsearch,ivansun1010/elasticsearch,iacdingping/elasticsearch,mbrukman/elasticsearch,adrianbk/elasticsearch,likaiwalkman/elasticsearch,NBSW/elasticsearch,nrkkalyan/elasticsearch,ImpressTV/elasticsearch,wenpos/elasticsearch,liweinan0423/elasticsearch,JSCooke/elasticsearch,MisterAndersen/elasticsearch,alexbrasetvik/elasticsearch,jprante/elasticsearch,golubev/elasticsearch,dataduke/elasticsearch,kalimatas/elasticsearch,mohit/elasticsearch,SaiprasadKrishnamurthy/elasticsearch
,ESamir/elasticsearch,MetSystem/elasticsearch,amaliujia/elasticsearch,dpursehouse/elasticsearch,jimczi/elasticsearch,masterweb121/elasticsearch,heng4fun/elasticsearch,nknize/elasticsearch,SergVro/elasticsearch,himanshuag/elasticsearch,infusionsoft/elasticsearch,AleksKochev/elasticsearch,masaruh/elasticsearch,chirilo/elasticsearch,Uiho/elasticsearch,knight1128/elasticsearch,LewayneNaidoo/elasticsearch,kenshin233/elasticsearch,mcku/elasticsearch,wayeast/elasticsearch,fernandozhu/elasticsearch,KimTaehee/elasticsearch,shreejay/elasticsearch,mjhennig/elasticsearch,nrkkalyan/elasticsearch,xpandan/elasticsearch,Widen/elasticsearch,nellicus/elasticsearch,ulkas/elasticsearch,anti-social/elasticsearch,linglaiyao1314/elasticsearch,lydonchandra/elasticsearch,clintongormley/elasticsearch,robin13/elasticsearch,rajanm/elasticsearch,sposam/elasticsearch,Chhunlong/elasticsearch,fekaputra/elasticsearch,amit-shar/elasticsearch,himanshuag/elasticsearch,rmuir/elasticsearch,thecocce/elasticsearch,fekaputra/elasticsearch,rhoml/elasticsearch,wangtuo/elasticsearch,Fsero/elasticsearch,brwe/elasticsearch,jprante/elasticsearch,bestwpw/elasticsearch,MichaelLiZhou/elasticsearch,Kakakakakku/elasticsearch,queirozfcom/elasticsearch,obourgain/elasticsearch,petabytedata/elasticsearch,areek/elasticsearch,feiqitian/elasticsearch,kunallimaye/elasticsearch,abibell/elasticsearch,jsgao0/elasticsearch,chirilo/elasticsearch,sarwarbhuiyan/elasticsearch,cnfire/elasticsearch-1,dpursehouse/elasticsearch,palecur/elasticsearch,lightslife/elasticsearch,Shekharrajak/elasticsearch,tebriel/elasticsearch,kalburgimanjunath/elasticsearch,tahaemin/elasticsearch,strapdata/elassandra,mortonsykes/elasticsearch,vietlq/elasticsearch,nezirus/elasticsearch,yongminxia/elasticsearch,strapdata/elassandra-test,JackyMai/elasticsearch,koxa29/elasticsearch,kalimatas/elasticsearch,jaynblue/elasticsearch,LeoYao/elasticsearch,cnfire/elasticsearch-1,adrianbk/elasticsearch,kunallimaye/elasticsearch,tcucchietti/elasticsearch,lks21c/elasticse
arch,StefanGor/elasticsearch,vingupta3/elasticsearch,masaruh/elasticsearch,zkidkid/elasticsearch,ivansun1010/elasticsearch,truemped/elasticsearch,zeroctu/elasticsearch,ImpressTV/elasticsearch,drewr/elasticsearch,LewayneNaidoo/elasticsearch,Collaborne/elasticsearch,mgalushka/elasticsearch,smflorentino/elasticsearch,StefanGor/elasticsearch,feiqitian/elasticsearch,zhiqinghuang/elasticsearch,apepper/elasticsearch,Shekharrajak/elasticsearch,mohit/elasticsearch,gfyoung/elasticsearch,Siddartha07/elasticsearch,a2lin/elasticsearch,wayeast/elasticsearch,davidvgalbraith/elasticsearch,sreeramjayan/elasticsearch,dpursehouse/elasticsearch,combinatorist/elasticsearch,brandonkearby/elasticsearch,PhaedrusTheGreek/elasticsearch,skearns64/elasticsearch,jpountz/elasticsearch,janmejay/elasticsearch,mm0/elasticsearch,Clairebi/ElasticsearchClone,Shepard1212/elasticsearch,mjason3/elasticsearch,schonfeld/elasticsearch,areek/elasticsearch,ZTE-PaaS/elasticsearch,pozhidaevak/elasticsearch,GlenRSmith/elasticsearch,ckclark/elasticsearch,vroyer/elassandra,Liziyao/elasticsearch,kcompher/elasticsearch,xuzha/elasticsearch,sposam/elasticsearch,himanshuag/elasticsearch,tkssharma/elasticsearch,rajanm/elasticsearch,JervyShi/elasticsearch,EasonYi/elasticsearch,camilojd/elasticsearch,kalimatas/elasticsearch,strapdata/elassandra-test,mikemccand/elasticsearch,yuy168/elasticsearch,episerver/elasticsearch,coding0011/elasticsearch,lightslife/elasticsearch,sscarduzio/elasticsearch,masterweb121/elasticsearch,lmtwga/elasticsearch,Ansh90/elasticsearch,springning/elasticsearch,iacdingping/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,wimvds/elasticsearch,beiske/elasticsearch,mbrukman/elasticsearch,aglne/elasticsearch,janmejay/elasticsearch,mapr/elasticsearch,heng4fun/elasticsearch,markharwood/elasticsearch,sscarduzio/elasticsearch,awislowski/elasticsearch,njlawton/elasticsearch,MaineC/elasticsearch,diendt/elasticsearch,humandb/elasticsearch,elancom/elasticsearch,JervyShi/elasticsearch,brwe/elasticsearch,kunall
imaye/elasticsearch,MetSystem/elasticsearch,KimTaehee/elasticsearch,robin13/elasticsearch,Collaborne/elasticsearch,drewr/elasticsearch,pablocastro/elasticsearch,milodky/elasticsearch,zeroctu/elasticsearch,sreeramjayan/elasticsearch,ydsakyclguozi/elasticsearch,huanzhong/elasticsearch,brandonkearby/elasticsearch,ricardocerq/elasticsearch,cnfire/elasticsearch-1,schonfeld/elasticsearch,zeroctu/elasticsearch,alexkuk/elasticsearch,btiernay/elasticsearch,chirilo/elasticsearch,NBSW/elasticsearch,gmarz/elasticsearch,strapdata/elassandra,Rygbee/elasticsearch,kkirsche/elasticsearch,strapdata/elassandra-test,likaiwalkman/elasticsearch,mmaracic/elasticsearch,schonfeld/elasticsearch,snikch/elasticsearch,ydsakyclguozi/elasticsearch,gingerwizard/elasticsearch,vietlq/elasticsearch,janmejay/elasticsearch,huypx1292/elasticsearch,snikch/elasticsearch,scorpionvicky/elasticsearch,JackyMai/elasticsearch,kcompher/elasticsearch,Rygbee/elasticsearch,njlawton/elasticsearch,kenshin233/elasticsearch,nazarewk/elasticsearch,MichaelLiZhou/elasticsearch,YosuaMichael/elasticsearch,lydonchandra/elasticsearch,pozhidaevak/elasticsearch,umeshdangat/elasticsearch,myelin/elasticsearch,obourgain/elasticsearch,fekaputra/elasticsearch,Brijeshrpatel9/elasticsearch,andrestc/elasticsearch,yanjunh/elasticsearch,ulkas/elasticsearch,ulkas/elasticsearch,hanswang/elasticsearch,peschlowp/elasticsearch,wbowling/elasticsearch,slavau/elasticsearch,jeteve/elasticsearch,infusionsoft/elasticsearch,mute/elasticsearch,alexbrasetvik/elasticsearch,pablocastro/elasticsearch,rento19962/elasticsearch,wittyameta/elasticsearch,luiseduardohdbackup/elasticsearch,kingaj/elasticsearch,markllama/elasticsearch,iamjakob/elasticsearch,Clairebi/ElasticsearchClone,linglaiyao1314/elasticsearch,xpandan/elasticsearch,huanzhong/elasticsearch,micpalmia/elasticsearch,Rygbee/elasticsearch,EasonYi/elasticsearch,Uiho/elasticsearch,javachengwc/elasticsearch,mikemccand/elasticsearch,weipinghe/elasticsearch,rajanm/elasticsearch,mjhennig/elasticsearch,pa
lecur/elasticsearch,khiraiwa/elasticsearch,cnfire/elasticsearch-1,elancom/elasticsearch,onegambler/elasticsearch,obourgain/elasticsearch,coding0011/elasticsearch,easonC/elasticsearch,wangyuxue/elasticsearch,karthikjaps/elasticsearch,kalburgimanjunath/elasticsearch,alexkuk/elasticsearch,obourgain/elasticsearch,snikch/elasticsearch,mnylen/elasticsearch,AleksKochev/elasticsearch,kkirsche/elasticsearch,beiske/elasticsearch,mm0/elasticsearch,koxa29/elasticsearch,yongminxia/elasticsearch,wittyameta/elasticsearch,dylan8902/elasticsearch,andrestc/elasticsearch,janmejay/elasticsearch,KimTaehee/elasticsearch,KimTaehee/elasticsearch,kubum/elasticsearch,boliza/elasticsearch,lks21c/elasticsearch,jango2015/elasticsearch,kaneshin/elasticsearch,tebriel/elasticsearch,strapdata/elassandra,Uiho/elasticsearch,sscarduzio/elasticsearch,huanzhong/elasticsearch,bawse/elasticsearch,Shepard1212/elasticsearch,kunallimaye/elasticsearch,ZTE-PaaS/elasticsearch,davidvgalbraith/elasticsearch,ouyangkongtong/elasticsearch,Clairebi/ElasticsearchClone,aglne/elasticsearch,yynil/elasticsearch,ajhalani/elasticsearch,lydonchandra/elasticsearch,Ansh90/elasticsearch,thecocce/elasticsearch,camilojd/elasticsearch,MjAbuz/elasticsearch,kalburgimanjunath/elasticsearch,pritishppai/elasticsearch,amit-shar/elasticsearch,gfyoung/elasticsearch,btiernay/elasticsearch,MjAbuz/elasticsearch,lmtwga/elasticsearch,andrestc/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,kaneshin/elasticsearch,peschlowp/elasticsearch,slavau/elasticsearch,mm0/elasticsearch,F0lha/elasticsearch,zhiqinghuang/elasticsearch,janmejay/elasticsearch,nezirus/elasticsearch,strapdata/elassandra-test,abibell/elasticsearch,F0lha/elasticsearch,szroland/elasticsearch,dataduke/elasticsearch,Fsero/elasticsearch,awislowski/elasticsearch,bestwpw/elasticsearch,hafkensite/elasticsearch,MichaelLiZhou/elasticsearch,zkidkid/elasticsearch,IanvsPoplicola/elasticsearch,IanvsPoplicola/elasticsearch,kenshin233/elasticsearch,iamjakob/elasticsearch,nilabhsagar/elasticse
arch,Uiho/elasticsearch,TonyChai24/ESSource,chirilo/elasticsearch,socialrank/elasticsearch,Shekharrajak/elasticsearch,hirdesh2008/elasticsearch,Brijeshrpatel9/elasticsearch,szroland/elasticsearch,feiqitian/elasticsearch,Brijeshrpatel9/elasticsearch,ulkas/elasticsearch,lmtwga/elasticsearch,hafkensite/elasticsearch,a2lin/elasticsearch,s1monw/elasticsearch,liweinan0423/elasticsearch,phani546/elasticsearch,sc0ttkclark/elasticsearch,C-Bish/elasticsearch,smflorentino/elasticsearch,iacdingping/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,kenshin233/elasticsearch,hanswang/elasticsearch,luiseduardohdbackup/elasticsearch,pablocastro/elasticsearch,mjason3/elasticsearch,YosuaMichael/elasticsearch,iantruslove/elasticsearch,Chhunlong/elasticsearch,Collaborne/elasticsearch,mcku/elasticsearch,strapdata/elassandra5-rc,lzo/elasticsearch-1,loconsolutions/elasticsearch,himanshuag/elasticsearch,Rygbee/elasticsearch,SergVro/elasticsearch,sreeramjayan/elasticsearch,tcucchietti/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,achow/elasticsearch,brandonkearby/elasticsearch,Rygbee/elasticsearch,drewr/elasticsearch,szroland/elasticsearch,lchennup/elasticsearch,mcku/elasticsearch,liweinan0423/elasticsearch,schonfeld/elasticsearch,iantruslove/elasticsearch,davidvgalbraith/elasticsearch,btiernay/elasticsearch,sauravmondallive/elasticsearch,winstonewert/elasticsearch,nellicus/elasticsearch,Collaborne/elasticsearch,jpountz/elasticsearch,chrismwendt/elasticsearch,caengcjd/elasticsearch,Siddartha07/elasticsearch,onegambler/elasticsearch,awislowski/elasticsearch,fforbeck/elasticsearch,aglne/elasticsearch,overcome/elasticsearch,mnylen/elasticsearch,VukDukic/elasticsearch,artnowo/elasticsearch,schonfeld/elasticsearch,lmtwga/elasticsearch,chirilo/elasticsearch,beiske/elasticsearch,hafkensite/elasticsearch,hanst/elasticsearch,wimvds/elasticsearch,szroland/elasticsearch,vingupta3/elasticsearch,IanvsPoplicola/elasticsearch,socialrank/elasticsearch,JervyShi/elasticsearch,ESamir/elasticsearch,milodk
y/elasticsearch,maddin2016/elasticsearch,vrkansagara/elasticsearch,markharwood/elasticsearch,kaneshin/elasticsearch,ouyangkongtong/elasticsearch,jsgao0/elasticsearch,vroyer/elasticassandra,anti-social/elasticsearch,caengcjd/elasticsearch,milodky/elasticsearch,jimhooker2002/elasticsearch,wimvds/elasticsearch,lightslife/elasticsearch,dpursehouse/elasticsearch,hanswang/elasticsearch,Shekharrajak/elasticsearch,TonyChai24/ESSource,hanswang/elasticsearch,rmuir/elasticsearch,jango2015/elasticsearch,gingerwizard/elasticsearch,SergVro/elasticsearch,clintongormley/elasticsearch,mrorii/elasticsearch,achow/elasticsearch,zeroctu/elasticsearch,jw0201/elastic,EasonYi/elasticsearch,pozhidaevak/elasticsearch,liweinan0423/elasticsearch,micpalmia/elasticsearch,MjAbuz/elasticsearch,queirozfcom/elasticsearch,PhaedrusTheGreek/elasticsearch,kkirsche/elasticsearch,nrkkalyan/elasticsearch,vrkansagara/elasticsearch,jchampion/elasticsearch,Stacey-Gammon/elasticsearch,queirozfcom/elasticsearch,wuranbo/elasticsearch,polyfractal/elasticsearch,umeshdangat/elasticsearch,andrestc/elasticsearch,areek/elasticsearch,mikemccand/elasticsearch,zhiqinghuang/elasticsearch,myelin/elasticsearch,kenshin233/elasticsearch,mkis-/elasticsearch,i-am-Nathan/elasticsearch,AshishThakur/elasticsearch,overcome/elasticsearch,LeoYao/elasticsearch,iamjakob/elasticsearch,nomoa/elasticsearch,ricardocerq/elasticsearch,AndreKR/elasticsearch,Asimov4/elasticsearch,tsohil/elasticsearch,jbertouch/elasticsearch,mcku/elasticsearch,andrejserafim/elasticsearch,mrorii/elasticsearch,springning/elasticsearch,i-am-Nathan/elasticsearch,Fsero/elasticsearch,avikurapati/elasticsearch,vingupta3/elasticsearch,boliza/elasticsearch,fernandozhu/elasticsearch,amit-shar/elasticsearch,wangtuo/elasticsearch,mkis-/elasticsearch,amit-shar/elasticsearch,jango2015/elasticsearch,rlugojr/elasticsearch,jimczi/elasticsearch,spiegela/elasticsearch,sauravmondallive/elasticsearch,lchennup/elasticsearch,camilojd/elasticsearch,bestwpw/elasticsearch,dylan8902/elas
ticsearch,adrianbk/elasticsearch,jsgao0/elasticsearch,zhiqinghuang/elasticsearch,Charlesdong/elasticsearch,knight1128/elasticsearch,ImpressTV/elasticsearch,queirozfcom/elasticsearch,Ansh90/elasticsearch,abibell/elasticsearch,ckclark/elasticsearch,sdauletau/elasticsearch,mm0/elasticsearch,trangvh/elasticsearch,nomoa/elasticsearch,hirdesh2008/elasticsearch,jprante/elasticsearch,kevinkluge/elasticsearch,phani546/elasticsearch,koxa29/elasticsearch,amit-shar/elasticsearch,PhaedrusTheGreek/elasticsearch,petabytedata/elasticsearch,sc0ttkclark/elasticsearch,feiqitian/elasticsearch,artnowo/elasticsearch,achow/elasticsearch,brwe/elasticsearch,hanst/elasticsearch,gfyoung/elasticsearch,djschny/elasticsearch,tahaemin/elasticsearch,Siddartha07/elasticsearch,rlugojr/elasticsearch,tahaemin/elasticsearch,hirdesh2008/elasticsearch,mnylen/elasticsearch,opendatasoft/elasticsearch,nellicus/elasticsearch,sjohnr/elasticsearch,Charlesdong/elasticsearch,andrejserafim/elasticsearch,nellicus/elasticsearch,djschny/elasticsearch,vrkansagara/elasticsearch,smflorentino/elasticsearch,zeroctu/elasticsearch,polyfractal/elasticsearch,winstonewert/elasticsearch,clintongormley/elasticsearch,polyfractal/elasticsearch,cnfire/elasticsearch-1,zeroctu/elasticsearch,yuy168/elasticsearch,StefanGor/elasticsearch,hanswang/elasticsearch,humandb/elasticsearch,jsgao0/elasticsearch,djschny/elasticsearch,uschindler/elasticsearch,kcompher/elasticsearch,easonC/elasticsearch,kkirsche/elasticsearch,GlenRSmith/elasticsearch,wuranbo/elasticsearch,diendt/elasticsearch,snikch/elasticsearch,AshishThakur/elasticsearch,wuranbo/elasticsearch,kubum/elasticsearch,tebriel/elasticsearch,btiernay/elasticsearch,linglaiyao1314/elasticsearch,opendatasoft/elasticsearch,rento19962/elasticsearch,alexbrasetvik/elasticsearch,kaneshin/elasticsearch,nomoa/elasticsearch,ricardocerq/elasticsearch,HonzaKral/elasticsearch,mgalushka/elasticsearch,milodky/elasticsearch,nrkkalyan/elasticsearch,lightslife/elasticsearch,fred84/elasticsearch,lchennup/e
lasticsearch,iacdingping/elasticsearch,18098924759/elasticsearch,alexbrasetvik/elasticsearch,HarishAtGitHub/elasticsearch,loconsolutions/elasticsearch,knight1128/elasticsearch,clintongormley/elasticsearch,kaneshin/elasticsearch,ulkas/elasticsearch,fooljohnny/elasticsearch,sreeramjayan/elasticsearch,xuzha/elasticsearch,mnylen/elasticsearch,heng4fun/elasticsearch,masaruh/elasticsearch,F0lha/elasticsearch,alexshadow007/elasticsearch,henakamaMSFT/elasticsearch,episerver/elasticsearch,mkis-/elasticsearch,gingerwizard/elasticsearch,HarishAtGitHub/elasticsearch,Siddartha07/elasticsearch,artnowo/elasticsearch,qwerty4030/elasticsearch,Flipkart/elasticsearch,yongminxia/elasticsearch,strapdata/elassandra,mm0/elasticsearch,fforbeck/elasticsearch,apepper/elasticsearch,tkssharma/elasticsearch,mcku/elasticsearch,xpandan/elasticsearch,girirajsharma/elasticsearch,jbertouch/elasticsearch,mkis-/elasticsearch,JSCooke/elasticsearch,jchampion/elasticsearch,kubum/elasticsearch,umeshdangat/elasticsearch,iacdingping/elasticsearch,Charlesdong/elasticsearch,dantuffery/elasticsearch,sposam/elasticsearch,markwalkom/elasticsearch,jchampion/elasticsearch,cnfire/elasticsearch-1,mjhennig/elasticsearch,abibell/elasticsearch,elancom/elasticsearch,tahaemin/elasticsearch,markllama/elasticsearch,elasticdog/elasticsearch,anti-social/elasticsearch,s1monw/elasticsearch,rhoml/elasticsearch,pranavraman/elasticsearch,heng4fun/elasticsearch,ydsakyclguozi/elasticsearch,jaynblue/elasticsearch,lmtwga/elasticsearch,Rygbee/elasticsearch,MjAbuz/elasticsearch,thecocce/elasticsearch,Shepard1212/elasticsearch,apepper/elasticsearch,Brijeshrpatel9/elasticsearch,adrianbk/elasticsearch,dongjoon-hyun/elasticsearch,truemped/elasticsearch,vietlq/elasticsearch,rmuir/elasticsearch,opendatasoft/elasticsearch,uschindler/elasticsearch,hanst/elasticsearch,lightslife/elasticsearch,vrkansagara/elasticsearch,sarwarbhuiyan/elasticsearch,skearns64/elasticsearch,rlugojr/elasticsearch,sc0ttkclark/elasticsearch,HarishAtGitHub/elasticsearch
,MetSystem/elasticsearch,schonfeld/elasticsearch,yynil/elasticsearch,ajhalani/elasticsearch,karthikjaps/elasticsearch,girirajsharma/elasticsearch,sarwarbhuiyan/elasticsearch,spiegela/elasticsearch,Chhunlong/elasticsearch,apepper/elasticsearch,markwalkom/elasticsearch,smflorentino/elasticsearch,linglaiyao1314/elasticsearch,trangvh/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,wbowling/elasticsearch,amit-shar/elasticsearch,18098924759/elasticsearch,wittyameta/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,mjhennig/elasticsearch,palecur/elasticsearch,petmit/elasticsearch,dylan8902/elasticsearch,mjason3/elasticsearch,abibell/elasticsearch,bawse/elasticsearch,onegambler/elasticsearch,micpalmia/elasticsearch,jaynblue/elasticsearch,SergVro/elasticsearch,lydonchandra/elasticsearch,robin13/elasticsearch,socialrank/elasticsearch,pranavraman/elasticsearch,loconsolutions/elasticsearch,sc0ttkclark/elasticsearch,mute/elasticsearch,rmuir/elasticsearch,jw0201/elastic,VukDukic/elasticsearch,sjohnr/elasticsearch,shreejay/elasticsearch,slavau/elasticsearch,thecocce/elasticsearch,dantuffery/elasticsearch,hydro2k/elasticsearch,wayeast/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,koxa29/elasticsearch,Flipkart/elasticsearch,yongminxia/elasticsearch,khiraiwa/elasticsearch,kevinkluge/elasticsearch,yynil/elasticsearch,ouyangkongtong/elasticsearch,szroland/elasticsearch,pozhidaevak/elasticsearch,tcucchietti/elasticsearch,henakamaMSFT/elasticsearch,artnowo/elasticsearch,kimimj/elasticsearch,combinatorist/elasticsearch,wuranbo/elasticsearch,Uiho/elasticsearch,zhiqinghuang/elasticsearch,jimczi/elasticsearch,andrestc/elasticsearch,sneivandt/elasticsearch,mkis-/elasticsearch,elasticdog/elasticsearch,kcompher/elasticsearch,wimvds/elasticsearch,strapdata/elassandra-test,bawse/elasticsearch,mgalushka/elasticsearch,YosuaMichael/elasticsearch,nilabhsagar/elasticsearch,franklanganke/elasticsearch,sdauletau/elasticsearch,beiske/elasticsearch,mjason3/elasticsearch,jpountz/elasticsearch,sjoh
nr/elasticsearch,pablocastro/elasticsearch,nknize/elasticsearch,yongminxia/elasticsearch,rmuir/elasticsearch,strapdata/elassandra-test,Siddartha07/elasticsearch,strapdata/elassandra5-rc,tsohil/elasticsearch,episerver/elasticsearch,hydro2k/elasticsearch,ThalaivaStars/OrgRepo1,sscarduzio/elasticsearch,drewr/elasticsearch,yynil/elasticsearch,LewayneNaidoo/elasticsearch,queirozfcom/elasticsearch,kevinkluge/elasticsearch,sdauletau/elasticsearch,18098924759/elasticsearch,ThalaivaStars/OrgRepo1,yynil/elasticsearch,iamjakob/elasticsearch,codebunt/elasticsearch,mapr/elasticsearch,himanshuag/elasticsearch,Chhunlong/elasticsearch,mcku/elasticsearch,strapdata/elassandra5-rc,kingaj/elasticsearch,HonzaKral/elasticsearch,nrkkalyan/elasticsearch,Stacey-Gammon/elasticsearch,tsohil/elasticsearch,infusionsoft/elasticsearch,vietlq/elasticsearch,fekaputra/elasticsearch,alexbrasetvik/elasticsearch,AndreKR/elasticsearch,fforbeck/elasticsearch,mohit/elasticsearch,diendt/elasticsearch,qwerty4030/elasticsearch,mgalushka/elasticsearch,cwurm/elasticsearch,dpursehouse/elasticsearch,TonyChai24/ESSource,mapr/elasticsearch,ajhalani/elasticsearch,ydsakyclguozi/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,tkssharma/elasticsearch,LeoYao/elasticsearch,schonfeld/elasticsearch,yanjunh/elasticsearch,springning/elasticsearch,Chhunlong/elasticsearch,sarwarbhuiyan/elasticsearch,liweinan0423/elasticsearch,winstonewert/elasticsearch,MisterAndersen/elasticsearch,heng4fun/elasticsearch,jimczi/elasticsearch,jango2015/elasticsearch,uschindler/elasticsearch,Charlesdong/elasticsearch,lzo/elasticsearch-1,glefloch/elasticsearch,iamjakob/elasticsearch,dataduke/elasticsearch,coding0011/elasticsearch,onegambler/elasticsearch,MichaelLiZhou/elasticsearch,lzo/elasticsearch-1,vingupta3/elasticsearch,TonyChai24/ESSource,kevinkluge/elasticsearch,fred84/elasticsearch,IanvsPoplicola/elasticsearch,kingaj/elasticsearch,Brijeshrpatel9/elasticsearch,brandonkearby/elasticsearch,mmaracic/elasticsearch,Widen/elasticsearch,vrkans
agara/elasticsearch,hirdesh2008/elasticsearch,JackyMai/elasticsearch,janmejay/elasticsearch,mnylen/elasticsearch,MaineC/elasticsearch,hechunwen/elasticsearch,jchampion/elasticsearch,fred84/elasticsearch,spiegela/elasticsearch,ulkas/elasticsearch,jbertouch/elasticsearch,glefloch/elasticsearch,mrorii/elasticsearch,StefanGor/elasticsearch,xpandan/elasticsearch,pritishppai/elasticsearch,hechunwen/elasticsearch,kkirsche/elasticsearch,dylan8902/elasticsearch,martinstuga/elasticsearch,caengcjd/elasticsearch,bestwpw/elasticsearch,umeshdangat/elasticsearch,nazarewk/elasticsearch,SergVro/elasticsearch,tcucchietti/elasticsearch,weipinghe/elasticsearch,rento19962/elasticsearch,jpountz/elasticsearch,overcome/elasticsearch,AndreKR/elasticsearch,petabytedata/elasticsearch,slavau/elasticsearch,TonyChai24/ESSource,MisterAndersen/elasticsearch,gfyoung/elasticsearch,LeoYao/elasticsearch,ImpressTV/elasticsearch,phani546/elasticsearch,weipinghe/elasticsearch,mikemccand/elasticsearch,kingaj/elasticsearch,ajhalani/elasticsearch,alexkuk/elasticsearch,mgalushka/elasticsearch,nazarewk/elasticsearch,anti-social/elasticsearch,Asimov4/elasticsearch,i-am-Nathan/elasticsearch,kaneshin/elasticsearch,Widen/elasticsearch,scorpionvicky/elasticsearch,mm0/elasticsearch,tsohil/elasticsearch,davidvgalbraith/elasticsearch,TonyChai24/ESSource,sneivandt/elasticsearch,huypx1292/elasticsearch,golubev/elasticsearch,vingupta3/elasticsearch,andrejserafim/elasticsearch,ZTE-PaaS/elasticsearch,iantruslove/elasticsearch,beiske/elasticsearch,uschindler/elasticsearch,truemped/elasticsearch,PhaedrusTheGreek/elasticsearch,jimhooker2002/elasticsearch,ThiagoGarciaAlves/elasticsearch,overcome/elasticsearch,dataduke/elasticsearch,hechunwen/elasticsearch,AleksKochev/elasticsearch,amaliujia/elasticsearch,diendt/elasticsearch,avikurapati/elasticsearch,VukDukic/elasticsearch,rento19962/elasticsearch,ThalaivaStars/OrgRepo1,lmtwga/elasticsearch,davidvgalbraith/elasticsearch,humandb/elasticsearch,sc0ttkclark/elasticsearch,Uiho/ela
sticsearch,knight1128/elasticsearch,sdauletau/elasticsearch,jango2015/elasticsearch,EasonYi/elasticsearch,fooljohnny/elasticsearch,sdauletau/elasticsearch,C-Bish/elasticsearch,ckclark/elasticsearch,Microsoft/elasticsearch,18098924759/elasticsearch,easonC/elasticsearch,dongjoon-hyun/elasticsearch,fooljohnny/elasticsearch,xingguang2013/elasticsearch,loconsolutions/elasticsearch,mjhennig/elasticsearch,vietlq/elasticsearch,Liziyao/elasticsearch,Stacey-Gammon/elasticsearch,javachengwc/elasticsearch,kalimatas/elasticsearch,mapr/elasticsearch,humandb/elasticsearch,wangtuo/elasticsearch,18098924759/elasticsearch,sarwarbhuiyan/elasticsearch,MisterAndersen/elasticsearch,sposam/elasticsearch,F0lha/elasticsearch,wangtuo/elasticsearch,acchen97/elasticsearch,chrismwendt/elasticsearch,jimhooker2002/elasticsearch,milodky/elasticsearch,sauravmondallive/elasticsearch,nezirus/elasticsearch,JervyShi/elasticsearch,infusionsoft/elasticsearch,dongjoon-hyun/elasticsearch,kcompher/elasticsearch,Clairebi/ElasticsearchClone,nomoa/elasticsearch,luiseduardohdbackup/elasticsearch,glefloch/elasticsearch,drewr/elasticsearch,alexkuk/elasticsearch,shreejay/elasticsearch,fernandozhu/elasticsearch,ImpressTV/elasticsearch,18098924759/elasticsearch,petmit/elasticsearch,smflorentino/elasticsearch,acchen97/elasticsearch,petabytedata/elasticsearch,jimhooker2002/elasticsearch,Microsoft/elasticsearch,mmaracic/elasticsearch,knight1128/elasticsearch,fred84/elasticsearch | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeBuilder;
import org.elasticsearch.node.internal.InternalNode;
import org.junit.After;
import org.junit.Ignore;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.*;
/**
 * A test that keeps a single node started for all tests, and that can be used to get
* references to Guice injectors in unit tests.
*/
@Ignore
public abstract class ElasticsearchSingleNodeTest extends ElasticsearchTestCase {

    // One node shared by every test in the JVM; started once on class load.
    private static final Node node = node();

    @After
    public void after() {
        // Drop every index so the shared node is clean for the next test.
        node.client().admin().indices().prepareDelete("*").get();
        // Fail loudly if a test left cluster-wide settings behind.
        MetaData metaData = node.client().admin().cluster().prepareState().get().getState().getMetaData();
        int persistentCount = metaData.persistentSettings().getAsMap().size();
        assertThat("test leaves persistent cluster metadata behind: " + metaData.persistentSettings().getAsMap(),
                persistentCount, equalTo(0));
        int transientCount = metaData.transientSettings().getAsMap().size();
        assertThat("test leaves transient cluster metadata behind: " + metaData.transientSettings().getAsMap(),
                transientCount, equalTo(0));
    }

    /**
     * Same as {@link #node(Settings) node(ImmutableSettings.EMPTY)}.
     */
    private static Node node() {
        return node(ImmutableSettings.EMPTY);
    }

    /**
     * Starts a local, single data node. Caller settings are layered in before
     * the mandatory ones below, so the mandatory ones always win.
     */
    private static Node node(Settings settings) {
        Settings nodeSettings = ImmutableSettings.builder()
                .put(ClusterName.SETTING, ElasticsearchSingleNodeTest.class.getName())
                .put("node.name", ElasticsearchSingleNodeTest.class.getName())
                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
                .put(settings)
                .put(EsExecutors.PROCESSORS, 1) // limit the number of threads created
                .put("http.enabled", false)
                .put("index.store.type", "ram")
                .put("config.ignore_system_properties", true) // make sure we get what we set :)
                .put("gateway.type", "none")
                .build();
        Node newNode = NodeBuilder.nodeBuilder().local(true).data(true).settings(nodeSettings).build();
        newNode.start();
        // Sanity check: the node we just built must really be a local node.
        assertThat(DiscoveryNode.localNode(newNode.settings()), is(true));
        return newNode;
    }

    /**
     * Fetches a Guice-managed instance of the given class from the node's injector.
     */
    public static <T> T getInstanceFromNode(Class<T> clazz, Node node) {
        return ((InternalNode) node).injector().getInstance(clazz);
    }

    public static IndexService createIndex(String index) {
        return createIndex(index, ImmutableSettings.EMPTY);
    }

    /**
     * Creates the index and blocks until its shards are allocated, so later
     * cluster state updates don't override changes made locally by the test.
     */
    public static IndexService createIndex(String index, Settings settings) {
        assertAcked(node.client().admin().indices().prepareCreate(index).setSettings(settings).get());
        ClusterHealthResponse clusterHealth = node.client().admin().cluster()
                .health(Requests.clusterHealthRequest(index).waitForYellowStatus().waitForEvents(Priority.LANGUID).waitForRelocatingShards(0)).actionGet();
        assertThat(clusterHealth.getStatus(), lessThanOrEqualTo(ClusterHealthStatus.YELLOW));
        assertThat("Cluster must be a single node cluster", clusterHealth.getNumberOfDataNodes(), equalTo(1));
        IndicesService indicesService = getInstanceFromNode(IndicesService.class, node);
        return indicesService.indexServiceSafe(index);
    }
}
| src/test/java/org/elasticsearch/test/ElasticsearchSingleNodeTest.java | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeBuilder;
import org.elasticsearch.node.internal.InternalNode;
import org.junit.After;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
/**
 * A test that keeps a single node started for all tests, and that can be used to get
* references to Guice injectors in unit tests.
*/
public class ElasticsearchSingleNodeTest extends ElasticsearchTestCase {

    // Single node shared by all tests in the JVM; started once on class load.
    private static final Node node = node();

    @After
    public void after() {
        // Remove every index so the shared node starts clean for the next test.
        node.client().admin().indices().prepareDelete("*").get();
        // Verify the test did not leave persistent/transient cluster settings behind.
        MetaData metaData = node.client().admin().cluster().prepareState().get().getState().getMetaData();
        assertThat("test leaves persistent cluster metadata behind: " + metaData.persistentSettings().getAsMap(),
                metaData.persistentSettings().getAsMap().size(), equalTo(0));
        assertThat("test leaves transient cluster metadata behind: " + metaData.transientSettings().getAsMap(),
                metaData.transientSettings().getAsMap().size(), equalTo(0));
    }

    /**
     * Same as {@link #node(Settings) node(ImmutableSettings.EMPTY)}.
     */
    public static Node node() {
        return node(ImmutableSettings.EMPTY);
    }

    /**
     * Builds and starts a local, single data node. Caller-supplied settings are
     * layered in before the mandatory ones, so the mandatory ones take precedence.
     *
     * NOTE(review): system properties/environment may still override these
     * settings; consider adding "config.ignore_system_properties: true" as
     * other test base classes do — confirm against InternalTestCluster.
     */
    public static Node node(Settings settings) {
        return NodeBuilder.nodeBuilder().local(true).data(true).settings(ImmutableSettings.builder()
                .put(ClusterName.SETTING, ElasticsearchSingleNodeTest.class.getName())
                .put("node.name", ElasticsearchSingleNodeTest.class.getName())
                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
                .put(settings)
                .put(EsExecutors.PROCESSORS, 1) // limit the number of threads created
                .put("http.enabled", false)
                .put("index.store.type", "ram")
                .put("gateway.type", "none")).build().start();
    }

    /**
     * Fetches a Guice-managed instance of the given class from the node's injector.
     */
    public static <T> T getInstanceFromNode(Class<T> clazz, Node node) {
        return ((InternalNode) node).injector().getInstance(clazz);
    }

    public static IndexService createIndex(String index) {
        return createIndex(index, ImmutableSettings.EMPTY);
    }

    /**
     * Creates an index on the shared node and waits until it is allocated.
     */
    public static IndexService createIndex(String index, Settings settings) {
        node.client().admin().indices().prepareCreate(index).setSettings(settings).get();
        // Wait for the index to be allocated so that cluster state updates don't override
        // changes that would have been done locally
        ClusterHealthResponse health = node.client().admin().cluster()
                .health(Requests.clusterHealthRequest(index).waitForYellowStatus().waitForEvents(Priority.LANGUID).waitForRelocatingShards(0)).actionGet();
        assertThat(health.getStatus(), lessThanOrEqualTo(ClusterHealthStatus.YELLOW));
        IndicesService instanceFromNode = getInstanceFromNode(IndicesService.class, node);
        return instanceFromNode.indexService(index);
    }
}
| [TEST] Enforce provided settings in test base classes
Environment variables might override the test settings even if
they are explicitly set. Other base classes like InternalTestCluster
also specify `config.ignore_system_properties: true` to ensure `what
we set is what we get`
| src/test/java/org/elasticsearch/test/ElasticsearchSingleNodeTest.java | [TEST] Enforce provided settings in test base classes |
|
Java | apache-2.0 | c750efbfca2c7f899d62e22fe11fe70fc94e2840 | 0 | pubudu538/carbon-apimgt,malinthaprasan/carbon-apimgt,wso2/carbon-apimgt,prasa7/carbon-apimgt,malinthaprasan/carbon-apimgt,chamindias/carbon-apimgt,isharac/carbon-apimgt,malinthaprasan/carbon-apimgt,bhathiya/carbon-apimgt,tharindu1st/carbon-apimgt,harsha89/carbon-apimgt,praminda/carbon-apimgt,Rajith90/carbon-apimgt,uvindra/carbon-apimgt,praminda/carbon-apimgt,tharikaGitHub/carbon-apimgt,pubudu538/carbon-apimgt,uvindra/carbon-apimgt,harsha89/carbon-apimgt,praminda/carbon-apimgt,Rajith90/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,chamilaadhi/carbon-apimgt,malinthaprasan/carbon-apimgt,nuwand/carbon-apimgt,chamindias/carbon-apimgt,isharac/carbon-apimgt,prasa7/carbon-apimgt,ruks/carbon-apimgt,tharindu1st/carbon-apimgt,prasa7/carbon-apimgt,chamilaadhi/carbon-apimgt,jaadds/carbon-apimgt,chamindias/carbon-apimgt,pubudu538/carbon-apimgt,isharac/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,bhathiya/carbon-apimgt,tharindu1st/carbon-apimgt,harsha89/carbon-apimgt,jaadds/carbon-apimgt,ruks/carbon-apimgt,bhathiya/carbon-apimgt,ruks/carbon-apimgt,ruks/carbon-apimgt,jaadds/carbon-apimgt,tharikaGitHub/carbon-apimgt,tharikaGitHub/carbon-apimgt,Rajith90/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,pubudu538/carbon-apimgt,fazlan-nazeem/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,harsha89/carbon-apimgt,fazlan-nazeem/carbon-apimgt,tharikaGitHub/carbon-apimgt,jaadds/carbon-apimgt,wso2/carbon-apimgt,wso2/carbon-apimgt,isharac/carbon-apimgt,chamilaadhi/carbon-apimgt,uvindra/carbon-apimgt,uvindra/carbon-apimgt,nuwand/carbon-apimgt,nuwand/carbon-apimgt,fazlan-nazeem/carbon-apimgt,chamilaadhi/carbon-apimgt,prasa7/carbon-apimgt,bhathiya/carbon-apimgt,Rajith90/carbon-apimgt,tharindu1st/carbon-apimgt,wso2/carbon-apimgt,chamindias/carbon-apimgt,nuwand/carbon-apimgt,fazlan-nazeem/carbon-apimgt | package org.wso2.carbon.apimgt.impl;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONException;
import org.json.JSONObject;
import org.wso2.carbon.apimgt.api.LoginPostExecutor;
import org.wso2.carbon.apimgt.api.NewPostLoginExecutor;
import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder;
import org.wso2.carbon.base.MultitenantConstants;
import org.wso2.carbon.user.core.UserRealm;
import org.wso2.carbon.user.core.UserStoreManager;
import org.wso2.carbon.user.core.service.RealmService;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
/**
 * Default {@link NewPostLoginExecutor} that derives subscription-grouping
 * identifiers from the user's organization claim in the user store.
 */
public class DefaultGroupIDExtractorImpl implements NewPostLoginExecutor {

    private static final Log log = LogFactory.getLog(DefaultGroupIDExtractorImpl.class);

    /** Claim URI that stores the organization value(s) of a user. */
    private static final String ORGANIZATION_CLAIM = "http://wso2.org/claims/organization";

    /**
     * Builds a single grouping identifier of the form
     * {@code <tenantDomain>/<organization>} for the user named in the login response.
     *
     * @param loginResponse JSON string containing at least the "user" and
     *                      "isSuperTenant" attributes
     * @return the grouping identifier, or {@code null} when the organization
     *         claim is unset or the lookup fails
     */
    public String getGroupingIdentifiers(String loginResponse) {
        String username = null;
        String organization = null;
        try {
            JSONObject obj = new JSONObject(loginResponse);
            username = (String) obj.get("user");
            Boolean isSuperTenant = (Boolean) obj.get("isSuperTenant");
            String tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
            // If the user is not in the super tenant domain, resolve the real domain name.
            if (!isSuperTenant) {
                tenantDomain = MultitenantUtils.getTenantDomain(username);
            }
            organization = lookupOrganizationClaim(username, tenantDomain, isSuperTenant);
            if (organization != null) {
                // Qualify the organization with the tenant domain.
                organization = tenantDomain + "/" + organization.trim();
            }
        } catch (JSONException e) {
            log.error("Exception occured while trying to get group Identifier from login response", e);
        } catch (org.wso2.carbon.user.api.UserStoreException e) {
            log.error("Error while checking user existence for " + username, e);
        }
        return organization;
    }

    /**
     * Returns one grouping identifier per comma-separated entry of the
     * organization claim.
     *
     * @param loginResponse JSON string containing at least the "user" and
     *                      "isSuperTenant" attributes
     * @return array of organization names; an empty array when the claim is
     *         unset; {@code null} when parsing or the user store lookup fails
     */
    @Override
    public String[] getGroupingIdentifierList(String loginResponse) {
        String username = null;
        String[] groupIdArray = null;
        try {
            JSONObject obj = new JSONObject(loginResponse);
            username = (String) obj.get("user");
            Boolean isSuperTenant = (Boolean) obj.get("isSuperTenant");
            String tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
            // If the user is not in the super tenant domain, resolve the real domain name.
            if (!isSuperTenant) {
                tenantDomain = MultitenantUtils.getTenantDomain(username);
            }
            String organization = lookupOrganizationClaim(username, tenantDomain, isSuperTenant);
            if (organization != null) {
                if (organization.contains(",")) {
                    // The original post-split loop re-assigned each entry via
                    // String.toString(), which is a no-op, so it was dropped.
                    // NOTE(review): multi-value entries are left untrimmed, unlike
                    // the single-value case below — confirm whether they should be.
                    groupIdArray = organization.split(",");
                } else {
                    groupIdArray = new String[] {organization.trim()};
                }
            } else {
                // No organization claim: the user belongs to no group.
                groupIdArray = new String[] {};
            }
        } catch (JSONException e) {
            log.error("Exception occured while trying to get group Identifier from login response", e);
        } catch (org.wso2.carbon.user.api.UserStoreException e) {
            log.error("Error while checking user existence for " + username, e);
        }
        return groupIdArray;
    }

    /**
     * Reads the organization claim value of the given user from the user store.
     *
     * @param username      fully qualified username from the login response
     * @param tenantDomain  tenant domain the user belongs to
     * @param isSuperTenant whether the user resides in the super tenant
     * @return the raw claim value, or {@code null} when the claim is unset
     * @throws org.wso2.carbon.user.api.UserStoreException on user store access failures
     */
    private String lookupOrganizationClaim(String username, String tenantDomain, boolean isSuperTenant)
            throws org.wso2.carbon.user.api.UserStoreException {
        RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService();
        int tenantId = MultitenantConstants.SUPER_TENANT_ID;
        if (!isSuperTenant) {
            tenantId = realmService.getTenantManager().getTenantId(tenantDomain);
        }
        UserRealm realm = (UserRealm) realmService.getTenantUserRealm(tenantId);
        UserStoreManager manager = realm.getUserStoreManager();
        return manager.getUserClaimValue(MultitenantUtils.getTenantAwareUsername(username), ORGANIZATION_CLAIM, null);
    }
}
| components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/DefaultGroupIDExtractorImpl.java | package org.wso2.carbon.apimgt.impl;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONException;
import org.json.JSONObject;
import org.wso2.carbon.apimgt.api.LoginPostExecutor;
import org.wso2.carbon.apimgt.api.NewPostLoginExecutor;
import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder;
import org.wso2.carbon.base.MultitenantConstants;
import org.wso2.carbon.user.core.UserRealm;
import org.wso2.carbon.user.core.UserStoreManager;
import org.wso2.carbon.user.core.service.RealmService;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
public class DefaultGroupIDExtractorImpl implements NewPostLoginExecutor {

    private static final Log log = LogFactory.getLog(DefaultGroupIDExtractorImpl.class);

    /**
     * Builds a single grouping identifier of the form
     * {@code <tenantDomain>/<organization>} for the user named in the login
     * response JSON (expects "user" and "isSuperTenant" attributes).
     *
     * @param loginResponse JSON string produced at login time
     * @return the grouping identifier, or {@code null} when the organization
     *         claim is unset or the lookup fails
     */
    public String getGroupingIdentifiers(String loginResponse){
        JSONObject obj;
        String username = null;
        Boolean isSuperTenant;
        int tenantId = MultitenantConstants.SUPER_TENANT_ID;
        String tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
        // Claim URI that stores the user's organization value.
        String claim = "http://wso2.org/claims/organization";
        String organization = null;
        try {
            obj = new JSONObject(loginResponse);
            username = (String)obj.get("user");
            isSuperTenant= (Boolean)obj.get("isSuperTenant");
            RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService();
            //if the user is not in the super tenant domain then find the domain name and tenant id.
            if(!isSuperTenant){
                tenantDomain = MultitenantUtils.getTenantDomain(username);
                tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                        .getTenantId(tenantDomain);
            }
            UserRealm realm = (UserRealm) realmService.getTenantUserRealm(tenantId);
            UserStoreManager manager = realm.getUserStoreManager();
            organization =
                    manager.getUserClaimValue(MultitenantUtils.getTenantAwareUsername(username), claim, null);
            if (organization != null) {
                // Qualify the organization with the tenant domain.
                organization = tenantDomain + "/" + organization.trim();
            }
        } catch (JSONException e) {
            log.error("Exception occured while trying to get group Identifier from login response", e);
        } catch (org.wso2.carbon.user.api.UserStoreException e) {
            log.error("Error while checking user existence for " + username, e);
        }
        return organization;
    }

    /**
     * Splits the organization claim on "," and returns one grouping identifier
     * per entry.
     *
     * @param loginResponse JSON string produced at login time
     * @return array of organization names; an empty array when the claim is
     *         unset; {@code null} when parsing or the user store lookup fails
     */
    @Override
    public String[] getGroupingIdentifierList(String loginResponse) {
        JSONObject obj;
        String username = null;
        Boolean isSuperTenant;
        int tenantId = MultitenantConstants.SUPER_TENANT_ID;
        String tenantDomain = MultitenantConstants.SUPER_TENANT_DOMAIN_NAME;
        // Claim URI that stores the user's organization value(s).
        String claim = "http://wso2.org/claims/organization";
        String organization = null;
        String[] groupIdArray = null;
        try {
            obj = new JSONObject(loginResponse);
            username = (String)obj.get("user");
            isSuperTenant= (Boolean)obj.get("isSuperTenant");
            RealmService realmService = ServiceReferenceHolder.getInstance().getRealmService();
            //if the user is not in the super tenant domain then find the domain name and tenant id.
            if(!isSuperTenant){
                tenantDomain = MultitenantUtils.getTenantDomain(username);
                tenantId = ServiceReferenceHolder.getInstance().getRealmService().getTenantManager()
                        .getTenantId(tenantDomain);
            }
            UserRealm realm = (UserRealm) realmService.getTenantUserRealm(tenantId);
            UserStoreManager manager = realm.getUserStoreManager();
            organization =
                    manager.getUserClaimValue(MultitenantUtils.getTenantAwareUsername(username), claim, null);
            if (organization != null) {
                if(organization.contains(",")){
                    groupIdArray = organization.split(",");
                    // NOTE(review): String.toString() returns the same instance,
                    // so this loop has no effect; entries are left untrimmed.
                    for (int i = 0; i < groupIdArray.length; i++) {
                        groupIdArray[i] = groupIdArray[i].toString();
                    }
                }else {
                    organization = organization.trim();
                    groupIdArray = new String[]{organization};
                }
            }else {
                // If claim is null then returning a empty string
                groupIdArray = new String[]{};
            }
        } catch (JSONException e) {
            log.error("Exception occured while trying to get group Identifier from login response", e);
        } catch (org.wso2.carbon.user.api.UserStoreException e) {
            log.error("Error while checking user existence for " + username, e);
        }
        return groupIdArray;
    }
}
| format code
| components/apimgt/org.wso2.carbon.apimgt.impl/src/main/java/org/wso2/carbon/apimgt/impl/DefaultGroupIDExtractorImpl.java | format code |
|
Java | apache-2.0 | dca63c3b4cd6a851bc498e6cd722a09e14f2c04d | 0 | localmatters/lesscss4j,localmatters/lesscss4j | /**
* File: LessCssCompilerFactoryBean.java
*
* Author: David Hay ([email protected])
* Creation Date: May 18, 2010
* Creation Time: 10:47:16 AM
*
* Copyright 2010 Local Matters, Inc.
* All Rights Reserved
*
* Last checkin:
* $Author$
* $Revision$
* $Date$
*/
package org.lesscss4j.spring;
import org.lesscss4j.compile.LessCssCompiler;
import org.lesscss4j.compile.LessCssCompilerImpl;
import org.lesscss4j.factory.StyleSheetFactory;
import org.lesscss4j.output.PrettyPrintOptions;
import org.lesscss4j.output.StyleSheetWriter;
import org.lesscss4j.output.StyleSheetWriterImpl;
import org.lesscss4j.parser.LessCssStyleSheetParser;
import org.lesscss4j.parser.StyleSheetParser;
import org.lesscss4j.parser.StyleSheetResourceLoader;
import org.lesscss4j.transform.manager.TransformerManager;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
/**
 * FactoryBean that makes it easier to configure the {@link LessCssCompiler} without having to
 * redefine the parser and writer instances created by default by the compiler class.
 *
 * <p>The factory caches a singleton compiler instance. Property setters call
 * {@link #maybeReinitialize()} so that values changed after the instance has been created are
 * re-injected into it.</p>
 */
public class LessCssCompilerFactoryBean implements FactoryBean, InitializingBean {
    // All settings are optional; a null value means "keep the compiler's own default".
    private String _defaultEncoding;
    private Integer _initialBufferSize;
    private Integer _readBufferSize;
    private Boolean _prettyPrintEnabled;
    private PrettyPrintOptions _prettyPrintOptions;
    private StyleSheetResourceLoader _styleSheetResourceLoader;
    private TransformerManager _transformerManager;

    /** Lazily created singleton returned by {@link #getObject()}. */
    private LessCssCompilerImpl _compilerInstance;

    public TransformerManager getTransformerManager() {
        return _transformerManager;
    }

    public void setTransformerManager(TransformerManager transformerManager) {
        _transformerManager = transformerManager;
        // Re-inject into the cached compiler, consistent with the other setters.
        maybeReinitialize();
    }

    public StyleSheetResourceLoader getStyleSheetResourceLoader() {
        return _styleSheetResourceLoader;
    }

    public void setStyleSheetResourceLoader(StyleSheetResourceLoader styleSheetResourceLoader) {
        _styleSheetResourceLoader = styleSheetResourceLoader;
        // Re-inject into the cached compiler, consistent with the other setters.
        maybeReinitialize();
    }

    public String getDefaultEncoding() {
        return _defaultEncoding;
    }

    public void setDefaultEncoding(String defaultEncoding) {
        _defaultEncoding = defaultEncoding;
        maybeReinitialize();
    }

    public Integer getInitialBufferSize() {
        return _initialBufferSize;
    }

    public void setInitialBufferSize(Integer initialBufferSize) {
        _initialBufferSize = initialBufferSize;
        maybeReinitialize();
    }

    public Integer getReadBufferSize() {
        return _readBufferSize;
    }

    public void setReadBufferSize(Integer readBufferSize) {
        _readBufferSize = readBufferSize;
        maybeReinitialize();
    }

    public Boolean getPrettyPrintEnabled() {
        return _prettyPrintEnabled;
    }

    public void setPrettyPrintEnabled(Boolean prettyPrintEnabled) {
        _prettyPrintEnabled = prettyPrintEnabled;
        maybeReinitialize();
    }

    /** Convenience inverse of {@link #setPrettyPrintEnabled}: compression == !prettyPrint. */
    public void setCompressionEnabled(Boolean compressionEnabled) {
        // setPrettyPrintEnabled already triggers maybeReinitialize(); no second call needed.
        setPrettyPrintEnabled(compressionEnabled == null ? null : !compressionEnabled);
    }

    public Boolean isCompressionEnabled() {
        return getPrettyPrintEnabled() == null ? null : !getPrettyPrintEnabled();
    }

    public PrettyPrintOptions getPrettyPrintOptions() {
        return _prettyPrintOptions;
    }

    public void setPrettyPrintOptions(PrettyPrintOptions prettyPrintOptions) {
        _prettyPrintOptions = prettyPrintOptions;
        maybeReinitialize();
    }

    @Override
    public void afterPropertiesSet() throws Exception {
        // Eagerly create the compiler so configuration errors surface at context startup.
        getObject();
    }

    @Override
    public Class getObjectType() {
        return LessCssCompiler.class;
    }

    @Override
    public Object getObject() throws Exception {
        if (_compilerInstance == null) {
            _compilerInstance = (LessCssCompilerImpl) createInstance();
        }
        return _compilerInstance;
    }

    @Override
    public boolean isSingleton() {
        return true;
    }

    /** Creates and fully configures a new compiler instance. */
    protected Object createInstance() throws Exception {
        LessCssCompilerImpl compiler = new LessCssCompilerImpl();
        initializeCompiler(compiler);
        return compiler;
    }

    /** Re-applies all configured properties to the cached singleton, if one exists yet. */
    private void maybeReinitialize() {
        if (isSingleton() && _compilerInstance != null) {
            initializeCompiler(_compilerInstance);
        }
    }

    private void initializeCompiler(LessCssCompilerImpl compiler) {
        initializeParser(compiler.getStyleSheetParser());
        initializeWriter(compiler.getStyleSheetWriter());
        initializeTransformerManager(compiler);
    }

    protected void initializeTransformerManager(LessCssCompilerImpl compiler) {
        TransformerManager transformerManager = getTransformerManager();
        if (transformerManager != null) {
            compiler.setTransformerManager(transformerManager);
        }
    }

    protected void initializeWriter(StyleSheetWriter styleSheetWriter) {
        StyleSheetWriterImpl writer = (StyleSheetWriterImpl) styleSheetWriter;
        if (getPrettyPrintEnabled() != null) {
            writer.setPrettyPrintEnabled(getPrettyPrintEnabled());
        }
        if (getPrettyPrintOptions() != null) {
            writer.setPrettyPrintOptions(getPrettyPrintOptions());
        }
        if (getDefaultEncoding() != null) {
            writer.setDefaultEncoding(getDefaultEncoding());
        }
    }

    protected void initializeParser(StyleSheetParser styleSheetParser) {
        LessCssStyleSheetParser parser = (LessCssStyleSheetParser) styleSheetParser;
        if (getDefaultEncoding() != null) {
            parser.setDefaultEncoding(getDefaultEncoding());
        }
        if (getInitialBufferSize() != null) {
            parser.setInitialBufferSize(getInitialBufferSize());
        }
        // BUG FIX: this block previously duplicated the initial-buffer-size check above,
        // so the configured read buffer size was silently ignored.
        if (getReadBufferSize() != null) {
            parser.setReadBufferSize(getReadBufferSize());
        }
        if (getStyleSheetResourceLoader() != null) {
            ((StyleSheetFactory) parser.getStyleSheetFactory())
                    .setStyleSheetResourceLoader(getStyleSheetResourceLoader());
        }
    }
}
| src/main/java/org/lesscss4j/spring/LessCssCompilerFactoryBean.java | /**
* File: LessCssCompilerFactoryBean.java
*
* Author: David Hay ([email protected])
* Creation Date: May 18, 2010
* Creation Time: 10:47:16 AM
*
* Copyright 2010 Local Matters, Inc.
* All Rights Reserved
*
* Last checkin:
* $Author$
* $Revision$
* $Date$
*/
package org.lesscss4j.spring;
import org.lesscss4j.compile.LessCssCompiler;
import org.lesscss4j.compile.LessCssCompilerImpl;
import org.lesscss4j.factory.StyleSheetFactory;
import org.lesscss4j.output.PrettyPrintOptions;
import org.lesscss4j.output.StyleSheetWriter;
import org.lesscss4j.output.StyleSheetWriterImpl;
import org.lesscss4j.parser.LessCssStyleSheetParser;
import org.lesscss4j.parser.StyleSheetParser;
import org.lesscss4j.parser.StyleSheetResourceLoader;
import org.lesscss4j.transform.manager.TransformerManager;
import org.springframework.beans.factory.config.AbstractFactoryBean;
/**
 * FactoryBean that makes it easier to configure the LessCssCompiler without having to redefine the parser and writer
 * instances created by default by the compiler class.
 *
 * Extends Spring's AbstractFactoryBean, so by default the product is a singleton created once via
 * {@link #createInstance()}; property changes made after creation are NOT re-applied.
 */
public class LessCssCompilerFactoryBean extends AbstractFactoryBean {
    // All settings are optional; a null value means "keep the compiler's own default".
    private String _defaultEncoding;
    private Integer _initialBufferSize;
    private Integer _readBufferSize;
    private Boolean _prettyPrintEnabled;
    private PrettyPrintOptions _prettyPrintOptions;
    private StyleSheetResourceLoader _styleSheetResourceLoader;
    private TransformerManager _transformerManager;
    public TransformerManager getTransformerManager() {
        return _transformerManager;
    }
    public void setTransformerManager(TransformerManager transformerManager) {
        _transformerManager = transformerManager;
    }
    public StyleSheetResourceLoader getStyleSheetResourceLoader() {
        return _styleSheetResourceLoader;
    }
    public void setStyleSheetResourceLoader(StyleSheetResourceLoader styleSheetResourceLoader) {
        _styleSheetResourceLoader = styleSheetResourceLoader;
    }
    public String getDefaultEncoding() {
        return _defaultEncoding;
    }
    public void setDefaultEncoding(String defaultEncoding) {
        _defaultEncoding = defaultEncoding;
    }
    public Integer getInitialBufferSize() {
        return _initialBufferSize;
    }
    public void setInitialBufferSize(Integer initialBufferSize) {
        _initialBufferSize = initialBufferSize;
    }
    public Integer getReadBufferSize() {
        return _readBufferSize;
    }
    public void setReadBufferSize(Integer readBufferSize) {
        _readBufferSize = readBufferSize;
    }
    public Boolean getPrettyPrintEnabled() {
        return _prettyPrintEnabled;
    }
    public void setPrettyPrintEnabled(Boolean prettyPrintEnabled) {
        _prettyPrintEnabled = prettyPrintEnabled;
    }
    // Convenience inverse of pretty printing: compression == !prettyPrint.
    public void setCompressionEnabled(Boolean compressionEnabled) {
        setPrettyPrintEnabled(compressionEnabled == null ? null : !compressionEnabled);
    }
    public Boolean isCompressionEnabled() {
        return getPrettyPrintEnabled() == null ? null : !getPrettyPrintEnabled();
    }
    public PrettyPrintOptions getPrettyPrintOptions() {
        return _prettyPrintOptions;
    }
    public void setPrettyPrintOptions(PrettyPrintOptions prettyPrintOptions) {
        _prettyPrintOptions = prettyPrintOptions;
    }
    @Override
    public Class getObjectType() {
        return LessCssCompiler.class;
    }
    // Builds the compiler and pushes every non-null setting into its parser/writer/transformer.
    @Override
    protected Object createInstance() throws Exception {
        LessCssCompilerImpl compiler = new LessCssCompilerImpl();
        initializeParser(compiler.getStyleSheetParser());
        initializeWriter(compiler.getStyleSheetWriter());
        initializeTransformerManager(compiler);
        return compiler;
    }
    protected void initializeTransformerManager(LessCssCompilerImpl compiler) {
        TransformerManager transformerManager = getTransformerManager();
        if (transformerManager != null) {
            compiler.setTransformerManager(transformerManager);
        }
    }
    protected void initializeWriter(StyleSheetWriter styleSheetWriter) {
        StyleSheetWriterImpl writer = (StyleSheetWriterImpl) styleSheetWriter;
        if (getPrettyPrintEnabled() != null) {
            writer.setPrettyPrintEnabled(getPrettyPrintEnabled());
        }
        if (getPrettyPrintOptions() != null) {
            writer.setPrettyPrintOptions(getPrettyPrintOptions());
        }
        if (getDefaultEncoding() != null) {
            writer.setDefaultEncoding(getDefaultEncoding());
        }
    }
    protected void initializeParser(StyleSheetParser styleSheetParser) {
        LessCssStyleSheetParser parser = (LessCssStyleSheetParser) styleSheetParser;
        if (getDefaultEncoding() != null) {
            parser.setDefaultEncoding(getDefaultEncoding());
        }
        if (getInitialBufferSize() != null) {
            parser.setInitialBufferSize(getInitialBufferSize());
        }
        // NOTE(review): this block duplicates the initial-buffer-size check above;
        // presumably getReadBufferSize()/setReadBufferSize was intended here, so the
        // configured read buffer size is currently never applied — confirm and fix.
        if (getInitialBufferSize() != null) {
            parser.setInitialBufferSize(getInitialBufferSize());
        }
        if (getStyleSheetResourceLoader() != null) {
            ((StyleSheetFactory) parser.getStyleSheetFactory())
                    .setStyleSheetResourceLoader(getStyleSheetResourceLoader());
        }
    }
}
| Updating properties in LessCssCompilerFactoryBean re-injects those properties into the singleton instance.
git-svn-id: 2eebe84b5b32eb059849d530d3c549d0da009da8@135459 cb919951-1609-0410-8833-993d306c94f7
| src/main/java/org/lesscss4j/spring/LessCssCompilerFactoryBean.java | Updating properties in LessCssCompilerFactoryBean re-injects those properties into the singleton instance. |
|
Java | apache-2.0 | c342bf3ab52741830b35343c47f6d6c36fc8b97c | 0 | DataArt/CalculationEngine,DataArt/CalculationEngine,DataArt/CalculationEngine | package com.dataart.spreadsheetanalytics.engine.execgraph;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.CELL_WITH_FORMULA;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.CELL_WITH_VALUE;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.CONSTANT_VALUE;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.EMPTY_CELL;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.FUNCTION;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.IF;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.OPERATOR;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.RANGE;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.isCell;
import static java.lang.String.format;
import static java.lang.String.join;
import static java.util.Arrays.asList;
import static java.util.stream.Collectors.toList;
import static org.apache.poi.common.execgraph.IExecutionGraphVertexProperty.PropertyName.FORMULA_PTG;
import static org.apache.poi.common.execgraph.IExecutionGraphVertexProperty.PropertyName.NAME;
import static org.apache.poi.common.execgraph.IExecutionGraphVertexProperty.PropertyName.TYPE;
import static org.apache.poi.common.execgraph.IExecutionGraphVertexProperty.PropertyName.VALUE;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.poi.common.execgraph.IExecutionGraphBuilder;
import org.apache.poi.common.execgraph.IExecutionGraphVertex;
import org.apache.poi.common.execgraph.IExecutionGraphVertexProperty;
import org.apache.poi.common.execgraph.IExecutionGraphVertexProperty.PropertyName;
import org.apache.poi.ss.formula.eval.ValueEval;
import org.apache.poi.ss.formula.functions.Area2DValues;
import org.apache.poi.ss.formula.ptg.AbstractFunctionPtg;
import org.apache.poi.ss.formula.ptg.AddPtg;
import org.apache.poi.ss.formula.ptg.AreaPtg;
import org.apache.poi.ss.formula.ptg.DividePtg;
import org.apache.poi.ss.formula.ptg.EqualPtg;
import org.apache.poi.ss.formula.ptg.GreaterThanPtg;
import org.apache.poi.ss.formula.ptg.LessThanPtg;
import org.apache.poi.ss.formula.ptg.MultiplyPtg;
import org.apache.poi.ss.formula.ptg.NameXPxg;
import org.apache.poi.ss.formula.ptg.NotEqualPtg;
import org.apache.poi.ss.formula.ptg.ParenthesisPtg;
import org.apache.poi.ss.formula.ptg.Ptg;
import org.apache.poi.ss.formula.ptg.Ref3DPxg;
import org.apache.poi.ss.formula.ptg.RefPtg;
import org.apache.poi.ss.formula.ptg.ScalarConstantPtg;
import org.apache.poi.ss.formula.ptg.SubtractPtg;
import org.apache.poi.ss.formula.ptg.UnaryPlusPtg;
import org.apache.poi.ss.formula.ptg.ValueOperatorPtg;
import org.jgrapht.DirectedGraph;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;
import com.dataart.spreadsheetanalytics.api.model.ICellAddress;
import com.dataart.spreadsheetanalytics.api.model.ICellValue;
import com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type;
import com.dataart.spreadsheetanalytics.model.CellAddress;
import com.dataart.spreadsheetanalytics.model.CellFormulaExpression;
import com.dataart.spreadsheetanalytics.model.CellValue;
/**
 * Collects POI formula-evaluation callbacks into a directed execution graph
 * ({@link ExecutionGraph}) whose vertices are cells, operators, functions, ranges and constants.
 *
 * TODO: write about internal representation, not thread safe, one instance per
 * calculation, etc.
 */
public class PoiExecutionGraphBuilder implements IExecutionGraphBuilder {

    protected static final String CONSTANT_VALUE_NAME = "VALUE";
    protected static final String UNDEFINED_EXTERNAL_FUNCTION = "#external#";

    // Symbols that leak into generated formula strings (list-toString brackets) and
    // must be stripped before the strings are shown to users.
    protected static final Set<String> POI_VALUE_REDUNDANT_SYMBOLS = new HashSet<>(Arrays.asList("[", "]"));

    protected final DirectedGraph<IExecutionGraphVertex, DefaultEdge> dgraph;

    // Cache: POI ValueEval -> vertex that produced it.
    protected Map<ValueEval, IExecutionGraphVertex> valueToVertex;

    // Cache: A1-style cell address -> every vertex created for that address.
    protected Map<String, Set<IExecutionGraphVertex>> addressToVertices;

    public PoiExecutionGraphBuilder() {
        this.dgraph = new DefaultDirectedGraph<>(DefaultEdge.class);
        this.valueToVertex = new HashMap<>();
        this.addressToVertices = new HashMap<>();
    }

    public ExecutionGraph get() {
        return ExecutionGraph.wrap(dgraph);
    }

    /** Builds a graph holding a single EMPTY_CELL vertex for the given address. */
    public ExecutionGraph getSingleNodeGraph(ICellAddress address) {
        DirectedGraph<IExecutionGraphVertex, DefaultEdge> emptyGraph = new DefaultDirectedGraph<>(DefaultEdge.class);
        ExecutionGraphVertex vertex = new ExecutionGraphVertex(address.a1Address().address());
        vertex.property(TYPE).set(EMPTY_CELL);
        emptyGraph.addVertex(vertex);
        return ExecutionGraph.wrap(emptyGraph);
    }

    /**
     * This method should be used when creating a new vertex from a cell, so
     * vertex name is a cell's address. New Vertex will be created any time this
     * method is invoked. New vertex will be stored in
     * address-to-set-of-vertices map.
     */
    @Override
    public IExecutionGraphVertex createVertex(String address) {
        // create new vertex object
        ExecutionGraphVertex v = new ExecutionGraphVertex(address);

        // add vertex to actual graph
        dgraph.addVertex(v);

        // put new vertex to set of vertices with the same address, since they
        // all must have the same set of properties and values
        Set<IExecutionGraphVertex> vs = addressToVertices.containsKey(address) ? addressToVertices.get(address) : new HashSet<>();
        vs.add(v);
        addressToVertices.put(address, vs);

        return v;
    }

    @Override
    public IExecutionGraphVertex createVertex(Ptg ptg) {
        if (isSkipVertex(ptg)) { return null; }

        boolean isCell = ptg instanceof RefPtg;
        String name = ptgToString(ptg);

        if (isCell) { // cell
            return createVertex(name);
        } else { // operation
            ExecutionGraphVertex v = new ExecutionGraphVertex(name);
            dgraph.addVertex(v);
            return v;
        }
    }

    @Override
    public void connect(IExecutionGraphVertex from, IExecutionGraphVertex to) {
        dgraph.addEdge(from, to);
    }

    @Override
    public void removeVertex(IExecutionGraphVertex vertex) {
        if (vertex == null) { return; }
        dgraph.removeVertex(vertex);
    }

    @Override
    public void putVertexToCache(ValueEval value, IExecutionGraphVertex vertex) {
        if (value == null) { throw new IllegalArgumentException("ValueEval to assosiate vertex with cannot be null."); }
        valueToVertex.put(value, vertex);
    }

    @Override
    public IExecutionGraphVertex getVertexFromCache(ValueEval value) {
        if (value == null) { throw new IllegalArgumentException("ValueEval to assosiate vertex with cannot be null."); }
        return valueToVertex.get(value);
    }

    @Override
    public void putVertexToCache(String address, IExecutionGraphVertex vertex) {
        if (!addressToVertices.containsKey(address)) { addressToVertices.put(address, new HashSet<>()); }
        addressToVertices.get(address).add(vertex);
    }

    @Override
    public Set<IExecutionGraphVertex> getVerticesFromCache(String address) {
        return addressToVertices.get(address);
    }

    @Override
    public void putVertexToCache(int row, int column, IExecutionGraphVertex vertex) {
        putVertexToCache(CellAddress.toA1Address(row, column), vertex);
    }

    @Override
    public Set<IExecutionGraphVertex> getVerticesFromCache(int row, int column) {
        return getVerticesFromCache(CellAddress.toA1Address(row, column));
    }

    @Override
    public IExecutionGraphVertexProperty getVertexProperty(IExecutionGraphVertex vertex, PropertyName property) {
        return ((ExecutionGraphVertex) vertex).property(property);
    }

    /**
     * Do anything you want here. After graph is completed and we are out of POI
     * context you can add\remove\etc any information you want.
     */
    public void runPostProcessing() {
        DirectedGraph<IExecutionGraphVertex, DefaultEdge> graph = dgraph;

        // make identical vertices have the same set of properties
        // two vertices are identical if they have the same address value.
        // Id for every vertex is unique, so this is not a flag here
        for (String address : addressToVertices.keySet()) {
            Set<IExecutionGraphVertex> vs = addressToVertices.get(address);

            // the logic below is very fragile and based on some empirical model
            // and may not work for other type of graphs
            if (vs != null && vs.size() > 1) {
                IExecutionGraphVertex standard = null;

                for (IExecutionGraphVertex ivertex : vs) {
                    ExecutionGraphVertex vertex = (ExecutionGraphVertex) ivertex;
                    if (CELL_WITH_FORMULA == (Type) vertex.property(TYPE).get()) { standard = vertex; break; }
                }

                if (standard != null) {
                    copyProperties(standard, vs);
                }
            }
        }

        // copy or link subgraphs to identical vertices
        // and
        // modify Formula field with additional values
        Map<String, AtomicInteger> adressToCount = new HashMap<>();

        for (IExecutionGraphVertex ivertex : graph.vertexSet()) {
            ExecutionGraphVertex vertex = (ExecutionGraphVertex) ivertex;

            // restore/add subgraphs to identical vertices
            Type type = (Type) vertex.property(TYPE).get();

            if (isCell(type)) {
                String address = (String) vertex.property(NAME).get();

                adressToCount.putIfAbsent(address, new AtomicInteger(0));

                if (adressToCount.get(address).incrementAndGet() > 1) { //count > 1
                    // need to link
                    Set<IExecutionGraphVertex> subgraphTops = new HashSet<>();

                    // TODO: point to optimize!
                    for (IExecutionGraphVertex itmpVertex : graph.vertexSet()) {
                        ExecutionGraphVertex tmpVertex = (ExecutionGraphVertex) itmpVertex;

                        String tmpAddress = (String) tmpVertex.property(NAME).get();
                        if (address.equals(tmpAddress)) { // check for subgraph
                            for (DefaultEdge tmpEdge : graph.incomingEdgesOf(tmpVertex)) {
                                subgraphTops.add(graph.getEdgeSource(tmpEdge));
                            }
                        }
                    }

                    for (IExecutionGraphVertex subVertex : subgraphTops) {
                        for (IExecutionGraphVertex vertexOfAddress : addressToVertices.get(address)) {
                            graph.addEdge(subVertex, vertexOfAddress);
                        }
                    }
                }
            }

            /* Adding IF Value */
            if (IF == type) {
                Set<DefaultEdge> two = graph.incomingEdgesOf(vertex);
                if (two.size() != 2) { throw new IllegalStateException("IF must have only two incoming edges."); }

                Object ifBranchValue = null;
                for (DefaultEdge e : two) {
                    ExecutionGraphVertex oneOfTwo = (ExecutionGraphVertex) graph.getEdgeSource(e);
                    if (!isCompareOperand(oneOfTwo.name())) {
                        ifBranchValue = oneOfTwo.property(VALUE).get();
                        break;
                    }
                }
                vertex.property(VALUE).set(ifBranchValue);
            }
        }

        // The (single) root has no outgoing edges; build the formula expressions
        // for the whole graph starting from it.
        for (IExecutionGraphVertex vert : graph.vertexSet()) {
            if (graph.outgoingEdgesOf(vert).isEmpty()) {
                ExecutionGraphVertex root = (ExecutionGraphVertex) vert;
                root.formula = buildFormula(root, graph);
                break;
            }
        }
    }

    /* Modifications for: FORMULA */
    // set formula_values to user-friendly string like: '1 + 2' or
    // 'SUM(2,1)'
    // For OPERATOR and FUNCTION types
    protected CellFormulaExpression buildFormula(ExecutionGraphVertex vertex, DirectedGraph<IExecutionGraphVertex, DefaultEdge> graph) {
        switch (vertex.type) {

        case CELL_WITH_VALUE: {
            CellFormulaExpression formula = (CellFormulaExpression) vertex.formula;
            formula.formulaStr(vertex.property(NAME).get().toString());
            formula.formulaValues(CellValue.fromCellValueToString(vertex.value()));
            formula.formulaPtgStr(CellValue.fromCellValueToString(vertex.value()));
            formula.ptgStr(vertex.property(NAME).get().toString());
            if (vertex.property(VALUE).get().toString().isEmpty()) {
                vertex.property(TYPE).set(EMPTY_CELL);
            }
            return formula;
        }
        case CELL_WITH_REFERENCE:
        case CELL_WITH_FORMULA: {
            // A formula/reference cell has exactly one incoming subtree; copy its expression.
            DefaultEdge edge = graph.incomingEdgesOf(vertex).iterator().next();
            ExecutionGraphVertex ivertex = (ExecutionGraphVertex) graph.getEdgeSource(edge);
            CellFormulaExpression formula = buildFormula(ivertex, graph);
            vertex.formula = CellFormulaExpression.copyOf(formula);
            vertex.value = ivertex.value;
            return CellFormulaExpression.copyOf(formula);
        }
        case OPERATOR:
        case FUNCTION: {
            Set<DefaultEdge> edges = graph.incomingEdgesOf(vertex);
            List<String> formulaStringNodes = new LinkedList<>();
            List<String> formulaValuesNodes = new LinkedList<>();
            List<String> formulaPtgNodes = new LinkedList<>();
            List<String> ptgNodes = new LinkedList<>();
            Object[] formulaPtg = (Object[]) vertex.property(FORMULA_PTG).get();
            for (DefaultEdge edge : edges) {
                ExecutionGraphVertex ivertex = (ExecutionGraphVertex) graph.getEdgeSource(edge);
                CellFormulaExpression formula = buildFormula(ivertex, graph);
                formulaStringNodes.add(formula.formulaStr());
                formulaValuesNodes.add(formula.formulaValues());
                formulaPtgNodes.add(formula.formulaPtgStr());
                ptgNodes.add(formula.ptgStr());
            }
            CellFormulaExpression iformula = (CellFormulaExpression) vertex.formula();
            iformula.formulaStr(createFormulaString(formulaPtg[0], formulaStringNodes, vertex));
            iformula.formulaValues(createFormulaString(formulaPtg[0], formulaValuesNodes, vertex));
            iformula.formulaPtgStr(createPtgString(formulaPtg[0], formulaPtgNodes, vertex));
            iformula.ptgStr(createPtgString(formulaPtg[0], ptgNodes, vertex));
            CellFormulaExpression result = CellFormulaExpression.copyOf(iformula);
            iformula.formulaPtgStr("");
            iformula.ptgStr("");
            return result;
        }
        case IF: {
            Set<DefaultEdge> edges = graph.incomingEdgesOf(vertex);
            List<String> formulaValuesNodes = new LinkedList<>();
            List<String> formulaPtgNodes = new LinkedList<>();
            List<String> ptgNodes = new LinkedList<>();
            for (DefaultEdge edge : edges) {
                ExecutionGraphVertex ivertex = (ExecutionGraphVertex) graph.getEdgeSource(edge);
                CellFormulaExpression formula = buildFormula(ivertex, graph);
                formulaValuesNodes.add(formula.formulaValues());
                formulaPtgNodes.add(formula.formulaPtgStr());
                ptgNodes.add(formula.ptgStr());
                if (OPERATOR != ivertex.type) { vertex.value = ivertex.value; }
            }
            // TODO: are you sure you need only '=' ?
            // put the comparison condition first so it renders as IF(cond, branch)
            Collections.sort(formulaValuesNodes, (n1, n2) -> isCompareOperand(n1) ? -1 : 0);
            CellFormulaExpression iformula = (CellFormulaExpression) vertex.formula;
            iformula.formulaValues(createFormulaString(null, formulaValuesNodes, vertex));
            iformula.formulaPtgStr(createPtgString(null, formulaPtgNodes, vertex));
            iformula.ptgStr(createPtgString(null, ptgNodes, vertex));
            CellFormulaExpression result = CellFormulaExpression.copyOf(iformula);
            iformula.formulaPtgStr("");
            iformula.ptgStr("");
            return result;
        }
        case RANGE: {
            CellFormulaExpression iformula = (CellFormulaExpression) vertex.formula();
            iformula.formulaStr(vertex.property(NAME).get().toString());
            iformula.formulaValues(vertex.property(VALUE).get().toString());
            iformula.formulaPtgStr(vertex.property(VALUE).get().toString());
            iformula.ptgStr(vertex.property(NAME).get().toString());
            connectValuesToRange(vertex);
            return iformula;
        }
        case CONSTANT_VALUE: {
            vertex.property(NAME).set(CONSTANT_VALUE_NAME);
            CellFormulaExpression formula = (CellFormulaExpression) vertex.formula;
            formula.formulaStr(vertex.property(NAME).get().toString());
            formula.formulaValues(CellValue.fromCellValueToString(vertex.value()));
            formula.formulaPtgStr(CellValue.fromCellValueToString(vertex.value()));
            formula.ptgStr(vertex.property(NAME).get().toString());
            return CellFormulaExpression.copyOf(formula);
        }
        default: {
            return (CellFormulaExpression) vertex.formula;
        }
        }
    }

    /** Adds edges from every cached cell vertex inside a range to the range vertex itself. */
    protected void connectValuesToRange(ExecutionGraphVertex rangeVertex) {
        Object cellValue = ((CellValue) rangeVertex.value()).get();

        if (cellValue instanceof Area2DValues) {
            for (String adress : ((Area2DValues) cellValue).getRangeCellAddresses()) {
                if (addressToVertices.get(adress) == null) { continue; }
                for (IExecutionGraphVertex cellVertex : addressToVertices.get(adress)) {
                    connect(cellVertex, rangeVertex);
                }
            }
        }
    }

    // Parentheses carry no evaluation semantics, so they never become vertices.
    protected boolean isSkipVertex(Ptg ptg) {
        return ptg instanceof ParenthesisPtg;
    }

    protected String createFormulaString(Object optg, List<String> ops, ExecutionGraphVertex vertex) {
        String opname = "";

        if (optg == null) { // IF
            opname = "IF";
        } else if (optg instanceof Ptg) {
            opname = ptgToString((Ptg) optg);
            if (UNDEFINED_EXTERNAL_FUNCTION.equals(opname)) {
                /* if the function was not recognized as
                   internal function we use the node
                   name as the function name */
                opname = vertex.name();
            }
        } else {
            opname = optg.toString();
        }

        if (optg == null || optg instanceof AbstractFunctionPtg) {
            return stripRedundantSymbols(format("%s(%s)",
                                                opname,
                                                join(",", asList(ops)
                                                                .stream()
                                                                .map(v -> v.toString())
                                                                .collect(toList()))));
        } else if (optg instanceof ValueOperatorPtg) {
            return stripRedundantSymbols(format("%s %s %s", (ops.size() > 0) ? ops.get(0) : "", opname, (ops.size() > 1) ? ops.get(1) : ""));
        }

        return "";
    }

    protected String createPtgString(Object optg, List<String> ops, ExecutionGraphVertex vertex) {
        String opname = "";

        if (optg == null) {
            opname = "IF";
            return stripRedundantSymbols(format("%s %s ",
                                                join(",", asList(ops)
                                                                .stream()
                                                                .map(v -> v.toString())
                                                                .collect(toList())),
                                                opname));
        } else {
            opname = optg instanceof Ptg ? ptgToString((Ptg) optg) : optg.toString();
            /* if the function was not recognized as
               internal function we use the node
               name as the function name */
            opname = UNDEFINED_EXTERNAL_FUNCTION.equals(opname) ? vertex.name() : opname;
        }

        if (optg instanceof AbstractFunctionPtg) {
            return stripRedundantSymbols(format("%s %s ",
                                                join(",", asList(ops)
                                                                .stream()
                                                                .map(v -> v.toString())
                                                                .collect(toList())),
                                                opname));
        } else if (optg instanceof ValueOperatorPtg) {
            return stripRedundantSymbols(String.format("%s %s %s", (ops.size() > 0) ? ops.get(0) : "", (ops.size() > 1) ? ops.get(1) : "", opname));
        }

        return "";
    }

    /** Removes all {@link #POI_VALUE_REDUNDANT_SYMBOLS} tokens from the given string. */
    protected static String stripRedundantSymbols(String inline) {
        // BUG FIX: String#replace returns a new string; the result was previously
        // discarded, so the redundant symbols were never actually removed.
        for (String token : POI_VALUE_REDUNDANT_SYMBOLS) {
            inline = inline.replace(token, "");
        }
        return inline;
    }

    /** Maps a Ptg to a user-friendly token ("+", "-", cell ref, function name, ...). */
    public static String ptgToString(Ptg ptg) {
        Class<? extends Ptg> ptgCls = ptg.getClass();

        if (ptgCls.isAssignableFrom(AddPtg.class)) {
            return "+";
        } else if (ptgCls.isAssignableFrom(SubtractPtg.class)) {
            return "-";
        } else if (ptgCls.isAssignableFrom(DividePtg.class)) {
            return "/";
        } else if (ptgCls.isAssignableFrom(MultiplyPtg.class)) {
            return "*";
        } else if (ptgCls.isAssignableFrom(EqualPtg.class)) {
            return "=";
        } else if (ptgCls.isAssignableFrom(GreaterThanPtg.class)) {
            return ">";
        } else if (ptgCls.isAssignableFrom(LessThanPtg.class)) {
            return "<";
        } else if (ptgCls.isAssignableFrom(NotEqualPtg.class)) {
            return "<>";
        } else if (ptgCls.isAssignableFrom(UnaryPlusPtg.class)) {
            return "+";
        }

        try {
            return ptg.toFormulaString();
        } catch (Exception e) {
            // some Ptg subclasses cannot render a formula string on their own
            return ptg.getClass().getSimpleName();
        }
    }

    /** Classifies a Ptg into the vertex {@link Type} used by the execution graph. */
    public static Type ptgToVertexType(Ptg ptg) {

        if (ptg instanceof AbstractFunctionPtg) { // functions: SUM, COUNT, COS, etc.
            return FUNCTION;
        } else if (ptg instanceof ValueOperatorPtg) { // single operators: +, -, /, *, =
            return OPERATOR;
        } else if (ptg instanceof RefPtg || ptg instanceof Ref3DPxg || ptg instanceof NameXPxg) {
            return CELL_WITH_VALUE;
        } else if (ptg instanceof ScalarConstantPtg) {
            return CONSTANT_VALUE;
        } else if (ptg instanceof AreaPtg) {
            return RANGE;
        }

        // TODO: add more for our cases
        throw new IllegalArgumentException("Unsupported Ptg class: " + ptg.getClass());
    }

    /**
     * Does copy of all properties for every Vertex from @param vertices. the
     * first @param istandard is used as object to copy from.
     */
    protected static void copyProperties(IExecutionGraphVertex istandard, Set<IExecutionGraphVertex> vertices) {
        ExecutionGraphVertex standard = (ExecutionGraphVertex) istandard;

        for (IExecutionGraphVertex ivertex : vertices) {
            if (istandard == ivertex) { continue; }

            ExecutionGraphVertex vertex = (ExecutionGraphVertex) ivertex;

            // copy properties, except identity and position, which stay per-vertex
            for (PropertyName pname : PropertyName.values()) {
                if (pname == PropertyName.VERTEX_ID) continue;
                if (pname == PropertyName.INDEX_IN_FORMULA) continue;

                vertex.property(pname).set(standard.property(pname).get());
            }
        }
    }

    // TODO: not the best solution, but works as for now
    protected static boolean isCompareOperand(String name) {
        return name.contains("=") || name.contains("<") || name.contains(">") || name.contains("<>") || name.contains("=>") || name.contains("<=");
    }
}
| calculation-engine/engine-core/src/main/java/com/dataart/spreadsheetanalytics/engine/execgraph/PoiExecutionGraphBuilder.java | package com.dataart.spreadsheetanalytics.engine.execgraph;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.CELL_WITH_FORMULA;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.CELL_WITH_VALUE;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.CONSTANT_VALUE;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.EMPTY_CELL;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.FUNCTION;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.IF;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.OPERATOR;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.RANGE;
import static com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type.isCell;
import static java.lang.String.format;
import static java.lang.String.join;
import static java.util.Arrays.asList;
import static java.util.stream.Collectors.toList;
import static org.apache.poi.common.execgraph.IExecutionGraphVertexProperty.PropertyName.FORMULA_PTG;
import static org.apache.poi.common.execgraph.IExecutionGraphVertexProperty.PropertyName.NAME;
import static org.apache.poi.common.execgraph.IExecutionGraphVertexProperty.PropertyName.TYPE;
import static org.apache.poi.common.execgraph.IExecutionGraphVertexProperty.PropertyName.VALUE;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.poi.common.execgraph.IExecutionGraphBuilder;
import org.apache.poi.common.execgraph.IExecutionGraphVertex;
import org.apache.poi.common.execgraph.IExecutionGraphVertexProperty;
import org.apache.poi.common.execgraph.IExecutionGraphVertexProperty.PropertyName;
import org.apache.poi.ss.formula.eval.ValueEval;
import org.apache.poi.ss.formula.functions.Area2DValues;
import org.apache.poi.ss.formula.ptg.AbstractFunctionPtg;
import org.apache.poi.ss.formula.ptg.AddPtg;
import org.apache.poi.ss.formula.ptg.AreaPtg;
import org.apache.poi.ss.formula.ptg.DividePtg;
import org.apache.poi.ss.formula.ptg.EqualPtg;
import org.apache.poi.ss.formula.ptg.GreaterThanPtg;
import org.apache.poi.ss.formula.ptg.LessThanPtg;
import org.apache.poi.ss.formula.ptg.MultiplyPtg;
import org.apache.poi.ss.formula.ptg.NameXPxg;
import org.apache.poi.ss.formula.ptg.NotEqualPtg;
import org.apache.poi.ss.formula.ptg.ParenthesisPtg;
import org.apache.poi.ss.formula.ptg.Ptg;
import org.apache.poi.ss.formula.ptg.Ref3DPxg;
import org.apache.poi.ss.formula.ptg.RefPtg;
import org.apache.poi.ss.formula.ptg.ScalarConstantPtg;
import org.apache.poi.ss.formula.ptg.SubtractPtg;
import org.apache.poi.ss.formula.ptg.UnaryPlusPtg;
import org.apache.poi.ss.formula.ptg.ValueOperatorPtg;
import org.jgrapht.DirectedGraph;
import org.jgrapht.graph.DefaultDirectedGraph;
import org.jgrapht.graph.DefaultEdge;
import com.dataart.spreadsheetanalytics.api.model.ICellAddress;
import com.dataart.spreadsheetanalytics.api.model.IExecutionGraphVertex.Type;
import com.dataart.spreadsheetanalytics.model.CellAddress;
import com.dataart.spreadsheetanalytics.model.CellFormulaExpression;
import com.dataart.spreadsheetanalytics.model.CellValue;
/**
 * Builds an {@link ExecutionGraph} out of Apache POI formula-evaluation callbacks.
 * Vertices represent cells, constants, ranges and operations; edges point from
 * operands to the operations/cells that consume them.
 *
 * NOTE: the builder keeps per-calculation caches ({@code valueToVertex},
 * {@code addressToVertices}) and is NOT thread safe — use one instance per
 * calculation.
 */
public class PoiExecutionGraphBuilder implements IExecutionGraphBuilder {
    // NOTE(review): this class relies on static imports that are outside the visible
    // import block (Type constants such as CELL_WITH_FORMULA/EMPTY_CELL/IF/OPERATOR,
    // plus String.format/join and Arrays.asList/Collectors.toList) — confirm in the full file.

    /** Display name assigned to scalar-constant vertices (their NAME property is overwritten with this). */
    protected static final String CONSTANT_VALUE_NAME = "VALUE";
    /** Marker returned by {@link #ptgToString(Ptg)} when a function is not a recognized internal function. */
    protected static final String UNDEFINED_EXTERNAL_FUNCTION = "#external#";
    // The execution graph under construction.
    protected final DirectedGraph<IExecutionGraphVertex, DefaultEdge> dgraph;
    // Cache: POI evaluation result -> vertex that produced it.
    protected Map<ValueEval, IExecutionGraphVertex> valueToVertex;
    // Cache: A1-style cell address -> every vertex created for that address.
    protected Map<String, Set<IExecutionGraphVertex>> addressToVertices;
    public PoiExecutionGraphBuilder() {
        this.dgraph = new DefaultDirectedGraph<>(DefaultEdge.class);
        this.valueToVertex = new HashMap<>();
        this.addressToVertices = new HashMap<>();
    }
    /** Returns the graph built so far, wrapped in the model's {@code ExecutionGraph} type. */
    public ExecutionGraph get() {
        return ExecutionGraph.wrap(dgraph);
    }
    /** Builds a one-vertex graph representing an empty cell at the given address. */
    public ExecutionGraph getSingleNodeGraph(ICellAddress address) {
        DirectedGraph<IExecutionGraphVertex, DefaultEdge> emptyGraph = new DefaultDirectedGraph<>(DefaultEdge.class);
        ExecutionGraphVertex vertex = new ExecutionGraphVertex(address.a1Address().address());
        vertex.property(TYPE).set(EMPTY_CELL);
        emptyGraph.addVertex(vertex);
        return ExecutionGraph.wrap(emptyGraph);
    }
    /**
     * This method should be used when creating a new vertex from a cell, so
     * vertex name is a cell's address. New Vertex will be created any time this
     * method is invoked. New vertex will be stored in address-to-set-of-vertices map.
     */
    @Override
    public IExecutionGraphVertex createVertex(String address) {
        // create new vertex object
        ExecutionGraphVertex v = new ExecutionGraphVertex(address);
        // add vertex to actual graph
        dgraph.addVertex(v);
        // put new vertex to set of vertices with the same address, since they
        // all must have the same set of properties and values
        Set<IExecutionGraphVertex> vs = addressToVertices.containsKey(address) ? addressToVertices.get(address) : new HashSet<>();
        vs.add(v);
        addressToVertices.put(address, vs);
        return v;
    }
    /**
     * Creates a vertex for a parsed formula token. Cell references are routed
     * through {@link #createVertex(String)} so they land in the address cache;
     * operations get a plain graph vertex. Returns null for tokens that carry
     * no semantics (e.g. parentheses).
     */
    @Override
    public IExecutionGraphVertex createVertex(Ptg ptg) {
        if (isSkipVertex(ptg)) { return null; }
        boolean isCell = ptg instanceof RefPtg;
        String name = ptgToString(ptg);
        if (isCell) { // cell
            return createVertex(name);
        } else { // operation
            ExecutionGraphVertex v = new ExecutionGraphVertex(name);
            dgraph.addVertex(v);
            return v;
        }
    }
    /** Adds a directed edge from operand {@code from} to consumer {@code to}. */
    @Override
    public void connect(IExecutionGraphVertex from, IExecutionGraphVertex to) {
        dgraph.addEdge(from, to);
    }
    /** Removes the vertex (and its incident edges) from the graph; null is a no-op. */
    @Override
    public void removeVertex(IExecutionGraphVertex vertex) {
        if (vertex == null) { return; }
        dgraph.removeVertex(vertex);
    }
    /** Associates a POI evaluation result with the vertex that produced it. */
    @Override
    public void putVertexToCache(ValueEval value, IExecutionGraphVertex vertex) {
        if (value == null) { throw new IllegalArgumentException("ValueEval to assosiate vertex with cannot be null."); }
        valueToVertex.put(value, vertex);
    }
    /** Looks up the vertex previously cached for this evaluation result, or null. */
    @Override
    public IExecutionGraphVertex getVertexFromCache(ValueEval value) {
        if (value == null) { throw new IllegalArgumentException("ValueEval to assosiate vertex with cannot be null."); }
        return valueToVertex.get(value);
    }
    /** Adds the vertex to the set cached under the given A1 address. */
    @Override
    public void putVertexToCache(String address, IExecutionGraphVertex vertex) {
        if (!addressToVertices.containsKey(address)) { addressToVertices.put(address, new HashSet<>()); }
        addressToVertices.get(address).add(vertex);
    }
    /** Returns all vertices cached for the given A1 address (may be null if none). */
    @Override
    public Set<IExecutionGraphVertex> getVerticesFromCache(String address) {
        return addressToVertices.get(address);
    }
    /** Row/column variant of {@link #putVertexToCache(String, IExecutionGraphVertex)}. */
    @Override
    public void putVertexToCache(int row, int column, IExecutionGraphVertex vertex) {
        putVertexToCache(CellAddress.toA1Address(row, column), vertex);
    }
    /** Row/column variant of {@link #getVerticesFromCache(String)}. */
    @Override
    public Set<IExecutionGraphVertex> getVerticesFromCache(int row, int column) {
        return getVerticesFromCache(CellAddress.toA1Address(row, column));
    }
    /** Exposes a single named property of the (implementation-typed) vertex. */
    @Override
    public IExecutionGraphVertexProperty getVertexProperty(IExecutionGraphVertex vertex, PropertyName property) {
        return ((ExecutionGraphVertex) vertex).property(property);
    }
    /**
     * Do anything you want here. After graph is completed and we are out of POI
     * context you can add\remove\etc any information you want.
     *
     * Concretely this pass: (1) makes all vertices sharing an address carry the
     * same properties (copied from the CELL_WITH_FORMULA vertex when present);
     * (2) re-links subgraphs onto duplicate cell vertices; (3) propagates the
     * taken-branch value onto IF vertices; (4) builds the formula expression
     * string starting from the (first found) sink vertex.
     */
    public void runPostProcessing() {
        DirectedGraph<IExecutionGraphVertex, DefaultEdge> graph = dgraph;
        // make identical vertices have the same set of properties
        // two vertices are identical if they have the same address value.
        // Id for every vertex is unique, so this is not a flag here
        for (String address : addressToVertices.keySet()) {
            Set<IExecutionGraphVertex> vs = addressToVertices.get(address);
            // the logic below is very fragile and based on some empirical model
            // and may not work for other type of graphs
            if (vs != null && vs.size() > 1) {
                IExecutionGraphVertex standard = null;
                for (IExecutionGraphVertex ivertex : vs) {
                    ExecutionGraphVertex vertex = (ExecutionGraphVertex) ivertex;
                    // the formula-bearing vertex is the "standard" all duplicates copy from
                    if (CELL_WITH_FORMULA == (Type) vertex.property(TYPE).get()) { standard = vertex; break; }
                }
                if (standard != null) {
                    copyProperties(standard, vs);
                }
            }
        }
        // copy or link subgraphs to identical vertices
        // and
        // modify Formula field with additional values
        Map<String, AtomicInteger> adressToCount = new HashMap<>();
        for (IExecutionGraphVertex ivertex : graph.vertexSet()) {
            ExecutionGraphVertex vertex = (ExecutionGraphVertex) ivertex;
            // restore/add subgraphs to identical vertices
            Type type = (Type) vertex.property(TYPE).get();
            if (isCell(type)) {
                String address = (String) vertex.property(NAME).get();
                adressToCount.putIfAbsent(address, new AtomicInteger(0));
                if (adressToCount.get(address).incrementAndGet() > 1) { //count > 1
                    // need to link
                    Set<IExecutionGraphVertex> subgraphTops = new HashSet<>();
                    // TODO: point to optimize!
                    for (IExecutionGraphVertex itmpVertex : graph.vertexSet()) {
                        ExecutionGraphVertex tmpVertex = (ExecutionGraphVertex) itmpVertex;
                        String tmpAddress = (String) tmpVertex.property(NAME).get();
                        if (address.equals(tmpAddress)) { // check for subgraph
                            for (DefaultEdge tmpEdge : graph.incomingEdgesOf(tmpVertex)) {
                                subgraphTops.add(graph.getEdgeSource(tmpEdge));
                            }
                        }
                    }
                    for (IExecutionGraphVertex subVertex : subgraphTops) {
                        for (IExecutionGraphVertex vertexOfAddress : addressToVertices.get(address)) {
                            graph.addEdge(subVertex, vertexOfAddress);
                        }
                    }
                }
            }
            /* Adding IF Value */
            if (IF == type) {
                Set<DefaultEdge> two = graph.incomingEdgesOf(vertex);
                if (two.size() != 2) { throw new IllegalStateException("IF must have only two incoming edges."); }
                Object ifBranchValue = null;
                for (DefaultEdge e : two) {
                    ExecutionGraphVertex oneOfTwo = (ExecutionGraphVertex) graph.getEdgeSource(e);
                    // the non-comparison operand is the branch whose value IF produced
                    if (!isCompareOperand(oneOfTwo.name())) {
                        ifBranchValue = oneOfTwo.property(VALUE).get();
                        break;
                    }
                }
                vertex.property(VALUE).set(ifBranchValue);
            }
        }
        // build the formula string starting from a sink (no outgoing edges);
        // only the first sink found is processed
        for (IExecutionGraphVertex vert : graph.vertexSet()) {
            if (graph.outgoingEdgesOf(vert).isEmpty()) {
                ExecutionGraphVertex root = (ExecutionGraphVertex) vert;
                root.formula = buildFormula(root, graph);
                break;
            }
        }
    }
    /* Modifications for: FORMULA */
    // set formula_values to user-friendly string like: '1 + 2' or
    // 'SUM(2,1)'
    // For OPERATOR and FUNCTION types
    /**
     * Recursively builds the human-readable formula representation for the
     * subgraph rooted at {@code vertex}, mutating vertex formula/value fields
     * along the way and returning a copy of the resulting expression.
     */
    protected CellFormulaExpression buildFormula(ExecutionGraphVertex vertex, DirectedGraph<IExecutionGraphVertex, DefaultEdge> graph) {
        switch (vertex.type) {
            case CELL_WITH_VALUE: {
                CellFormulaExpression formula = (CellFormulaExpression) vertex.formula;
                formula.formulaStr(vertex.property(NAME).get().toString());
                formula.formulaValues(CellValue.fromCellValueToString(vertex.value()));
                formula.formulaPtgStr(CellValue.fromCellValueToString(vertex.value()));
                formula.ptgStr(vertex.property(NAME).get().toString());
                // cells holding an empty value are re-typed as EMPTY_CELL
                if (vertex.property(VALUE).get().toString().isEmpty()) {
                    vertex.property(TYPE).set(EMPTY_CELL);
                }
                return formula;
            }
            case CELL_WITH_REFERENCE:
            case CELL_WITH_FORMULA: {
                // exactly one incoming edge is assumed: the defining expression
                DefaultEdge edge = graph.incomingEdgesOf(vertex).iterator().next();
                ExecutionGraphVertex ivertex = (ExecutionGraphVertex) graph.getEdgeSource(edge);
                CellFormulaExpression formula = buildFormula(ivertex, graph);
                vertex.formula = CellFormulaExpression.copyOf(formula);
                vertex.value = ivertex.value;
                return CellFormulaExpression.copyOf(formula);
            }
            case OPERATOR:
            case FUNCTION: {
                Set<DefaultEdge> edges = graph.incomingEdgesOf(vertex);
                List<String> formulaStringNodes = new LinkedList<>();
                List<String> formulaValuesNodes = new LinkedList<>();
                List<String> formulaPtgNodes = new LinkedList<>();
                List<String> ptgNodes = new LinkedList<>();
                Object[] formulaPtg = (Object[]) vertex.property(FORMULA_PTG).get();
                for (DefaultEdge edge : edges) {
                    ExecutionGraphVertex ivertex = (ExecutionGraphVertex) graph.getEdgeSource(edge);
                    CellFormulaExpression formula = buildFormula(ivertex, graph);
                    formulaStringNodes.add(formula.formulaStr());
                    formulaValuesNodes.add(formula.formulaValues());
                    formulaPtgNodes.add(formula.formulaPtgStr());
                    ptgNodes.add(formula.ptgStr());
                }
                CellFormulaExpression iformula = (CellFormulaExpression) vertex.formula();
                iformula.formulaStr(createFormulaString(formulaPtg[0], formulaStringNodes, vertex));
                iformula.formulaValues(createFormulaString(formulaPtg[0], formulaValuesNodes, vertex));
                iformula.formulaPtgStr(createPtgString(formulaPtg[0], formulaPtgNodes, vertex));
                iformula.ptgStr(createPtgString(formulaPtg[0], ptgNodes, vertex));
                CellFormulaExpression result = CellFormulaExpression.copyOf(iformula);
                // ptg strings are cleared on the vertex after copying the result
                iformula.formulaPtgStr("");
                iformula.ptgStr("");
                return result;
            }
            case IF: {
                Set<DefaultEdge> edges = graph.incomingEdgesOf(vertex);
                List<String> formulaValuesNodes = new LinkedList<>();
                List<String> formulaPtgNodes = new LinkedList<>();
                List<String> ptgNodes = new LinkedList<>();
                for (DefaultEdge edge : edges) {
                    ExecutionGraphVertex ivertex = (ExecutionGraphVertex) graph.getEdgeSource(edge);
                    CellFormulaExpression formula = buildFormula(ivertex, graph);
                    formulaValuesNodes.add(formula.formulaValues());
                    formulaPtgNodes.add(formula.formulaPtgStr());
                    ptgNodes.add(formula.ptgStr());
                    // the non-operator operand carries the IF result value
                    if (OPERATOR != ivertex.type) { vertex.value = ivertex.value; }
                }
                // TODO: are you sure you need only '=' ?
                // move the comparison operand to the front of the argument list
                Collections.sort(formulaValuesNodes, (n1, n2) -> (n1.contains("=")||n1.contains("<")||n1.contains(">")) ? -1 : 0);
                CellFormulaExpression iformula = (CellFormulaExpression) vertex.formula;
                iformula.formulaValues(createFormulaString(null, formulaValuesNodes, vertex));
                iformula.formulaPtgStr(createPtgString(null, formulaPtgNodes, vertex));
                iformula.ptgStr(createPtgString(null, ptgNodes, vertex));
                CellFormulaExpression result = CellFormulaExpression.copyOf(iformula);
                iformula.formulaPtgStr("");
                iformula.ptgStr("");
                return result;
            }
            case RANGE: {
                CellFormulaExpression iformula = (CellFormulaExpression) vertex.formula();
                iformula.formulaStr(vertex.property(NAME).get().toString());
                iformula.formulaValues(vertex.property(VALUE).get().toString());
                iformula.formulaPtgStr(vertex.property(VALUE).get().toString());
                iformula.ptgStr(vertex.property(NAME).get().toString());
                connectValuesToRange(vertex);
                return iformula;
            }
            case CONSTANT_VALUE: {
                vertex.property(NAME).set(CONSTANT_VALUE_NAME);
                CellFormulaExpression formula = (CellFormulaExpression) vertex.formula;
                formula.formulaStr(vertex.property(NAME).get().toString());
                formula.formulaValues(CellValue.fromCellValueToString(vertex.value()));
                formula.formulaPtgStr(CellValue.fromCellValueToString(vertex.value()));
                formula.ptgStr(vertex.property(NAME).get().toString());
                return CellFormulaExpression.copyOf(formula);
            }
            default: {
                return (CellFormulaExpression) vertex.formula;
            }
        }
    }
    /**
     * Connects every cached cell vertex covered by the range to the range
     * vertex, so the range depends on its member cells.
     */
    protected void connectValuesToRange(ExecutionGraphVertex rangeVertex) {
        Object cellValue = ((CellValue) rangeVertex.value()).get();
        if (cellValue instanceof Area2DValues) {
            for (String adress : ((Area2DValues) cellValue).getRangeCellAddresses()) {
                if (addressToVertices.get(adress) == null) { continue; }
                for (IExecutionGraphVertex cellVertex : addressToVertices.get(adress)) {
                    connect(cellVertex, rangeVertex);
                }
            }
        }
    }
    /** True for tokens that should produce no vertex (currently only parentheses). */
    protected boolean isSkipVertex(Ptg ptg) {
        return ptg instanceof ParenthesisPtg;
    }
    /**
     * Renders an operation as a user-facing formula string: "FN(a,b)" for
     * functions/IF, "a op b" for binary value operators, "" otherwise.
     */
    protected String createFormulaString(Object optg, List<String> ops, ExecutionGraphVertex vertex) {
        String opname = "";
        if (optg == null) { // IF
            opname = "IF";
        } else if (optg instanceof Ptg) {
            opname = ptgToString((Ptg) optg);
            if (UNDEFINED_EXTERNAL_FUNCTION.equals(opname)) {
                /* if the function was not recognized as
                internal function we use the node
                name as the function name */
                opname = vertex.name();
            }
        } else {
            opname = optg.toString();
        }
        if (optg == null || optg instanceof AbstractFunctionPtg) {
            return stripRedundantSymbols(format("%s(%s)",
                                                opname,
                                                join(",", asList(ops)
                                                               .stream()
                                                               .map(v -> v.toString())
                                                               .collect(toList()))));
        } else if (optg instanceof ValueOperatorPtg) {
            return stripRedundantSymbols(format("%s %s %s", (ops.size() > 0) ? ops.get(0) : "", opname, (ops.size() > 1) ? ops.get(1) : ""));
        }
        return "";
    }
    /**
     * Renders an operation in postfix ("ptg") notation: operands first, then
     * the operation name; "" for unsupported token kinds.
     */
    protected String createPtgString(Object optg, List<String> ops, ExecutionGraphVertex vertex) {
        String opname = "";
        if (optg == null) {
            opname = "IF";
            return stripRedundantSymbols(format("%s %s ",
                                                join(",", asList(ops)
                                                              .stream()
                                                              .map(v -> v.toString())
                                                              .collect(toList())),
                                                opname));
        } else {
            opname = optg instanceof Ptg ? ptgToString((Ptg) optg) : optg.toString();
            /* if the function was not recognized as
            internal function we use the node
            name as the function name */
            opname = UNDEFINED_EXTERNAL_FUNCTION.equals(opname) ? vertex.name() : opname;
        }
        if (optg instanceof AbstractFunctionPtg) {
            return stripRedundantSymbols(format("%s %s ",
                                                join(",", asList(ops)
                                                              .stream()
                                                              .map(v -> v.toString())
                                                              .collect(toList())),
                                                opname));
        } else if (optg instanceof ValueOperatorPtg) {
            return stripRedundantSymbols(String.format("%s %s %s", (ops.size() > 0) ? ops.get(0) : "", (ops.size() > 1) ? ops.get(1) : "", opname));
        }
        return "";
    }
    /** Strips list-brackets that leak in from List.toString() when joining operands. */
    protected static String stripRedundantSymbols(String inline) {
        return inline.replace("[", "").replace("]", "");
    }
    /**
     * Maps a POI formula token to its display text: symbolic form for known
     * operators, otherwise the token's own formula string (falling back to the
     * class name if POI cannot render it).
     */
    public static String ptgToString(Ptg ptg) {
        Class<? extends Ptg> ptgCls = ptg.getClass();
        // NOTE(review): isAssignableFrom with the argument order used here only
        // matches exact classes, not subclasses — presumably intentional; verify.
        if (ptgCls.isAssignableFrom(AddPtg.class)) {
            return "+";
        } else if (ptgCls.isAssignableFrom(SubtractPtg.class)) {
            return "-";
        } else if (ptgCls.isAssignableFrom(DividePtg.class)) {
            return "/";
        } else if (ptgCls.isAssignableFrom(MultiplyPtg.class)) {
            return "*";
        } else if (ptgCls.isAssignableFrom(EqualPtg.class)) {
            return "=";
        } else if (ptgCls.isAssignableFrom(GreaterThanPtg.class)) {
            return ">";
        } else if (ptgCls.isAssignableFrom(LessThanPtg.class)) {
            return "<";
        } else if (ptgCls.isAssignableFrom(NotEqualPtg.class)) {
            return "<>";
        } else if (ptgCls.isAssignableFrom(UnaryPlusPtg.class)) {
            return "+";
        }
        try {
            return ptg.toFormulaString();
        } catch (Exception e) {
            return ptg.getClass().getSimpleName();
        }
    }
    /** Classifies a POI formula token into the model's vertex {@code Type}. */
    public static Type ptgToVertexType(Ptg ptg) {
        if (ptg instanceof AbstractFunctionPtg) { // functions: SUM, COUNT, COS, etc.
            return FUNCTION;
        } else if (ptg instanceof ValueOperatorPtg) { // single operators: +, -, /, *, =
            return OPERATOR;
        } else if (ptg instanceof RefPtg || ptg instanceof Ref3DPxg || ptg instanceof NameXPxg) {
            return CELL_WITH_VALUE;
        } else if (ptg instanceof ScalarConstantPtg) {
            return CONSTANT_VALUE;
        } else if (ptg instanceof AreaPtg) {
            return RANGE;
        }
        // TODO: add more for our cases
        throw new IllegalArgumentException("Unsupported Ptg class: " + ptg.getClass());
    }
    /**
     * Does copy of all properties for every Vertex from @param vertices. the
     * first @param istandard is used as object to copy from.
     * VERTEX_ID and INDEX_IN_FORMULA stay per-vertex and are never copied.
     */
    protected static void copyProperties(IExecutionGraphVertex istandard, Set<IExecutionGraphVertex> vertices) {
        ExecutionGraphVertex standard = (ExecutionGraphVertex) istandard;
        for (IExecutionGraphVertex ivertex : vertices) {
            if (istandard == ivertex) { continue; }
            ExecutionGraphVertex vertex = (ExecutionGraphVertex) ivertex;
            // copy properties
            for (PropertyName pname : PropertyName.values()) {
                if (pname == PropertyName.VERTEX_ID) continue;
                if (pname == PropertyName.INDEX_IN_FORMULA) continue;
                vertex.property(pname).set(standard.property(pname).get());
            }
        }
    }
    // TODO: not the best solution, but works as for now
    /** True when the vertex name is one of the comparison operators used by IF conditions. */
    protected static boolean isCompareOperand(String name) {
        return "=".equals(name) || ">".equals(name) || "<".equals(name) || "<>".equals(name);
    }
}
| #40 fixing for code review comments
| calculation-engine/engine-core/src/main/java/com/dataart/spreadsheetanalytics/engine/execgraph/PoiExecutionGraphBuilder.java | #40 fixing for code review comments |
|
Java | apache-2.0 | b0b2026408574b5a241a5899da35d84c1ad076b9 | 0 | bhathiya/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,harsha89/carbon-apimgt,nuwand/carbon-apimgt,nuwand/carbon-apimgt,wso2/carbon-apimgt,uvindra/carbon-apimgt,bhathiya/carbon-apimgt,tharikaGitHub/carbon-apimgt,Rajith90/carbon-apimgt,uvindra/carbon-apimgt,prasa7/carbon-apimgt,nuwand/carbon-apimgt,wso2/carbon-apimgt,praminda/carbon-apimgt,fazlan-nazeem/carbon-apimgt,jaadds/carbon-apimgt,wso2/carbon-apimgt,malinthaprasan/carbon-apimgt,isharac/carbon-apimgt,Rajith90/carbon-apimgt,jaadds/carbon-apimgt,chamindias/carbon-apimgt,malinthaprasan/carbon-apimgt,harsha89/carbon-apimgt,praminda/carbon-apimgt,tharindu1st/carbon-apimgt,fazlan-nazeem/carbon-apimgt,chamilaadhi/carbon-apimgt,chamilaadhi/carbon-apimgt,uvindra/carbon-apimgt,chamindias/carbon-apimgt,ruks/carbon-apimgt,isharac/carbon-apimgt,praminda/carbon-apimgt,chamilaadhi/carbon-apimgt,malinthaprasan/carbon-apimgt,harsha89/carbon-apimgt,prasa7/carbon-apimgt,ruks/carbon-apimgt,jaadds/carbon-apimgt,Rajith90/carbon-apimgt,tharikaGitHub/carbon-apimgt,chamindias/carbon-apimgt,chamindias/carbon-apimgt,jaadds/carbon-apimgt,tharikaGitHub/carbon-apimgt,tharindu1st/carbon-apimgt,harsha89/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,isharac/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,uvindra/carbon-apimgt,prasa7/carbon-apimgt,bhathiya/carbon-apimgt,chamilaadhi/carbon-apimgt,isharac/carbon-apimgt,fazlan-nazeem/carbon-apimgt,bhathiya/carbon-apimgt,wso2/carbon-apimgt,malinthaprasan/carbon-apimgt,nuwand/carbon-apimgt,ruks/carbon-apimgt,tharikaGitHub/carbon-apimgt,prasa7/carbon-apimgt,tharindu1st/carbon-apimgt,sanjeewa-malalgoda/carbon-apimgt,tharindu1st/carbon-apimgt,fazlan-nazeem/carbon-apimgt,ruks/carbon-apimgt,Rajith90/carbon-apimgt | /*
* Copyright (c) 2019 WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.rest.api.publisher.v1.impl;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import graphql.language.FieldDefinition;
import graphql.language.ObjectTypeDefinition;
import graphql.language.TypeDefinition;
import graphql.schema.GraphQLSchema;
import graphql.schema.idl.SchemaParser;
import graphql.schema.idl.TypeDefinitionRegistry;
import graphql.schema.idl.UnExecutableSchemaGenerator;
import graphql.schema.idl.errors.SchemaProblem;
import graphql.schema.validation.SchemaValidationError;
import graphql.schema.validation.SchemaValidator;
import org.apache.axiom.util.base64.Base64Utils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.cxf.jaxrs.ext.MessageContext;
import org.apache.http.HttpStatus;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.cxf.phase.PhaseInterceptorChain;
import org.apache.cxf.jaxrs.ext.multipart.Attachment;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.wso2.carbon.apimgt.api.APIDefinition;
import org.wso2.carbon.apimgt.api.APIDefinitionValidationResponse;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.APIProvider;
import org.wso2.carbon.apimgt.api.FaultGatewaysException;
import org.wso2.carbon.apimgt.api.MonetizationException;
import org.wso2.carbon.apimgt.api.model.API;
import org.wso2.carbon.apimgt.api.model.APIIdentifier;
import org.wso2.carbon.apimgt.api.model.APIStateChangeResponse;
import org.wso2.carbon.apimgt.api.model.APIStore;
import org.wso2.carbon.apimgt.api.model.AccessTokenInfo;
import org.wso2.carbon.apimgt.api.model.Documentation;
import org.wso2.carbon.apimgt.api.model.DuplicateAPIException;
import org.wso2.carbon.apimgt.api.model.KeyManager;
import org.wso2.carbon.apimgt.api.model.Label;
import org.wso2.carbon.apimgt.api.model.LifeCycleEvent;
import org.wso2.carbon.apimgt.api.model.Monetization;
import org.wso2.carbon.apimgt.api.model.ResourceFile;
import org.wso2.carbon.apimgt.api.model.ResourcePath;
import org.wso2.carbon.apimgt.api.model.Scope;
import org.wso2.carbon.apimgt.api.model.SubscribedAPI;
import org.wso2.carbon.apimgt.api.model.SwaggerData;
import org.wso2.carbon.apimgt.api.model.Tier;
import org.wso2.carbon.apimgt.api.model.URITemplate;
import org.wso2.carbon.apimgt.api.model.policy.APIPolicy;
import org.wso2.carbon.apimgt.api.model.policy.Policy;
import org.wso2.carbon.apimgt.api.model.policy.PolicyConstants;
import org.wso2.carbon.apimgt.impl.APIConstants;
import org.wso2.carbon.apimgt.impl.APIManagerConfiguration;
import org.wso2.carbon.apimgt.impl.GZIPUtils;
import org.wso2.carbon.apimgt.impl.dao.ApiMgtDAO;
import org.wso2.carbon.apimgt.impl.definitions.APIDefinitionFromOpenAPISpec;
import org.wso2.carbon.apimgt.impl.definitions.OAS2Parser;
import org.wso2.carbon.apimgt.impl.definitions.OAS3Parser;
import org.wso2.carbon.apimgt.impl.definitions.OASParserUtil;
import org.wso2.carbon.apimgt.impl.factory.KeyManagerHolder;
import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder;
import org.wso2.carbon.apimgt.impl.utils.APIVersionStringComparator;
import org.wso2.carbon.apimgt.impl.wsdl.SequenceGenerator;
import org.wso2.carbon.apimgt.impl.wsdl.model.WSDLValidationResponse;
import org.wso2.carbon.apimgt.impl.wsdl.util.SOAPOperationBindingUtils;
import org.wso2.carbon.apimgt.impl.utils.APIMWSDLReader;
import org.wso2.carbon.apimgt.impl.utils.APIUtil;
import org.wso2.carbon.apimgt.impl.wsdl.util.SequenceUtils;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.ApisApiService;
import java.io.*;
import java.lang.reflect.Field;
import java.net.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Map;
import org.apache.cxf.jaxrs.ext.multipart.Attachment;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.*;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.utils.RestApiPublisherUtils;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.utils.mappings.APIMappingUtil;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.utils.mappings.DocumentationMappingUtil;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.utils.mappings.ExternalStoreMappingUtil;
import org.wso2.carbon.apimgt.rest.api.util.RestApiConstants;
import org.wso2.carbon.apimgt.rest.api.util.dto.ErrorDTO;
import org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import java.util.Optional;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
public class ApisApiServiceImpl implements ApisApiService {
private static final Log log = LogFactory.getLog(ApisApiServiceImpl.class);
@Override
public Response apisGet(Integer limit, Integer offset, String xWSO2Tenant, String query,
String ifNoneMatch, Boolean expand, String accept ,String tenantDomain, MessageContext messageContext) {
List<API> allMatchedApis = new ArrayList<>();
APIListDTO apiListDTO;
//pre-processing
//setting default limit and offset values if they are not set
limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT;
offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT;
query = query == null ? "" : query;
expand = (expand != null && expand) ? true : false;
try {
String newSearchQuery = APIUtil.constructNewSearchQuery(query);
//revert content search back to normal search by name to avoid doc result complexity and to comply with REST api practices
if (newSearchQuery.startsWith(APIConstants.CONTENT_SEARCH_TYPE_PREFIX + "=")) {
newSearchQuery = newSearchQuery
.replace(APIConstants.CONTENT_SEARCH_TYPE_PREFIX + "=", APIConstants.NAME_TYPE_PREFIX + "=");
}
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
// We should send null as the provider, Otherwise searchAPIs will return all APIs of the provider
// instead of looking at type and query
String username = RestApiUtil.getLoggedInUsername();
tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(username));
boolean migrationMode = Boolean.getBoolean(RestApiConstants.MIGRATION_MODE);
/*if (migrationMode) { // migration flow
if (!StringUtils.isEmpty(targetTenantDomain)) {
tenantDomain = targetTenantDomain;
}
RestApiUtil.handleMigrationSpecificPermissionViolations(tenantDomain, username);
}*/
Map<String, Object> result = apiProvider.searchPaginatedAPIs(newSearchQuery, tenantDomain,
offset, limit, false);
Set<API> apis = (Set<API>) result.get("apis");
allMatchedApis.addAll(apis);
apiListDTO = APIMappingUtil.fromAPIListToDTO(allMatchedApis, expand);
//Add pagination section in the response
Object totalLength = result.get("length");
Integer length = 0;
if (totalLength != null) {
length = (Integer) totalLength;
}
APIMappingUtil.setPaginationParams(apiListDTO, query, offset, limit, length);
if (APIConstants.APPLICATION_GZIP.equals(accept)) {
try {
File zippedResponse = GZIPUtils.constructZippedResponse(apiListDTO);
return Response.ok().entity(zippedResponse)
.header("Content-Disposition", "attachment").
header("Content-Encoding", "gzip").build();
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError(e.getMessage(), e, log);
}
} else {
return Response.ok().entity(apiListDTO).build();
}
} catch (APIManagementException e) {
String errorMessage = "Error while retrieving APIs";
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response apisPost(APIDTO body, String oasVersion, MessageContext messageContext) {
URI createdApiUri;
APIDTO createdApiDTO;
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String username = RestApiUtil.getLoggedInUsername();
boolean isGraphQL = APIDTO.TypeEnum.GRAPHQL == body.getType();
boolean isWSAPI = APIDTO.TypeEnum.WS == body.getType();
boolean isSoapToRestConvertedApi = APIDTO.TypeEnum.SOAPTOREST == body.getType();
// validate web socket api endpoint configurations
if (isWSAPI && !RestApiPublisherUtils.isValidWSAPI(body)) {
RestApiUtil.handleBadRequest("Endpoint URLs should be valid web socket URLs", log);
}
API apiToAdd = prepareToCreateAPIByDTO(body);
//adding the api
apiProvider.addAPI(apiToAdd);
if (isSoapToRestConvertedApi) {
if (StringUtils.isNotBlank(apiToAdd.getWsdlUrl())) {
String swaggerStr = SOAPOperationBindingUtils.getSoapOperationMapping(body.getWsdlUri());
apiProvider.saveSwaggerDefinition(apiToAdd, swaggerStr);
SequenceGenerator.generateSequencesFromSwagger(swaggerStr, new Gson().toJson(body));
} else {
String errorMessage =
"Error while generating the swagger since the wsdl url is null for: " + body.getProvider()
+ "-" + body.getName() + "-" + body.getVersion();
RestApiUtil.handleInternalServerError(errorMessage, log);
}
} else if (!isWSAPI) {
APIDefinition oasParser;
if(RestApiConstants.OAS_VERSION_2.equalsIgnoreCase(oasVersion)) {
oasParser = new OAS2Parser();
} else {
oasParser = new OAS3Parser();
}
SwaggerData swaggerData = new SwaggerData(apiToAdd);
String apiDefinition = oasParser.generateAPIDefinition(swaggerData);
apiProvider.saveSwaggerDefinition(apiToAdd, apiDefinition);
}
APIIdentifier createdApiId = apiToAdd.getId();
//Retrieve the newly added API to send in the response payload
API createdApi = apiProvider.getAPI(createdApiId);
createdApiDTO = APIMappingUtil.fromAPItoDTO(createdApi);
//This URI used to set the location header of the POST response
createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId());
return Response.created(createdApiUri).entity(createdApiDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Error while adding new API : " + body.getProvider() + "-" +
body.getName() + "-" + body.getVersion() + " - " + e.getMessage();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving API location : " + body.getProvider() + "-" +
body.getName() + "-" + body.getVersion();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Prepares the API Model object to be created using the DTO object
*
* @param body APIDTO of the API
* @return API object to be created
* @throws APIManagementException Error while creating the API
*/
private API prepareToCreateAPIByDTO(APIDTO body) throws APIManagementException {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String username = RestApiUtil.getLoggedInUsername();
List<String> apiSecuritySchemes = body.getSecurityScheme();//todo check list vs string
if (!apiProvider.isClientCertificateBasedAuthenticationConfigured() && apiSecuritySchemes != null) {
for (String apiSecurityScheme : apiSecuritySchemes) {
if (apiSecurityScheme.contains(APIConstants.API_SECURITY_MUTUAL_SSL)) {
RestApiUtil.handleBadRequest("Mutual SSL Based authentication is not supported in this server", log);
}
}
}
if (body.getAccessControlRoles() != null) {
String errorMessage = RestApiPublisherUtils.validateUserRoles(body.getAccessControlRoles());
if (!errorMessage.isEmpty()) {
RestApiUtil.handleBadRequest(errorMessage, log);
}
}
if (body.getAdditionalProperties() != null) {
String errorMessage = RestApiPublisherUtils
.validateAdditionalProperties(body.getAdditionalProperties());
if (!errorMessage.isEmpty()) {
RestApiUtil.handleBadRequest(errorMessage, log);
}
}
if (body.getContext() == null) {
RestApiUtil.handleBadRequest("Parameter: \"context\" cannot be null", log);
} else if (body.getContext().endsWith("/")) {
RestApiUtil.handleBadRequest("Context cannot end with '/' character", log);
}
if (apiProvider.isApiNameWithDifferentCaseExist(body.getName())) {
RestApiUtil.handleBadRequest("Error occurred while adding API. API with name " + body.getName()
+ " already exists.", log);
}
//Get all existing versions of api been adding
List<String> apiVersions = apiProvider.getApiVersionsMatchingApiName(body.getName(), username);
if (apiVersions.size() > 0) {
//If any previous version exists
for (String version : apiVersions) {
if (version.equalsIgnoreCase(body.getVersion())) {
//If version already exists
if (apiProvider.isDuplicateContextTemplate(body.getContext())) {
RestApiUtil.handleResourceAlreadyExistsError("Error occurred while " +
"adding the API. A duplicate API already exists for "
+ body.getName() + "-" + body.getVersion(), log);
} else {
RestApiUtil.handleBadRequest("Error occurred while adding API. API with name " +
body.getName() + " already exists with different " +
"context", log);
}
}
}
} else {
//If no any previous version exists
if (apiProvider.isDuplicateContextTemplate(body.getContext())) {
RestApiUtil.handleBadRequest("Error occurred while adding the API. A duplicate API context " +
"already exists for " + body.getContext(), log);
}
}
//Check if the user has admin permission before applying a different provider than the current user
String provider = body.getProvider();
if (!StringUtils.isBlank(provider) && !provider.equals(username)) {
if (!APIUtil.hasPermission(username, APIConstants.Permissions.APIM_ADMIN)) {
if (log.isDebugEnabled()) {
log.debug("User " + username + " does not have admin permission ("
+ APIConstants.Permissions.APIM_ADMIN + ") hence provider (" +
provider + ") overridden with current user (" + username + ")");
}
provider = username;
}
} else {
//Set username in case provider is null or empty
provider = username;
}
List<String> tiersFromDTO = body.getPolicies();
//check whether the added API's tiers are all valid
Set<Tier> definedTiers = apiProvider.getTiers();
List<String> invalidTiers = RestApiUtil.getInvalidTierNames(definedTiers, tiersFromDTO);
if (invalidTiers.size() > 0) {
RestApiUtil.handleBadRequest(
"Specified tier(s) " + Arrays.toString(invalidTiers.toArray()) + " are invalid", log);
}
APIPolicy apiPolicy = apiProvider.getAPIPolicy(username, body.getApiThrottlingPolicy());
if (apiPolicy == null && body.getApiThrottlingPolicy() != null) {
RestApiUtil.handleBadRequest(
"Specified policy " + body.getApiThrottlingPolicy() + " is invalid", log);
}
API apiToAdd = APIMappingUtil.fromDTOtoAPI(body, provider);
//Overriding some properties:
//only allow CREATED as the stating state for the new api if not status is PROTOTYPED
if (!APIConstants.PROTOTYPED.equals(apiToAdd.getStatus())) {
apiToAdd.setStatus(APIConstants.CREATED);
}
//we are setting the api owner as the logged in user until we support checking admin privileges and assigning
// the owner as a different user
apiToAdd.setApiOwner(provider);
//attach micro-geteway labels
assignLabelsToDTO(body,apiToAdd);
// set default API Level Policy
if (StringUtils.isBlank(apiToAdd.getApiLevelPolicy())) {
Policy[] apiPolicies = apiProvider.getPolicies(username, PolicyConstants.POLICY_LEVEL_API);
if (apiPolicies.length > 0) {
for (Policy policy : apiPolicies) {
if (policy.getPolicyName().equals(APIConstants.UNLIMITED_TIER)) {
apiToAdd.setApiLevelPolicy(APIConstants.UNLIMITED_TIER);
break;
}
}
if (StringUtils.isBlank(apiToAdd.getApiLevelPolicy())) {
apiToAdd.setApiLevelPolicy(apiPolicies[0].getPolicyName());
}
}
}
return apiToAdd;
}
@Override
public Response apisApiIdGet(String apiId, String xWSO2Tenant, String ifNoneMatch, MessageContext messageContext) {
APIDTO apiToReturn = getAPIByID(apiId);
return Response.ok().entity(apiToReturn).build();
}
    /**
     * Retrieves the GraphQL schema definition of the given API.
     *
     * @param apiId          UUID of the API
     * @param accept         Accept header value
     * @param ifNoneMatch    If-None-Match header value
     * @param messageContext CXF message context
     * @return HTTP 200 response carrying a {@link GraphQLSchemaDTO} with the schema definition
     *         and a generated schema file name; {@code null} after an error has been delegated
     *         to {@code RestApiUtil}
     */
    @Override
    public Response apisApiIdGraphqlSchemaGet(String apiId, String accept, String ifNoneMatch, MessageContext messageContext) {
        try {
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            // This will fail if the user does not have access to the API or the API does not exist.
            APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId,
                    tenantDomain);
            String schemaContent = apiProvider.getGraphqlSchema(apiIdentifier);
            GraphQLSchemaDTO dto = new GraphQLSchemaDTO();
            dto.setSchemaDefinition(schemaContent);
            // Schema file name: <provider><separator><apiName><version><.graphql extension>.
            dto.setName(apiIdentifier.getProviderName() + APIConstants.GRAPHQL_SCHEMA_PROVIDER_SEPERATOR +
                    apiIdentifier.getApiName() + apiIdentifier.getVersion() + APIConstants.GRAPHQL_SCHEMA_FILE_EXTENSION);
            return Response.ok().entity(dto).build();
        } catch (APIManagementException e) {
            // Auth failure occurs when cross-tenant accessing APIs. Sends 404, since we don't need
            // to expose the existence of the resource.
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil
                        .handleAuthorizationFailure("Authorization failure while retrieving schema of API: " + apiId, e,
                                log);
            } else {
                String errorMessage = "Error while retrieving schema of API: " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        }
        return null;
    }
    /**
     * Updates the GraphQL schema definition of the given API and regenerates its URI templates
     * from the operations extracted out of the new schema.
     *
     * @param apiId            UUID of the API
     * @param schemaDefinition new GraphQL schema definition content
     * @param ifMatch          If-Match header value
     * @param messageContext   CXF message context
     * @return HTTP 200 response carrying the persisted schema definition; {@code null} after an
     *         error has been delegated to {@code RestApiUtil}
     */
    @Override
    public Response apisApiIdGraphqlSchemaPut(String apiId, String schemaDefinition, String ifMatch, MessageContext messageContext) {
        try {
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId,
                    tenantDomain);
            API originalAPI = apiProvider.getAPIbyUUID(apiId, tenantDomain);
            // Derive the API's resource templates from the operations declared in the new schema.
            List<APIOperationsDTO> operationArray = extractGraphQLOperationList(schemaDefinition);
            Set<URITemplate> uriTemplates = APIMappingUtil.getURITemplates(originalAPI, operationArray);
            originalAPI.setUriTemplates(uriTemplates);
            apiProvider.saveGraphqlSchemaDefinition(originalAPI, schemaDefinition);
            apiProvider.updateAPI(originalAPI);
            // Read the schema back so the response reflects what was actually persisted.
            String schema = apiProvider.getGraphqlSchema(apiIdentifier);
            return Response.ok().entity(schema).build();
        } catch (APIManagementException | FaultGatewaysException e) {
            // Auth failure occurs when cross-tenant accessing APIs. Sends 404, since we don't need
            // to expose the existence of the resource.
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil
                        .handleAuthorizationFailure("Authorization failure while retrieving schema of API: " + apiId, e,
                                log);
            } else {
                String errorMessage = "Error while uploading schema of the API: " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        }
        return null;
    }
    /**
     * Updates an existing API with the values carried in the given DTO.
     * <p>
     * Immutable identity fields (name, version, provider, context, lifecycle status, type) are
     * overridden from the persisted API so callers cannot change them through this operation.
     * When the invoking token lacks the class-level scope of {@code APIDTO}, only fields whose
     * declared scopes match the token scopes are allowed to change (per-field scope validation).
     * For non-WebSocket APIs the OpenAPI definition is regenerated to reflect the update.
     *
     * @param apiId          UUID of the API to update
     * @param body           DTO carrying the updated API fields
     * @param ifMatch        If-Match header value
     * @param messageContext CXF message context
     * @return HTTP 200 response with the updated {@link APIDTO}; {@code null} after an error has
     *         been delegated to {@code RestApiUtil}
     */
    @Override
    public Response apisApiIdPut(String apiId, APIDTO body, String ifMatch, MessageContext messageContext) {
        APIDTO updatedApiDTO;
        // Scopes of the invoking token, placed on the exchange by WebAppAuthenticator.
        String[] tokenScopes =
                (String[]) PhaseInterceptorChain.getCurrentMessage().getExchange().get(RestApiConstants.USER_REST_API_SCOPES);
        // Validate if the USER_REST_API_SCOPES is not set in WebAppAuthenticator when scopes are validated
        if (tokenScopes == null) {
            RestApiUtil.handleInternalServerError("Error occurred while updating the API " + apiId +
                    " as the token information hasn't been correctly set internally", log);
            return null;
        }
        try {
            String username = RestApiUtil.getLoggedInUsername();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            APIProvider apiProvider = RestApiUtil.getProvider(username);
            API originalAPI = apiProvider.getAPIbyUUID(apiId, tenantDomain);
            APIIdentifier apiIdentifier = originalAPI.getId();
            // WebSocket APIs carry no OpenAPI definition; swagger regeneration is skipped for them below.
            boolean isWSAPI = originalAPI.getType() != null && APIConstants.APITransportType.WS == APIConstants.APITransportType
                    .valueOf(originalAPI.getType());
            org.wso2.carbon.apimgt.rest.api.util.annotations.Scope[] apiDtoClassAnnotatedScopes =
                    APIDTO.class.getAnnotationsByType(org.wso2.carbon.apimgt.rest.api.util.annotations.Scope.class);
            boolean hasClassLevelScope = checkClassScopeAnnotation(apiDtoClassAnnotatedScopes, tokenScopes);
            if (!hasClassLevelScope) {
                // Validate per-field scopes: keep only the field changes the token is allowed to make.
                body = getFieldOverriddenAPIDTO(body, originalAPI, tokenScopes);
            }
            // Overriding some properties the caller must not change:
            body.setName(apiIdentifier.getApiName());
            body.setVersion(apiIdentifier.getVersion());
            body.setProvider(apiIdentifier.getProviderName());
            body.setContext(originalAPI.getContextTemplate());
            body.setLifeCycleStatus(originalAPI.getStatus());
            body.setType(APIDTO.TypeEnum.fromValue(originalAPI.getType()));
            // Validate API Security: mutual SSL requires client-certificate auth to be configured.
            List<String> apiSecurity = body.getSecurityScheme();
            if (!apiProvider.isClientCertificateBasedAuthenticationConfigured() && apiSecurity != null && apiSecurity
                    .contains(APIConstants.API_SECURITY_MUTUAL_SSL)) {
                RestApiUtil.handleBadRequest("Mutual SSL based authentication is not supported in this server.", log);
            }
            // Validation for tiers: at least one subscription policy must be set.
            List<String> tiersFromDTO = body.getPolicies();
            if (tiersFromDTO == null || tiersFromDTO.isEmpty()) {
                RestApiUtil.handleBadRequest("No tier defined for the API", log);
            }
            // Check whether the added API's tiers are all valid.
            Set<Tier> definedTiers = apiProvider.getTiers();
            List<String> invalidTiers = RestApiUtil.getInvalidTierNames(definedTiers, tiersFromDTO);
            if (invalidTiers.size() > 0) {
                RestApiUtil.handleBadRequest(
                        "Specified tier(s) " + Arrays.toString(invalidTiers.toArray()) + " are invalid", log);
            }
            if (body.getAccessControlRoles() != null) {
                String errorMessage = RestApiPublisherUtils.validateUserRoles(body.getAccessControlRoles());
                if (!errorMessage.isEmpty()) {
                    RestApiUtil.handleBadRequest(errorMessage, log);
                }
            }
            if (body.getAdditionalProperties() != null) {
                String errorMessage = RestApiPublisherUtils
                        .validateAdditionalProperties(body.getAdditionalProperties());
                if (!errorMessage.isEmpty()) {
                    RestApiUtil.handleBadRequest(errorMessage, log);
                }
            }
            API apiToUpdate = APIMappingUtil.fromDTOtoAPI(body, apiIdentifier.getProviderName());
            // The thumbnail is managed through a separate endpoint; keep the stored one.
            apiToUpdate.setThumbnailUrl(originalAPI.getThumbnailUrl());
            // Attach micro-gateway labels.
            assignLabelsToDTO(body, apiToUpdate);
            apiProvider.manageAPI(apiToUpdate);
            if (!isWSAPI) {
                // Regenerate the OpenAPI definition so it stays consistent with the updated API.
                String oldDefinition = apiProvider.getOpenAPIDefinition(apiIdentifier);
                Optional<APIDefinition> definitionOptional = OASParserUtil.getOASParser(oldDefinition);
                if(!definitionOptional.isPresent()) {
                    RestApiUtil.handleInternalServerError("Error occurred while getting swagger parser.", log);
                    return null;
                }
                APIDefinition apiDefinition = definitionOptional.get();
                SwaggerData swaggerData = new SwaggerData(apiToUpdate);
                String newDefinition = apiDefinition.generateAPIDefinition(swaggerData, oldDefinition,
                        true);
                apiProvider.saveSwagger20Definition(apiToUpdate.getId(), newDefinition);
            }
            // Re-read the API so the response reflects what was actually persisted.
            API updatedApi = apiProvider.getAPI(apiIdentifier);
            updatedApiDTO = APIMappingUtil.fromAPItoDTO(updatedApi);
            return Response.ok().entity(updatedApiDTO).build();
        } catch (APIManagementException e) {
            // Auth failure occurs when cross-tenant accessing APIs. Sends 404, since we don't need
            // to expose the existence of the resource.
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil.handleAuthorizationFailure("Authorization failure while updating API : " + apiId, e, log);
            } else {
                String errorMessage = "Error while updating API : " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        } catch (FaultGatewaysException e) {
            String errorMessage = "Error while updating API : " + apiId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }
    /**
     * Runs a security audit of the API's OpenAPI definition against the external 42Crunch service
     * and returns the assessment report.
     * <p>
     * If an audit entry already exists for this API (a UUID mapping is found in the database), the
     * current definition is PUT to the existing 42Crunch API entry; otherwise a new entry is
     * created via a multipart POST and the returned id is stored. In both cases the assessment
     * report is then fetched and returned.
     * <p>
     * NOTE(review): the 42Crunch endpoint URLs are hardcoded and the POST branch sends a hardcoded
     * {@code x-api-key} credential — both should come from configuration (cf.
     * {@code APIConstants.API_SECURITY_AUDIT_API_TOKEN}); a committed credential is a security
     * risk and should be rotated. Also the {@code BufferedReader}s below are not closed on
     * exception paths (no try-with-resources) — confirm and fix separately.
     *
     * @param apiId          UUID of the API to audit
     * @param accept         Accept header value
     * @param messageContext CXF message context
     * @return HTTP 200 response carrying an {@link AuditReportDTO} (decoded report, grade, error
     *         count); {@code null} on failure or when the report request does not return 200 OK
     */
    @Override
    public Response apisApiIdAuditapiGet(String apiId, String accept, MessageContext messageContext) {
        boolean isDebugEnabled = log.isDebugEnabled();
        try {
            String username = RestApiUtil.getLoggedInUsername();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            APIProvider apiProvider = RestApiUtil.getProvider(username);
            API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
            APIIdentifier apiIdentifier = api.getId();
            String apiDefinition = apiProvider.getOpenAPIDefinition(apiIdentifier);
            // Get configuration file and retrieve API token
            APIManagerConfiguration config = ServiceReferenceHolder.getInstance()
                    .getAPIManagerConfigurationService().getAPIManagerConfiguration();
            String apiToken = config.getFirstProperty(APIConstants.API_SECURITY_AUDIT_API_TOKEN);
            // Retrieve the uuid from the database; non-null means an audit entry already exists.
            String uuid = ApiMgtDAO.getInstance().getAuditApiId(apiIdentifier);
            // Initiate JSON Parser
            JSONParser parser = new JSONParser();
            // TODO - Remove if not needed
            // JSONObject jsonObject;
            //
            // // Parse JSON String of API Definition
            // jsonObject = (JSONObject) parser.parse(apiDefinition);
            if (uuid != null) {
                // PUT Request: update the existing 42Crunch entry with the current definition.
                // Set the property to be attached in the body of the request
                // Attach the base64-encoded API definition to a property called "specfile".
                StringBuilder stringBuilder = new StringBuilder();
                stringBuilder.append("{\n");
                stringBuilder.append(" \"specfile\": \"").append(Base64Utils.encode(apiDefinition.getBytes("UTF-8"))).append("\" \n");
                stringBuilder.append("}");
                // Logic for HTTP Request
                // NOTE(review): hardcoded service URL — presumably should be configurable.
                String putUrl = "https://platform.42crunch.com/api/v1/apis/" + uuid;
                org.apache.axis2.util.URL updateApiUrl = new org.apache.axis2.util.URL(putUrl);
                try (CloseableHttpClient httpClient = (CloseableHttpClient) APIUtil.getHttpClient(updateApiUrl.getPort(), updateApiUrl.getProtocol())) {
                    HttpPut httpPut = new HttpPut(putUrl);
                    // Set the header properties of the request
                    httpPut.setHeader(APIConstants.HEADER_ACCEPT, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
                    httpPut.setHeader(APIConstants.HEADER_CONTENT_TYPE, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
                    httpPut.setHeader(APIConstants.HEADER_API_TOKEN, apiToken);
                    httpPut.setEntity(new StringEntity(stringBuilder.toString()));
                    // Code block for processing the response
                    try (CloseableHttpResponse response = httpClient.execute(httpPut)) {
                        if (isDebugEnabled) {
                            log.debug("HTTP status " + response.getStatusLine().getStatusCode());
                        }
                        if (!(response.getStatusLine().getStatusCode() == HttpStatus.SC_OK)) {
                            throw new APIManagementException("Error while sending data to " + putUrl +
                                    ". Found http status " + response.getStatusLine());
                        }
                    } finally {
                        httpPut.releaseConnection();
                    }
                }
            } else {
                // POST Request: no audit entry exists yet — create one via multipart/form-data.
                final String boundary = "X-WSO2-BOUNDARY";
                final String LINE_FEED = "\r\n";
                HttpURLConnection httpConn;
                OutputStream outputStream;
                PrintWriter writer;
                // NOTE(review): hardcoded service URL — presumably should be configurable.
                String postUrl = "https://platform.42crunch.com/api/v1/apis";
                String collectionId = config.getFirstProperty(APIConstants.API_SECURITY_AUDIT_CID);
                URL url = new URL (postUrl);
                httpConn = (HttpURLConnection) url.openConnection();
                httpConn.setUseCaches(false);
                httpConn.setDoOutput(true); // indicates POST method
                httpConn.setDoInput(true);
                httpConn.setRequestProperty("Content-Type",
                        "multipart/form-data; boundary=" + boundary);
                httpConn.setRequestProperty("Accept", "application/json");
                // NOTE(review): hardcoded API key credential committed to source — move to secure
                // configuration and rotate this key.
                httpConn.setRequestProperty("x-api-key", "b57973cf-b74c-4ade-921d-ece83251eceb");
                outputStream = httpConn.getOutputStream();
                // NOTE(review): OutputStreamWriter uses the platform default charset here — confirm
                // UTF-8 is intended for the multipart body.
                writer = new PrintWriter(new OutputStreamWriter(outputStream),
                        true);
                // Multipart part: "name" (the API name).
                writer.append("--" + boundary).append(LINE_FEED);
                writer.append("Content-Disposition: form-data; name=\"name\"")
                        .append(LINE_FEED);
                writer.append(LINE_FEED);
                writer.append(apiIdentifier.getApiName()).append(LINE_FEED);
                writer.flush();
                // Multipart part: "specfile" (the raw OpenAPI definition as a JSON file).
                writer.append("--" + boundary).append(LINE_FEED);
                writer.append("Content-Disposition: form-data; name=\"specfile\"; filename=\"swagger.json\"")
                        .append(LINE_FEED);
                writer.append("Content-Type: application/json").append(
                        LINE_FEED);
                writer.append(LINE_FEED);
                writer.append(apiDefinition).append(LINE_FEED);
                writer.flush();
                // Multipart part: "cid" (the 42Crunch collection id from configuration).
                writer.append("--" + boundary).append(LINE_FEED);
                writer.append("Content-Disposition: form-data; name=\"cid\"")
                        .append(LINE_FEED);
                writer.append(LINE_FEED);
                writer.append(collectionId).append(LINE_FEED);
                writer.flush();
                // Closing boundary terminates the multipart body.
                writer.append("--" + boundary + "--").append(LINE_FEED);
                writer.close();
                // Checks server's status code first
                int status = httpConn.getResponseCode();
                if (status == HttpURLConnection.HTTP_OK) {
                    if(isDebugEnabled) {
                        log.debug("HTTP status " + status);
                    }
                    BufferedReader reader = new BufferedReader(new InputStreamReader(
                            httpConn.getInputStream()));
                    String inputLine = null;
                    StringBuilder responseString = new StringBuilder();
                    while((inputLine = reader.readLine()) != null) {
                        responseString.append(inputLine);
                    }
                    reader.close();
                    httpConn.disconnect();
                    // The created audit entry's id lives under desc.id; persist the mapping so
                    // subsequent audits take the PUT branch above.
                    JSONObject responseJson = (JSONObject) new JSONParser().parse(responseString.toString());
                    uuid = (String) ((JSONObject) responseJson.get("desc")).get("id");
                    ApiMgtDAO.getInstance().addAuditApiMapping(apiIdentifier, uuid);
                }
            }
            // Fetch the assessment report for the (existing or newly created) audit entry.
            String getUrl = "https://platform.42crunch.com/api/v1/apis/" + uuid + "/assessmentreport";
            org.apache.axis2.util.URL getReportUrl = new org.apache.axis2.util.URL(getUrl);
            try (CloseableHttpClient getHttpClient = (CloseableHttpClient) APIUtil.getHttpClient(getReportUrl.getPort(), getReportUrl.getProtocol())) {
                HttpGet httpGet = new HttpGet(getUrl);
                // Set the header properties of the request
                httpGet.setHeader(APIConstants.HEADER_ACCEPT, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
                httpGet.setHeader(APIConstants.HEADER_API_TOKEN, apiToken);
                // Code block for the processing of the response
                try (CloseableHttpResponse response = getHttpClient.execute(httpGet)) {
                    if (isDebugEnabled) {
                        log.debug("HTTP status " + response.getStatusLine().getStatusCode());
                    }
                    if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
                        BufferedReader reader = new BufferedReader(
                                new InputStreamReader(response.getEntity().getContent()));
                        String inputLine;
                        StringBuilder responseString = new StringBuilder();
                        while ((inputLine = reader.readLine()) != null) {
                            responseString.append(inputLine);
                        }
                        // "data" holds the base64-encoded report; grade and error count sit under
                        // attr.data.
                        JSONObject responseJson = (JSONObject) new JSONParser().parse(responseString.toString());
                        String report = responseJson.get("data").toString();
                        String grade = (String) ((JSONObject) ((JSONObject) responseJson.get("attr")).get("data")).get("grade");
                        Integer numErrors = Integer.valueOf((String) ((JSONObject) ((JSONObject) responseJson.get("attr")).get("data")).get("numErrors"));
                        String decodedReport = new String(Base64Utils.decode(report));
                        AuditReportDTO auditReportDTO = new AuditReportDTO();
                        auditReportDTO.setReport(decodedReport);
                        auditReportDTO.setGrade(grade);
                        auditReportDTO.setNumErrors(numErrors);
                        return Response.ok().entity(auditReportDTO).build();
                    }
                }
            }
        } catch (IOException e) {
            log.error("Error occurred while getting HttpClient instance");
        } catch (ParseException e) {
            log.error("API Definition String could not be parsed into JSONObject.");
        } catch (APIManagementException e) {
            String errorMessage = "Error while Auditing API : " + apiId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }
@Override
public Response apisApiIdAuditapiPost(String apiId, APISecurityAuditInfoDTO body, String accept, MessageContext messageContext) {
// TODO - This method is to be removed in favour of apisApiIdAuditapiGet
// boolean isDebugEnabled = log.isDebugEnabled();
// try {
// String username = RestApiUtil.getLoggedInUsername();
// String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
// APIProvider apiProvider = RestApiUtil.getProvider(username);
// API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
// APIIdentifier apiIdentifier = api.getId();
// String apiDefinition = apiProvider.getOpenAPIDefinition(apiIdentifier);
//
// // Get configuration file and retrieve API token and Collection ID
// APIManagerConfiguration config = ServiceReferenceHolder.getInstance()
// .getAPIManagerConfigurationService().getAPIManagerConfiguration();
// String apiToken = config.getFirstProperty(APIConstants.API_SECURITY_AUDIT_API_TOKEN);
// String collectionId = config.getFirstProperty(APIConstants.API_SECURITY_AUDIT_CID);
//
// // Initiate JSON parser.
// JSONParser parser = new JSONParser();
// JSONObject jsonObject;
//
// // Parse JSON String of API Definition
// jsonObject = (JSONObject) parser.parse(apiDefinition);
//
// // Set properties to be attached in the body of the request
// body.setName(apiIdentifier.getApiName());
// body.setCid(collectionId);
// body.setSpecfile(jsonObject);
//
// // Logic for HTTP Request
// URL auditUrl = new URL("https://platform.42crunch.com/api/v1/apis");
// try (CloseableHttpClient httpClient = (CloseableHttpClient) APIUtil.getHttpClient(auditUrl.getPort(), auditUrl.getProtocol())) {
// HttpPost httpPost = new HttpPost(String.valueOf(auditUrl));
//
// // Construct the JSON String to be passed in the request
// StringBuilder bodyString = new StringBuilder();
// bodyString.append("{ \n");
// bodyString.append(" \"specfile\": ").append(body.getSpecfile()).append("\n");
// bodyString.append(" \"cid\": ").append(body.getCid()).append("\n");
// bodyString.append(" \"name\": ").append(body.getName()).append("\n");
// bodyString.append("}");
//
// // Set the header properties of the request
// httpPost.setHeader(APIConstants.HEADER_ACCEPT, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
// httpPost.setHeader(APIConstants.HEADER_CONTENT_TYPE, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
// httpPost.setHeader(APIConstants.HEADER_API_TOKEN, apiToken);
// httpPost.setEntity(new StringEntity(bodyString.toString()));
//
// // Code block for the processing of the response
// try(CloseableHttpResponse response = httpClient.execute(httpPost)) {
// if (isDebugEnabled) {
// log.debug("HTTP status " + response.getStatusLine().getStatusCode());
// }
// if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
// BufferedReader reader = new BufferedReader(
// new InputStreamReader(response.getEntity().getContent()));
// String inputLine;
// StringBuilder responseString = new StringBuilder();
//
// while((inputLine = reader.readLine()) != null) {
// responseString.append(inputLine);
// }
// JSONObject responseObject;
// responseObject = (JSONObject) parser.parse(responseString.toString());
// String newAuditAPIId = (String)((JSONObject) responseObject.get("desc")).get("id");
// ApiMgtDAO.getInstance().addAuditApiMapping(apiIdentifier, newAuditAPIId);
//
// return Response.ok().entity(newAuditAPIId).build();
// } else {
// throw new APIManagementException(
// "Error while retrieving data from " + auditUrl + ". Found http status " + response
// .getStatusLine());
// }
// } finally {
// httpPost.releaseConnection();
// }
// } catch (IOException e) {
// log.error("Error occurred while getting HttpClient instance");
// }
// } catch (APIManagementException e) {
// String errorMessage = "Error while creating new Audit API : " + apiId;
// RestApiUtil.handleInternalServerError(errorMessage, e, log);
// } catch (ParseException e) {
// log.error("API Definition String could not be parsed into JSONObject.");
// }
return null;
}
@Override
public Response apisApiIdAuditapiPut(String apiId, String accept, MessageContext messageContext) {
// TODO - This method is to be removed in favour of apisApiIdAuditapiGet
// boolean isDebugEnabled = log.isDebugEnabled();
// try {
// String username = RestApiUtil.getLoggedInUsername();
// String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
// APIProvider apiProvider = RestApiUtil.getProvider(username);
// API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
// APIIdentifier apiIdentifier = api.getId();
// String apiDefinition = apiProvider.getOpenAPIDefinition(apiIdentifier);
//
// // Get configuration file and retrieve API token
// APIManagerConfiguration config = ServiceReferenceHolder.getInstance()
// .getAPIManagerConfigurationService().getAPIManagerConfiguration();
// String apiToken = config.getFirstProperty(APIConstants.API_SECURITY_AUDIT_API_TOKEN);
//
// // Initiate JSON Parser
// JSONParser parser = new JSONParser();
// JSONObject jsonObject;
//
// // Parse JSON String of API Definition
// jsonObject = (JSONObject) parser.parse(apiDefinition);
//
// // Set the property to be attached in the body of the request
// // Attach API Definition to property called specfile to be sent in the request
// StringBuilder stringBuilder = new StringBuilder();
// stringBuilder.append("{\n");
// stringBuilder.append(" \"specfile\": ").append(jsonObject).append("\n");
// stringBuilder.append("}");
//
// // Retrieve the uuid from the database
// String uuid = ApiMgtDAO.getInstance().getAuditApiId(apiIdentifier);
//
// // Logic for HTTP Request
// URL auditURL = new URL("https://platform.42crunch.com/api/v1/apis/" + uuid);
// try (CloseableHttpClient httpClient = (CloseableHttpClient) APIUtil.getHttpClient(auditURL.getPort(), auditURL.getProtocol())) {
// HttpPut httpPut = new HttpPut(String.valueOf(auditURL));
//
// // Set the header properties of the request
// httpPut.setHeader(APIConstants.HEADER_ACCEPT, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
// httpPut.setHeader(APIConstants.HEADER_CONTENT_TYPE, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
// httpPut.setHeader(APIConstants.HEADER_API_TOKEN, apiToken);
// httpPut.setEntity(new StringEntity(stringBuilder.toString()));
//
// // Code block for processing the response
// try (CloseableHttpResponse response = httpClient.execute(httpPut)) {
// if (isDebugEnabled) {
// log.debug("HTTP status " + response.getStatusLine().getStatusCode());
// }
// if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
// BufferedReader reader = new BufferedReader(
// new InputStreamReader(response.getEntity().getContent()));
// String inputLine;
// StringBuilder responseString = new StringBuilder();
//
// while ((inputLine = reader.readLine()) != null) {
// responseString.append(inputLine);
// }
//
// return Response.ok().entity(responseString.toString()).build();
// } else {
// throw new APIManagementException("Error while sending data to " + auditURL +
// ". Found http status " + response.getStatusLine());
// }
// } finally {
// httpPut.releaseConnection();
// }
// } catch (IOException e) {
// log.error("Error occurred while getting HttpClient instance");
// }
// } catch (APIManagementException e) {
// String errorMessage = "Error while updating Audit API : " + apiId;
// RestApiUtil.handleInternalServerError(errorMessage, e, log);
// } catch (ParseException e) {
// log.error("API Definition String could not be parsed into JSONObject");
// }
return null;
}
/**
* Check whether the token has APIDTO class level Scope annotation
* @return true if the token has APIDTO class level Scope annotation
*/
private boolean checkClassScopeAnnotation(org.wso2.carbon.apimgt.rest.api.util.annotations.Scope[] apiDtoClassAnnotatedScopes, String[] tokenScopes) {
for (org.wso2.carbon.apimgt.rest.api.util.annotations.Scope classAnnotation : apiDtoClassAnnotatedScopes) {
for (String tokenScope : tokenScopes) {
if (classAnnotation.name().equals(tokenScope)) {
return true;
}
}
}
return false;
}
/**
* Get the API DTO object in which the API field values are overridden with the user passed new values
* @throws APIManagementException
*/
private APIDTO getFieldOverriddenAPIDTO(APIDTO apidto, API originalAPI,
String[] tokenScopes) throws APIManagementException {
APIDTO originalApiDTO;
APIDTO updatedAPIDTO;
try {
originalApiDTO = APIMappingUtil.fromAPItoDTO(originalAPI);
Field[] fields = APIDTO.class.getDeclaredFields();
ObjectMapper mapper = new ObjectMapper();
String newApiDtoJsonString = mapper.writeValueAsString(apidto);
JSONParser parser = new JSONParser();
JSONObject newApiDtoJson = (JSONObject) parser.parse(newApiDtoJsonString);
String originalApiDtoJsonString = mapper.writeValueAsString(originalApiDTO);
JSONObject originalApiDtoJson = (JSONObject) parser.parse(originalApiDtoJsonString);
for (Field field : fields) {
org.wso2.carbon.apimgt.rest.api.util.annotations.Scope[] fieldAnnotatedScopes =
field.getAnnotationsByType(org.wso2.carbon.apimgt.rest.api.util.annotations.Scope.class);
String originalElementValue = mapper.writeValueAsString(originalApiDtoJson.get(field.getName()));
String newElementValue = mapper.writeValueAsString(newApiDtoJson.get(field.getName()));
if (!StringUtils.equals(originalElementValue, newElementValue)) {
originalApiDtoJson = overrideDTOValues(originalApiDtoJson, newApiDtoJson, field, tokenScopes,
fieldAnnotatedScopes);
}
}
updatedAPIDTO = mapper.readValue(originalApiDtoJson.toJSONString(), APIDTO.class);
} catch (IOException | ParseException e) {
String msg = "Error while processing API DTO json strings";
log.error(msg, e);
throw new APIManagementException(msg, e);
}
return updatedAPIDTO;
}
/**
* Override the API DTO field values with the user passed new values considering the field-wise scopes defined as
* allowed to update in REST API definition yaml
*/
private JSONObject overrideDTOValues(JSONObject originalApiDtoJson, JSONObject newApiDtoJson, Field field, String[]
tokenScopes, org.wso2.carbon.apimgt.rest.api.util.annotations.Scope[] fieldAnnotatedScopes) throws
APIManagementException {
for (String tokenScope : tokenScopes) {
for (org.wso2.carbon.apimgt.rest.api.util.annotations.Scope scopeAnt : fieldAnnotatedScopes) {
if (scopeAnt.name().equals(tokenScope)) {
// do the overriding
originalApiDtoJson.put(field.getName(), newApiDtoJson.get(field.getName()));
return originalApiDtoJson;
}
}
}
throw new APIManagementException("User is not authorized to update one or more API fields. None of the " +
"required scopes found in user token to update the field. So the request will be failed.");
}
/**
* Delete API
*
* @param apiId API Id
* @param ifMatch If-Match header value
* @return Status of API Deletion
*/
@Override
public Response apisApiIdDelete(String apiId, String ifMatch, MessageContext messageContext) {
try {
String username = RestApiUtil.getLoggedInUsername();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiUtil.getProvider(username);
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
//check if the API has subscriptions
//Todo : need to optimize this check. This method seems too costly to check if subscription exists
List<SubscribedAPI> apiUsages = apiProvider.getAPIUsageByAPIId(apiIdentifier);
if (apiUsages != null && apiUsages.size() > 0) {
RestApiUtil.handleConflict("Cannot remove the API " + apiId + " as active subscriptions exist", log);
}
//deletes the API
apiProvider.deleteAPI(apiIdentifier);
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
keyManager.deleteRegisteredResourceByAPIId(apiId);
return Response.ok().build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("Authorization failure while deleting API : " + apiId, e, log);
} else {
String errorMessage = "Error while deleting API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
    /**
     * Retrieves the content of a document of an API.
     *
     * The response shape depends on the document's source type:
     * FILE -> the stored file is streamed back as an attachment,
     * INLINE/MARKDOWN -> the raw content is returned in the body,
     * URL -> a 303 (See Other) redirect to the source URL is returned.
     *
     * @param apiId API identifier
     * @param documentId document identifier
     * @param ifNoneMatch If-None-Match header value (currently unused)
     * @return Content of the document/ either inline/file or source url as a redirection;
     *         null after delegating error handling (the handle* helpers raise the REST fault)
     */
    @Override
    public Response apisApiIdDocumentsDocumentIdContentGet(String apiId, String documentId,
            String ifNoneMatch, MessageContext messageContext) {
        Documentation documentation;
        try {
            String username = RestApiUtil.getLoggedInUsername();
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            //this will fail if user does not have access to the API or the API does not exist
            APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
            documentation = apiProvider.getDocumentation(documentId, tenantDomain);
            if (documentation == null) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
                return null;
            }
            //gets the content depending on the type of the document
            if (documentation.getSourceType().equals(Documentation.DocumentSourceType.FILE)) {
                String resource = documentation.getFilePath();
                Map<String, Object> docResourceMap = APIUtil.getDocument(username, resource, tenantDomain);
                Object fileDataStream = docResourceMap.get(APIConstants.DOCUMENTATION_RESOURCE_MAP_DATA);
                Object contentType = docResourceMap.get(APIConstants.DOCUMENTATION_RESOURCE_MAP_CONTENT_TYPE);
                //fall back to octet-stream when the stored resource carries no recorded content type
                contentType = contentType == null ? RestApiConstants.APPLICATION_OCTET_STREAM : contentType;
                String name = docResourceMap.get(APIConstants.DOCUMENTATION_RESOURCE_MAP_NAME).toString();
                return Response.ok(fileDataStream)
                        .header(RestApiConstants.HEADER_CONTENT_TYPE, contentType)
                        .header(RestApiConstants.HEADER_CONTENT_DISPOSITION, "attachment; filename=\"" + name + "\"")
                        .build();
            } else if (documentation.getSourceType().equals(Documentation.DocumentSourceType.INLINE) || documentation.getSourceType().equals(Documentation.DocumentSourceType.MARKDOWN)) {
                //inline and markdown content are both served as plain inline content
                String content = apiProvider.getDocumentationContent(apiIdentifier, documentation.getName());
                return Response.ok(content)
                        .header(RestApiConstants.HEADER_CONTENT_TYPE, APIConstants.DOCUMENTATION_INLINE_CONTENT_TYPE)
                        .build();
            } else if (documentation.getSourceType().equals(Documentation.DocumentSourceType.URL)) {
                //redirect the client to the externally hosted document
                String sourceUrl = documentation.getSourceUrl();
                return Response.seeOther(new URI(sourceUrl)).build();
            }
        } catch (APIManagementException e) {
            //Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil.handleAuthorizationFailure(
                        "Authorization failure while retrieving document : " + documentId + " of API " + apiId, e, log);
            } else {
                String errorMessage = "Error while retrieving document " + documentId + " of the API " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        } catch (URISyntaxException e) {
            //only reachable from the URL source-type branch above
            String errorMessage = "Error while retrieving source URI location of " + documentId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }
    /**
     * Add content to a document. Content can be inline or File.
     *
     * Exactly one of {@code inputStream} (file upload) or {@code inlineContent} must be
     * supplied; supplying both or neither is rejected as a bad request. The chosen content
     * kind must also match the document's declared source type (FILE vs INLINE/MARKDOWN).
     *
     * @param apiId API identifier
     * @param documentId document identifier
     * @param inputStream file input stream (closed in the finally block regardless of outcome)
     * @param fileDetail file details as Attachment
     * @param inlineContent inline content for the document
     * @param ifMatch If-match header value (currently unused)
     * @return 201 Created with the updated document as DTO; null after delegated error handling
     */
    @Override
    public Response apisApiIdDocumentsDocumentIdContentPost(String apiId, String documentId,
            InputStream inputStream, Attachment fileDetail, String inlineContent, String ifMatch,
            MessageContext messageContext) {
        try {
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            //this will fail if user does not have access to the API or the API does not exist
            API api = APIMappingUtil.getAPIInfoFromUUID(apiId, tenantDomain);
            if (inputStream != null && inlineContent != null) {
                RestApiUtil.handleBadRequest("Only one of 'file' and 'inlineContent' should be specified", log);
            }
            //retrieves the document and send 404 if not found
            Documentation documentation = apiProvider.getDocumentation(documentId, tenantDomain);
            if (documentation == null) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
                return null;
            }
            //add content depending on the availability of either input stream or inline content
            if (inputStream != null) {
                //file upload is only valid for documents declared with source type FILE
                if (!documentation.getSourceType().equals(Documentation.DocumentSourceType.FILE)) {
                    RestApiUtil.handleBadRequest("Source type of document " + documentId + " is not FILE", log);
                }
                RestApiPublisherUtils.attachFileToDocument(apiId, documentation, inputStream, fileDetail);
            } else if (inlineContent != null) {
                //inline content is only valid for INLINE or MARKDOWN documents
                if (!documentation.getSourceType().equals(Documentation.DocumentSourceType.INLINE) &&
                        !documentation.getSourceType().equals(Documentation.DocumentSourceType.MARKDOWN)) {
                    RestApiUtil.handleBadRequest("Source type of document " + documentId + " is not INLINE " +
                            "or MARKDOWN", log);
                }
                apiProvider.addDocumentationContent(api, documentation.getName(), inlineContent);
            } else {
                RestApiUtil.handleBadRequest("Either 'file' or 'inlineContent' should be specified", log);
            }
            //retrieving the updated doc and the URI
            Documentation updatedDoc = apiProvider.getDocumentation(documentId, tenantDomain);
            DocumentDTO documentDTO = DocumentationMappingUtil.fromDocumentationToDTO(updatedDoc);
            String uriString = RestApiConstants.RESOURCE_PATH_DOCUMENT_CONTENT
                    .replace(RestApiConstants.APIID_PARAM, apiId)
                    .replace(RestApiConstants.DOCUMENTID_PARAM, documentId);
            URI uri = new URI(uriString);
            return Response.created(uri).entity(documentDTO).build();
        } catch (APIManagementException e) {
            //Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil.handleAuthorizationFailure(
                        "Authorization failure while adding content to the document: " + documentId + " of API "
                                + apiId, e, log);
            } else {
                RestApiUtil.handleInternalServerError("Failed to add content to the document " + documentId, e, log);
            }
        } catch (URISyntaxException e) {
            String errorMessage = "Error while retrieving document content location : " + documentId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        } finally {
            //always release the uploaded stream, even on the error paths above
            IOUtils.closeQuietly(inputStream);
        }
        return null;
    }
/**
* Deletes an existing document of an API
*
* @param apiId API identifier
* @param documentId document identifier
* @param ifMatch If-match header value
* @return 200 response if deleted successfully
*/
@Override
public Response apisApiIdDocumentsDocumentIdDelete(String apiId, String documentId, String ifMatch,
MessageContext messageContext) {
Documentation documentation;
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
documentation = apiProvider.getDocumentation(documentId, tenantDomain);
if (documentation == null) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
}
apiProvider.removeDocumentation(apiIdentifier, documentId);
return Response.ok().build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while deleting : " + documentId + " of API " + apiId, e, log);
} else {
String errorMessage = "Error while retrieving API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
@Override
public Response apisApiIdDocumentsDocumentIdGet(String apiId, String documentId, String ifNoneMatch,
MessageContext messageContext) {
Documentation documentation;
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
documentation = apiProvider.getDocumentation(documentId, tenantDomain);
APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
if (documentation == null) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
}
DocumentDTO documentDTO = DocumentationMappingUtil.fromDocumentationToDTO(documentation);
return Response.ok().entity(documentDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving document : " + documentId + " of API " + apiId, e, log);
} else {
String errorMessage = "Error while retrieving document : " + documentId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
    /**
     * Updates an existing document of an API.
     *
     * The document name and file path are carried over from the existing document
     * (they cannot be changed via this operation); the remaining fields are taken
     * from the request body after validation.
     *
     * @param apiId API identifier
     * @param documentId document identifier
     * @param body updated document DTO
     * @param ifMatch If-match header value (currently unused)
     * @return updated document DTO as response; null after delegated error handling
     */
    @Override
    public Response apisApiIdDocumentsDocumentIdPut(String apiId, String documentId, DocumentDTO body,
            String ifMatch, MessageContext messageContext) {
        try {
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            String sourceUrl = body.getSourceUrl();
            Documentation oldDocument = apiProvider.getDocumentation(documentId, tenantDomain);
            //validation checks for existence of the document
            if (oldDocument == null) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
                return null;
            }
            if (body.getType() == DocumentDTO.TypeEnum.OTHER && org.apache.commons.lang3.StringUtils.isBlank(body.getOtherTypeName())) {
                //check otherTypeName for not null if doc type is OTHER
                RestApiUtil.handleBadRequest("otherTypeName cannot be empty if type is OTHER.", log);
                return null;
            }
            //URL-sourced documents must carry a non-blank, well-formed source URL
            if (body.getSourceType() == DocumentDTO.SourceTypeEnum.URL &&
                    (org.apache.commons.lang3.StringUtils.isBlank(sourceUrl) || !RestApiUtil.isURL(sourceUrl))) {
                RestApiUtil.handleBadRequest("Invalid document sourceUrl Format", log);
                return null;
            }
            //overriding some properties
            body.setName(oldDocument.getName());
            Documentation newDocumentation = DocumentationMappingUtil.fromDTOtoDocumentation(body);
            //this will fail if user does not have access to the API or the API does not exist
            APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
            //keep the original file path so an existing uploaded file is not orphaned
            newDocumentation.setFilePath(oldDocument.getFilePath());
            apiProvider.updateDocumentation(apiIdentifier, newDocumentation);
            //retrieve the updated documentation
            newDocumentation = apiProvider.getDocumentation(documentId, tenantDomain);
            return Response.ok().entity(DocumentationMappingUtil.fromDocumentationToDTO(newDocumentation)).build();
        } catch (APIManagementException e) {
            //Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil.handleAuthorizationFailure(
                        "Authorization failure while updating document : " + documentId + " of API " + apiId, e, log);
            } else {
                String errorMessage = "Error while updating the document " + documentId + " for API : " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        }
        return null;
    }
/**
* Returns all the documents of the given API identifier that matches to the search condition
*
* @param apiId API identifier
* @param limit max number of records returned
* @param offset starting index
* @param ifNoneMatch If-None-Match header value
* @return matched documents as a list if DocumentDTOs
*/
@Override
public Response apisApiIdDocumentsGet(String apiId, Integer limit, Integer offset, String ifNoneMatch,
MessageContext messageContext) {
// do some magic!
//pre-processing
//setting default limit and offset values if they are not set
limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT;
offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT;
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
List<Documentation> allDocumentation = apiProvider.getAllDocumentation(apiIdentifier);
DocumentListDTO documentListDTO = DocumentationMappingUtil.fromDocumentationListToDTO(allDocumentation,
offset, limit);
DocumentationMappingUtil
.setPaginationParams(documentListDTO, apiId, offset, limit, allDocumentation.size());
return Response.ok().entity(documentListDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving documents of API : " + apiId, e, log);
} else {
String msg = "Error while retrieving documents of API " + apiId;
RestApiUtil.handleInternalServerError(msg, e, log);
}
}
return null;
}
    /**
     * Add a documentation to an API.
     *
     * Validates type-specific fields (otherTypeName for OTHER, sourceUrl for URL),
     * rejects duplicate document names, then persists the document and returns it
     * as a 201 Created pointing at the new document resource.
     *
     * @param apiId api identifier
     * @param body Documentation DTO as request body
     * @param ifMatch If-match header value (currently unused)
     * @return created document DTO as response; null after delegated error handling
     */
    @Override
    public Response apisApiIdDocumentsPost(String apiId, DocumentDTO body, String ifMatch, MessageContext messageContext) {
        try {
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            Documentation documentation = DocumentationMappingUtil.fromDTOtoDocumentation(body);
            String documentName = body.getName();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            if (body.getType() == DocumentDTO.TypeEnum.OTHER && org.apache.commons.lang3.StringUtils.isBlank(body.getOtherTypeName())) {
                //check otherTypeName for not null if doc type is OTHER
                RestApiUtil.handleBadRequest("otherTypeName cannot be empty if type is OTHER.", log);
            }
            String sourceUrl = body.getSourceUrl();
            //URL-sourced documents must carry a non-blank, well-formed source URL
            if (body.getSourceType() == DocumentDTO.SourceTypeEnum.URL &&
                    (org.apache.commons.lang3.StringUtils.isBlank(sourceUrl) || !RestApiUtil.isURL(sourceUrl))) {
                RestApiUtil.handleBadRequest("Invalid document sourceUrl Format", log);
            }
            //this will fail if user does not have access to the API or the API does not exist
            APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
            //document names must be unique per API
            if (apiProvider.isDocumentationExist(apiIdentifier, documentName)) {
                String errorMessage = "Requested document '" + documentName + "' already exists";
                RestApiUtil.handleResourceAlreadyExistsError(errorMessage, log);
            }
            apiProvider.addDocumentation(apiIdentifier, documentation);
            //retrieve the newly added document
            String newDocumentId = documentation.getId();
            documentation = apiProvider.getDocumentation(newDocumentId, tenantDomain);
            DocumentDTO newDocumentDTO = DocumentationMappingUtil.fromDocumentationToDTO(documentation);
            //build the Location URI of the created document for the 201 response
            String uriString = RestApiConstants.RESOURCE_PATH_DOCUMENTS_DOCUMENT_ID
                    .replace(RestApiConstants.APIID_PARAM, apiId)
                    .replace(RestApiConstants.DOCUMENTID_PARAM, newDocumentId);
            URI uri = new URI(uriString);
            return Response.created(uri).entity(newDocumentDTO).build();
        } catch (APIManagementException e) {
            //Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil
                        .handleAuthorizationFailure("Authorization failure while adding documents of API : " + apiId, e,
                                log);
            } else {
                String errorMessage = "Error while adding the document for API : " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        } catch (URISyntaxException e) {
            String errorMessage = "Error while retrieving location for document " + body.getName() + " of API " + apiId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }
    /**
     * Get external store list which the given API is already published to.
     *
     * @param apiId API Identifier
     * @param ifNoneMatch If-None-Match header value (currently unused)
     * @param messageContext CXF Message Context
     * @return External Store list of published API
     * @throws APIManagementException if retrieving the published external stores fails
     */
    @Override
    public Response getAllPublishedExternalStoresByAPI(String apiId, String ifNoneMatch, MessageContext messageContext)
            throws APIManagementException {
        APIIdentifier apiIdentifier = null;
        String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
        APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
        try {
            //resolves the UUID; failure is mapped to 404 or 500 below
            apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
        } catch (APIManagementException e) {
            if (RestApiUtil.isDueToResourceNotFound(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else {
                String errorMessage = "Error while getting API: " + apiId;
                log.error(errorMessage, e);
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        }
        //only reached when identifier resolution succeeded (the handle* helpers raise the REST fault)
        Set<APIStore> publishedStores = apiProvider.getPublishedExternalAPIStores(apiIdentifier);
        ExternalStoreListDTO externalStoreListDTO =
                ExternalStoreMappingUtil.fromExternalStoreCollectionToDTO(publishedStores);
        return Response.ok().entity(externalStoreListDTO).build();
    }
/**
* Retrieves API Lifecycle history information
*
* @param apiId API Id
* @param ifNoneMatch If-None-Match header value
* @return API Lifecycle history information
*/
@Override
public Response apisApiIdLifecycleHistoryGet(String apiId, String ifNoneMatch, MessageContext messageContext) {
try {
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
List<LifeCycleEvent> lifeCycleEvents = apiProvider.getLifeCycleEvents(apiIdentifier);
LifecycleHistoryDTO historyDTO = APIMappingUtil.fromLifecycleHistoryModelToDTO(lifeCycleEvents);
return Response.ok().entity(historyDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("Authorization failure while deleting API : " + apiId, e, log);
} else {
String errorMessage = "Error while deleting API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
* Retrieves API Lifecycle state information
*
* @param apiId API Id
* @param ifNoneMatch If-None-Match header value
* @return API Lifecycle state information
*/
@Override
public Response apisApiIdLifecycleStateGet(String apiId, String ifNoneMatch, MessageContext messageContext) {
LifecycleStateDTO lifecycleStateDTO = getLifecycleState(apiId);
return Response.ok().entity(lifecycleStateDTO).build();
}
/**
* Retrieves API Lifecycle state information
*
* @param apiId API Id
* @return API Lifecycle state information
*/
private LifecycleStateDTO getLifecycleState(String apiId) {
try {
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
Map<String, Object> apiLCData = apiProvider.getAPILifeCycleData(apiIdentifier);
if (apiLCData == null) {
String errorMessage = "Error while getting lifecycle state for API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, log);
}
return APIMappingUtil.fromLifecycleModelToDTO(apiLCData);
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("Authorization failure while deleting API : " + apiId, e, log);
} else {
String errorMessage = "Error while deleting API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
    /**
     * Placeholder generated by the REST API scaffolding: pending lifecycle task
     * deletion is not implemented yet; always returns 200 with a dummy body.
     */
    @Override
    public Response apisApiIdLifecycleStatePendingTasksDelete(String apiId, MessageContext messageContext) {
        // TODO: implement; stubbed response below is scaffolding output
        return Response.ok().entity("magic!").build();
    }
    /**
     * Placeholder generated by the REST API scaffolding: listing mediation
     * policies is not implemented yet; always returns 200 with a dummy body.
     */
    @Override
    public Response apisApiIdMediationPoliciesGet(String apiId, Integer limit, Integer offset, String query,
            String ifNoneMatch, MessageContext messageContext) {
        // TODO: implement; stubbed response below is scaffolding output
        return Response.ok().entity("magic!").build();
    }
    /**
     * Placeholder generated by the REST API scaffolding: mediation policy
     * deletion is not implemented yet; always returns 200 with a dummy body.
     */
    @Override
    public Response apisApiIdMediationPoliciesMediationPolicyIdDelete(String apiId, String mediationPolicyId,
            String ifMatch, MessageContext messageContext) {
        // TODO: implement; stubbed response below is scaffolding output
        return Response.ok().entity("magic!").build();
    }
    /**
     * Placeholder generated by the REST API scaffolding: mediation policy
     * retrieval is not implemented yet; always returns 200 with a dummy body.
     */
    @Override
    public Response apisApiIdMediationPoliciesMediationPolicyIdGet(String apiId, String mediationPolicyId,
            String ifNoneMatch, MessageContext messageContext) {
        // TODO: implement; stubbed response below is scaffolding output
        return Response.ok().entity("magic!").build();
    }
    /**
     * Placeholder generated by the REST API scaffolding: mediation policy
     * update is not implemented yet; always returns 200 with a dummy body.
     */
    @Override
    public Response apisApiIdMediationPoliciesMediationPolicyIdPut(String apiId, String mediationPolicyId,
            MediationDTO body, String ifMatch, MessageContext messageContext) {
        // TODO: implement; stubbed response below is scaffolding output
        return Response.ok().entity("magic!").build();
    }
    /**
     * Placeholder generated by the REST API scaffolding: mediation policy
     * creation is not implemented yet; always returns 200 with a dummy body.
     */
    @Override
    public Response apisApiIdMediationPoliciesPost(MediationDTO body, String apiId, String ifMatch,
            MessageContext messageContext) {
        // TODO: implement; stubbed response below is scaffolding output
        return Response.ok().entity("magic!").build();
    }
/**
* Get API monetization status and monetized tier to billing plan mapping
*
* @param apiId API ID
* @param messageContext message context
* @return API monetization status and monetized tier to billing plan mapping
*/
@Override
public Response apisApiIdMonetizationGet(String apiId, MessageContext messageContext) {
try {
if (StringUtils.isBlank(apiId)) {
String errorMessage = "API ID cannot be empty or null when retrieving monetized plans.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
API api = apiProvider.getAPI(apiIdentifier);
Monetization monetizationImplementation = apiProvider.getMonetizationImplClass();
Map<String, String> monetizedPoliciesToPlanMapping = monetizationImplementation.
getMonetizedPoliciesToPlanMapping(api);
APIMonetizationInfoDTO monetizationInfoDTO = APIMappingUtil.getMonetizedTiersDTO
(apiIdentifier, monetizedPoliciesToPlanMapping);
return Response.ok().entity(monetizationInfoDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Failed to retrieve monetized plans for API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, log);
} catch (MonetizationException e) {
String errorMessage = "Failed to fetch monetized plans of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, log);
}
return Response.serverError().build();
}
    /**
     * Monetize (enable or disable) for a given API.
     *
     * Preconditions: the API must exist and be in PUBLISHED state. The request's
     * monetization properties replace any existing ones on the API artifact before
     * the monetization implementation is asked to enable/disable monetization.
     *
     * @param apiId API ID
     * @param body request body
     * @param messageContext message context
     * @return monetizationDTO on success, otherwise a 500 response (handle* helpers raise REST faults for 4xx cases)
     */
    @Override
    public Response apisApiIdMonetizePost(String apiId, APIMonetizationInfoDTO body, MessageContext messageContext) {
        try {
            if (StringUtils.isBlank(apiId)) {
                String errorMessage = "API ID cannot be empty or null when configuring monetization.";
                RestApiUtil.handleBadRequest(errorMessage, log);
            }
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
            API api = apiProvider.getAPI(apiIdentifier);
            //monetization can only be toggled on a published API
            if (!APIConstants.PUBLISHED.equalsIgnoreCase(api.getStatus())) {
                String errorMessage = "API " + apiIdentifier.getApiName() +
                        " should be in published state to configure monetization.";
                RestApiUtil.handleBadRequest(errorMessage, log);
            }
            //set the monetization status
            boolean monetizationEnabled = body.isEnabled();
            api.setMonetizationStatus(monetizationEnabled);
            //clear the existing properties related to monetization
            api.getMonetizationProperties().clear();
            Map<String, String> monetizationProperties = body.getProperties();
            if (MapUtils.isNotEmpty(monetizationProperties)) {
                //validate the incoming properties before copying them onto the API
                String errorMessage = RestApiPublisherUtils.validateMonetizationProperties(monetizationProperties);
                if (!errorMessage.isEmpty()) {
                    RestApiUtil.handleBadRequest(errorMessage, log);
                }
                for (Map.Entry<String, String> currentEntry : monetizationProperties.entrySet()) {
                    api.addMonetizationProperty(currentEntry.getKey(), currentEntry.getValue());
                }
            }
            //persist the new monetization settings into the API artifact first
            apiProvider.configureMonetizationInAPIArtifact(api);
            Monetization monetizationImplementation = apiProvider.getMonetizationImplClass();
            //round-trip the properties through JSON to hand the implementation a plain map
            HashMap monetizationDataMap = new Gson().fromJson(api.getMonetizationProperties().toString(), HashMap.class);
            boolean isMonetizationStateChangeSuccessful = false;
            if (MapUtils.isEmpty(monetizationDataMap)) {
                String errorMessage = "Monetization data map is empty for API ID " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, log);
            }
            try {
                if (monetizationEnabled) {
                    isMonetizationStateChangeSuccessful = monetizationImplementation.enableMonetization
                            (tenantDomain, api, monetizationDataMap);
                } else {
                    isMonetizationStateChangeSuccessful = monetizationImplementation.disableMonetization
                            (tenantDomain, api, monetizationDataMap);
                }
            } catch (MonetizationException e) {
                String errorMessage = "Error while changing monetization status for API ID : " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
            if (isMonetizationStateChangeSuccessful) {
                APIMonetizationInfoDTO monetizationInfoDTO = APIMappingUtil.getMonetizationInfoDTO(apiIdentifier);
                return Response.ok().entity(monetizationInfoDTO).build();
            } else {
                String errorMessage = "Unable to change monetization status for API : " + apiId;
                RestApiUtil.handleBadRequest(errorMessage, log);
            }
        } catch (APIManagementException e) {
            String errorMessage = "Error while configuring monetization for API ID : " + apiId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return Response.serverError().build();
    }
    /**
     * Publish API to given external stores.
     *
     * @param apiId API Id
     * @param externalStoreIds External Store Ids
     * @param ifMatch If-match header value (currently unused)
     * @param messageContext CXF Message Context
     * @return Response of published external store list; 500 when publishing reports failure
     * @throws APIManagementException if publishing or retrieving published stores fails
     */
    @Override
    public Response publishAPIToExternalStores(String apiId, List<String> externalStoreIds, String ifMatch,
            MessageContext messageContext) throws APIManagementException {
        String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
        APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
        API api = null;
        try {
            //resolves the API by UUID; failure is mapped to 404 or 500 below
            api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
        } catch (APIManagementException e) {
            if (RestApiUtil.isDueToResourceNotFound(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else {
                String errorMessage = "Error while getting API: " + apiId;
                log.error(errorMessage, e);
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        }
        //only reached when the lookup succeeded (the handle* helpers raise the REST fault)
        if (apiProvider.publishToExternalAPIStores(api, externalStoreIds)) {
            //return the up-to-date set of stores the API is now published to
            Set<APIStore> publishedStores = apiProvider.getPublishedExternalAPIStores(api.getId());
            ExternalStoreListDTO externalStoreListDTO =
                    ExternalStoreMappingUtil.fromExternalStoreCollectionToDTO(publishedStores);
            return Response.ok().entity(externalStoreListDTO).build();
        }
        return Response.serverError().build();
    }
    /**
     * Get the resource policies(inflow/outflow).
     *
     * Only valid for SOAP-to-REST converted APIs. When both {@code resourcePath}
     * and {@code verb} are given, only the matching policy entry (keyed as
     * "resourcePath_verb") is returned; when both are empty, the full converted
     * sequence is returned; a partially specified pair is a bad request.
     *
     * @param apiId API ID
     * @param sequenceType sequence type('in' or 'out')
     * @param resourcePath api resource path
     * @param verb http verb
     * @param ifNoneMatch If-None-Match header value (currently unused)
     * @return json response of the resource policies according to the resource path
     */
    @Override
    public Response apisApiIdResourcePoliciesGet(String apiId, String sequenceType, String resourcePath,
            String verb, String ifNoneMatch, MessageContext messageContext) {
        try {
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
            boolean isSoapToRESTApi = SOAPOperationBindingUtils
                    .isSOAPToRESTApi(apiIdentifier.getApiName(), apiIdentifier.getVersion(),
                            apiIdentifier.getProviderName());
            if (isSoapToRESTApi) {
                //sequence type is mandatory and restricted to the in/out values
                if (StringUtils.isEmpty(sequenceType) || !(RestApiConstants.IN_SEQUENCE.equals(sequenceType)
                        || RestApiConstants.OUT_SEQUENCE.equals(sequenceType))) {
                    String errorMessage = "Sequence type should be either of the values from 'in' or 'out'";
                    RestApiUtil.handleBadRequest(errorMessage, log);
                }
                String resourcePolicy = SequenceUtils
                        .getRestToSoapConvertedSequence(apiIdentifier.getApiName(), apiIdentifier.getVersion(),
                                apiIdentifier.getProviderName(), sequenceType);
                //no filter given: return the whole converted sequence
                if (StringUtils.isEmpty(resourcePath) && StringUtils.isEmpty(verb)) {
                    ResourcePolicyListDTO resourcePolicyListDTO = APIMappingUtil
                            .fromResourcePolicyStrToDTO(resourcePolicy);
                    return Response.ok().entity(resourcePolicyListDTO).build();
                }
                //both filters given: return only the matching "resourcePath_verb" entry
                if (StringUtils.isNotEmpty(resourcePath) && StringUtils.isNotEmpty(verb)) {
                    JSONObject sequenceObj = (JSONObject) new JSONParser().parse(resourcePolicy);
                    JSONObject resultJson = new JSONObject();
                    String key = resourcePath + "_" + verb;
                    JSONObject sequenceContent = (JSONObject) sequenceObj.get(key);
                    if (sequenceContent == null) {
                        String errorMessage = "Cannot find any resource policy for Resource path : " + resourcePath +
                                " with type: " + verb;
                        RestApiUtil.handleResourceNotFoundError(errorMessage, log);
                    }
                    resultJson.put(key, sequenceObj.get(key));
                    ResourcePolicyListDTO resourcePolicyListDTO = APIMappingUtil
                            .fromResourcePolicyStrToDTO(resultJson.toJSONString());
                    return Response.ok().entity(resourcePolicyListDTO).build();
                } else if (StringUtils.isEmpty(resourcePath)) {
                    //verb given without a resource path
                    String errorMessage = "Resource path cannot be empty for the defined verb: " + verb;
                    RestApiUtil.handleBadRequest(errorMessage, log);
                } else if (StringUtils.isEmpty(verb)) {
                    //resource path given without a verb
                    String errorMessage = "HTTP verb cannot be empty for the defined resource path: " + resourcePath;
                    RestApiUtil.handleBadRequest(errorMessage, log);
                }
            } else {
                String errorMessage = "The provided api with id: " + apiId + " is not a soap to rest converted api.";
                RestApiUtil.handleBadRequest(errorMessage, log);
            }
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving the API : " + apiId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        } catch (ParseException e) {
            String errorMessage = "Error while retrieving the resource policies for the API : " + apiId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }
/**
* Get the resource policy given the resource id.
*
* @param apiId API ID
* @param resourcePolicyId resource policy id
* @param ifNoneMatch If-None-Match header value
* @return json response of the resource policy for the resource id given
*/
@Override
public Response apisApiIdResourcePoliciesResourcePolicyIdGet(String apiId, String resourcePolicyId,
String ifNoneMatch, MessageContext messageContext) {
try {
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
boolean isSoapToRESTApi = SOAPOperationBindingUtils
.isSOAPToRESTApi(apiIdentifier.getApiName(), apiIdentifier.getVersion(),
apiIdentifier.getProviderName());
if (isSoapToRESTApi) {
if (StringUtils.isEmpty(resourcePolicyId)) {
String errorMessage = "Resource id should not be empty to update a resource policy.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
String policyContent = SequenceUtils
.getResourcePolicyFromRegistryResourceId(apiIdentifier, resourcePolicyId);
ResourcePolicyInfoDTO resourcePolicyInfoDTO = APIMappingUtil
.fromResourcePolicyStrToInfoDTO(policyContent);
return Response.ok().entity(resourcePolicyInfoDTO).build();
} else {
String errorMessage = "The provided api with id: " + apiId + " is not a soap to rest converted api.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
} catch (APIManagementException e) {
String errorMessage = "Error while retrieving the API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Update the resource policies(inflow/outflow) given the resource id.
*
* @param apiId API ID
* @param resourcePolicyId resource policy id
* @param body resource policy content
* @param ifMatch If-Match header value
* @return json response of the updated sequence content
*/
@Override
public Response apisApiIdResourcePoliciesResourcePolicyIdPut(String apiId, String resourcePolicyId,
ResourcePolicyInfoDTO body, String ifMatch, MessageContext messageContext) {
try {
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
boolean isSoapToRESTApi = SOAPOperationBindingUtils
.isSOAPToRESTApi(apiIdentifier.getApiName(), apiIdentifier.getVersion(),
apiIdentifier.getProviderName());
if (isSoapToRESTApi) {
if (StringUtils.isEmpty(resourcePolicyId)) {
String errorMessage = "Resource id should not be empty to update a resource policy.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
boolean isValidSchema = RestApiPublisherUtils.validateXMLSchema(body.getContent());
if (isValidSchema) {
SequenceUtils
.updateResourcePolicyFromRegistryResourceId(apiIdentifier, resourcePolicyId, body.getContent());
String updatedPolicyContent = SequenceUtils
.getResourcePolicyFromRegistryResourceId(apiIdentifier, resourcePolicyId);
ResourcePolicyInfoDTO resourcePolicyInfoDTO = APIMappingUtil
.fromResourcePolicyStrToInfoDTO(updatedPolicyContent);
return Response.ok().entity(resourcePolicyInfoDTO).build();
} else {
String errorMessage =
"Error while validating the resource policy xml content for the API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, log);
}
} else {
String errorMessage = "The provided api with id: " + apiId + " is not a soap to rest converted api.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
} catch (APIManagementException e) {
String errorMessage = "Error while retrieving the API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Get total revenue for a given API from all its' subscriptions
*
* @param apiId API ID
* @param messageContext message context
* @return revenue data for a given API
*/
@Override
public Response apisApiIdRevenueGet(String apiId, MessageContext messageContext) {
if (StringUtils.isBlank(apiId)) {
String errorMessage = "API ID cannot be empty or null when getting revenue details.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
Monetization monetizationImplementation = apiProvider.getMonetizationImplClass();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
API api = apiProvider.getAPI(apiIdentifier);
if (!APIConstants.PUBLISHED.equalsIgnoreCase(api.getStatus())) {
String errorMessage = "API " + apiIdentifier.getApiName() +
" should be in published state to get total revenue.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
Map<String, String> revenueUsageData = monetizationImplementation.getTotalRevenue(api, apiProvider);
APIRevenueDTO apiRevenueDTO = new APIRevenueDTO();
apiRevenueDTO.setProperties(revenueUsageData);
return Response.ok().entity(apiRevenueDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Failed to retrieve revenue data for API ID : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, log);
} catch (MonetizationException e) {
String errorMessage = "Failed to get current revenue data for API ID : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, log);
}
return null;
}
@Override
public Response apisApiIdScopesGet(String apiId, String ifNoneMatch, MessageContext messageContext) {
// do some magic!
return Response.ok().entity("magic!").build();
}
@Override
public Response apisApiIdScopesNameDelete(String apiId, String name, String ifMatch, MessageContext messageContext) {
// do some magic!
return Response.ok().entity("magic!").build();
}
@Override
public Response apisApiIdScopesNameGet(String apiId, String name, String ifNoneMatch, MessageContext messageContext) {
// do some magic!
return Response.ok().entity("magic!").build();
}
@Override
public Response apisApiIdScopesNamePut(String apiId, String name, ScopeDTO body, String ifMatch,
MessageContext messageContext) {
// do some magic!
return Response.ok().entity("magic!").build();
}
@Override
public Response apisApiIdScopesPost(String apiId, ScopeDTO body, String ifMatch, MessageContext messageContext) {
// do some magic!
return Response.ok().entity("magic!").build();
}
/**
* Retrieves the swagger document of an API
*
* @param apiId API identifier
* @param ifNoneMatch If-None-Match header value
* @return Swagger document of the API
*/
@Override
public Response apisApiIdSwaggerGet(String apiId, String ifNoneMatch, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
String apiSwagger = apiProvider.getOpenAPIDefinition(apiIdentifier);
return Response.ok().entity(apiSwagger).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil
.handleAuthorizationFailure("Authorization failure while retrieving swagger of API : " + apiId,
e, log);
} else {
String errorMessage = "Error while retrieving swagger of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
    /**
     * Updates the swagger definition of an existing API.
     *
     * The incoming definition is validated first; the API's URI templates and scopes are then
     * re-derived from it, the API is updated, and finally the definition (augmented with
     * management info) is saved and re-read for the response.
     *
     * @param apiId API identifier
     * @param apiDefinition Swagger definition
     * @param ifMatch If-match header value
     * @param messageContext CXF message context
     * @return updated swagger document of the API
     */
    @Override
    public Response apisApiIdSwaggerPut(String apiId, String apiDefinition, String ifMatch, MessageContext messageContext) {
        try {
            // Reject structurally invalid definitions before touching the stored API.
            APIDefinitionValidationResponse response = OASParserUtil
                    .validateAPIDefinition(apiDefinition, true);
            if (!response.isValid()) {
                RestApiUtil.handleBadRequest(response.getErrorItems(), log);
            }
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            //this will fail if user does not have access to the API or the API does not exist
            API existingAPI = apiProvider.getAPIbyUUID(apiId, tenantDomain);
            APIDefinition oasParser = response.getParser();
            Set<URITemplate> uriTemplates = null;
            try {
                SwaggerData swaggerData = new SwaggerData(existingAPI);
                uriTemplates = oasParser.getURITemplates(swaggerData, response.getJsonContent());
            } catch (APIManagementException e) {
                // catch APIManagementException inside again to capture validation error
                RestApiUtil.handleBadRequest(e.getMessage(), log);
            }
            Set<Scope> scopes = oasParser.getScopes(apiDefinition);
            //validating scope roles
            for (Scope scope : scopes) {
                String roles = scope.getRoles();
                if (roles != null) {
                    // Roles are stored comma separated; every one must exist for this user.
                    for (String aRole : roles.split(",")) {
                        boolean isValidRole = APIUtil.isRoleNameExist(RestApiUtil.getLoggedInUsername(), aRole);
                        if (!isValidRole) {
                            String error = "Role '" + aRole + "' Does not exist.";
                            RestApiUtil.handleBadRequest(error, log);
                        }
                    }
                }
            }
            existingAPI.setUriTemplates(uriTemplates);
            existingAPI.setScopes(scopes);
            //Update API is called to update URITemplates and scopes of the API
            apiProvider.updateAPI(existingAPI);
            SwaggerData swaggerData = new SwaggerData(existingAPI);
            String updatedApiDefinition = oasParser.populateCustomManagementInfo(apiDefinition, swaggerData);
            apiProvider.saveSwagger20Definition(existingAPI.getId(), updatedApiDefinition);
            //retrieves the updated swagger definition
            String apiSwagger = apiProvider.getOpenAPIDefinition(existingAPI.getId());
            return Response.ok().entity(apiSwagger).build();
        } catch (APIManagementException e) {
            //Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
            // to expose the existence of the resource
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil.handleAuthorizationFailure(
                        "Authorization failure while updating swagger definition of API: " + apiId, e, log);
            } else {
                String errorMessage = "Error while retrieving API : " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        } catch (FaultGatewaysException e) {
            String errorMessage = "Error while updating API : " + apiId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }
/**
* Retrieves the thumbnail image of an API specified by API identifier
*
* @param apiId API Id
* @param ifNoneMatch If-None-Match header value
* @param messageContext If-Modified-Since header value
* @return Thumbnail image of the API
*/
@Override
public Response apisApiIdThumbnailGet(String apiId, String ifNoneMatch, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
ResourceFile thumbnailResource = apiProvider.getIcon(apiIdentifier);
if (thumbnailResource != null) {
return Response
.ok(thumbnailResource.getContent(), MediaType.valueOf(thumbnailResource.getContentType()))
.build();
} else {
return Response.noContent().build();
}
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the
// existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving thumbnail of API : " + apiId, e, log);
} else {
String errorMessage = "Error while retrieving thumbnail of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
    /**
     * Updates the thumbnail image of an API.
     *
     * Stores the uploaded image as a registry resource, sets the API's thumbnail URL, and then
     * re-applies URI templates and scopes from the stored swagger before updating the API.
     *
     * @param apiId API Id
     * @param fileInputStream content of the uploaded image
     * @param fileDetail multipart attachment metadata of the uploaded image
     * @param ifMatch If-Match header value
     * @param messageContext CXF message context
     * @return 201 Created with the thumbnail's relative path and media type
     */
    @Override
    public Response updateAPIThumbnail(String apiId, InputStream fileInputStream, Attachment fileDetail,
            String ifMatch, MessageContext messageContext) {
        try {
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            String fileName = fileDetail.getDataHandler().getName();
            // Prefer a content type guessed from the file name; fall back to the type
            // sent with the multipart attachment when the guess is blank.
            String fileContentType = URLConnection.guessContentTypeFromName(fileName);
            if (org.apache.commons.lang3.StringUtils.isBlank(fileContentType)) {
                fileContentType = fileDetail.getContentType().toString();
            }
            //this will fail if user does not have access to the API or the API does not exist
            API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
            ResourceFile apiImage = new ResourceFile(fileInputStream, fileContentType);
            String thumbPath = APIUtil.getIconPath(api.getId());
            String thumbnailUrl = apiProvider.addResourceFile(thumbPath, apiImage);
            api.setThumbnailUrl(APIUtil.prependTenantPrefix(thumbnailUrl, api.getId().getProviderName()));
            APIUtil.setResourcePermissions(api.getId().getProviderName(), null, null, thumbPath);
            //Creating URI templates due to available uri templates in returned api object only kept single template
            //for multiple http methods
            String apiSwaggerDefinition = apiProvider.getOpenAPIDefinition(api.getId());
            if (!org.apache.commons.lang3.StringUtils.isEmpty(apiSwaggerDefinition)) {
                Optional<APIDefinition> definitionOptional = OASParserUtil.getOASParser(apiSwaggerDefinition);
                if(!definitionOptional.isPresent()) {
                    RestApiUtil.handleInternalServerError("Error occurred while getting swagger parser.", log);
                    return null;
                }
                APIDefinition apiDefinition = definitionOptional.get();
                SwaggerData swaggerData = new SwaggerData(api);
                Set<URITemplate> uriTemplates = apiDefinition.getURITemplates(swaggerData, apiSwaggerDefinition);
                api.setUriTemplates(uriTemplates);
                // scopes
                Set<Scope> scopes = apiDefinition.getScopes(apiSwaggerDefinition);
                api.setScopes(scopes);
            }
            apiProvider.updateAPI(api);
            String uriString = RestApiConstants.RESOURCE_PATH_THUMBNAIL
                    .replace(RestApiConstants.APIID_PARAM, apiId);
            URI uri = new URI(uriString);
            FileInfoDTO infoDTO = new FileInfoDTO();
            infoDTO.setRelativePath(uriString);
            infoDTO.setMediaType(apiImage.getContentType());
            return Response.created(uri).entity(infoDTO).build();
        } catch (APIManagementException e) {
            //Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the
            // existence of the resource
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil
                        .handleAuthorizationFailure("Authorization failure while adding thumbnail for API : " + apiId,
                                e, log);
            } else {
                String errorMessage = "Error while retrieving thumbnail of API : " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        } catch (URISyntaxException e) {
            String errorMessage = "Error while retrieving thumbnail location of API: " + apiId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        } catch (FaultGatewaysException e) {
            //This is logged and process is continued because icon is optional for an API
            log.error("Failed to update API after adding icon. ", e);
        } finally {
            IOUtils.closeQuietly(fileInputStream);
        }
        return null;
    }
@Override
public Response apisApiIdResourcePathsGet(String apiId, Integer limit, Integer offset, String ifNoneMatch,
MessageContext messageContext) {
try {
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
List<ResourcePath> apiResourcePaths = apiProvider.getResourcePathsOfAPI(apiIdentifier);
ResourcePathListDTO dto = APIMappingUtil.fromResourcePathListToDTO(apiResourcePaths, limit, offset);
APIMappingUtil.setPaginationParamsForAPIResourcePathList(dto, offset, limit, apiResourcePaths.size());
return Response.ok().entity(dto).build();
} catch (APIManagementException e) {
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving resource paths of API : " + apiId, e, log);
} else {
String errorMessage = "Error while retrieving resource paths of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
* Validate API Definition and retrieve as the response
*
* @param url URL of the OpenAPI definition
* @param fileInputStream InputStream for the provided file
* @param fileDetail File meta-data
* @param returnContent Whether to return the definition content
* @param messageContext CXF message context
* @return API Definition validation response
*/
@Override
public Response validateOpenAPIDefinition(String url, InputStream fileInputStream, Attachment fileDetail,
Boolean returnContent, MessageContext messageContext) {
// Validate and retrieve the OpenAPI definition
Map validationResponseMap = null;
try {
validationResponseMap = validateOpenAPIDefinition(url, fileInputStream, returnContent);
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError("Error occurred while validating API Definition", e, log);
}
OpenAPIDefinitionValidationResponseDTO validationResponseDTO =
(OpenAPIDefinitionValidationResponseDTO)validationResponseMap.get(RestApiConstants.RETURN_DTO);
return Response.ok().entity(validationResponseDTO).build();
}
    /**
     * Importing an OpenAPI definition and create an API.
     *
     * The definition is validated first; the additional properties JSON is then parsed into an
     * APIDTO (HTTP type only), the API is created, and the definition is regenerated around the
     * payload's operations before being saved.
     *
     * @param fileInputStream InputStream for the provided file
     * @param fileDetail File meta-data
     * @param url URL of the OpenAPI definition
     * @param additionalProperties API object (json) including additional properties like name, version, context
     * @param messageContext CXF message context
     * @return API Import using OpenAPI definition response
     */
    @Override
    public Response importOpenAPIDefinition(InputStream fileInputStream, Attachment fileDetail, String url,
            String additionalProperties, MessageContext messageContext) {
        // Validate and retrieve the OpenAPI definition
        Map validationResponseMap = null;
        try {
            validationResponseMap = validateOpenAPIDefinition(url, fileInputStream, true);
        } catch (APIManagementException e) {
            // handleInternalServerError throws, so the null map is never dereferenced below.
            RestApiUtil.handleInternalServerError("Error occurred while validating API Definition", e, log);
        }
        OpenAPIDefinitionValidationResponseDTO validationResponseDTO =
                (OpenAPIDefinitionValidationResponseDTO) validationResponseMap.get(RestApiConstants.RETURN_DTO);
        APIDefinitionValidationResponse validationResponse =
                (APIDefinitionValidationResponse) validationResponseMap.get(RestApiConstants.RETURN_MODEL);
        if (!validationResponseDTO.isIsValid()) {
            ErrorDTO errorDTO = APIMappingUtil.getErrorDTOFromErrorListItems(validationResponseDTO.getErrors());
            throw RestApiUtil.buildBadRequestException(errorDTO);
        }
        // Convert the 'additionalProperties' json into an APIDTO object
        ObjectMapper objectMapper = new ObjectMapper();
        APIDTO apiDTOFromProperties;
        try {
            apiDTOFromProperties = objectMapper.readValue(additionalProperties, APIDTO.class);
        } catch (IOException e) {
            throw RestApiUtil.buildBadRequestException("Error while parsing 'additionalProperties'", e);
        }
        // Only HTTP type APIs should be allowed
        if (!APIDTO.TypeEnum.HTTP.equals(apiDTOFromProperties.getType())) {
            throw RestApiUtil.buildBadRequestException("The API's type should only be HTTP when " +
                    "importing an OpenAPI definition");
        }
        // Import the API and Definition
        try {
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            API apiToAdd = prepareToCreateAPIByDTO(apiDTOFromProperties);
            String definitionToAdd;
            // When the payload declares operations, the definition's paths are synced to them.
            boolean syncOperations = apiDTOFromProperties.getOperations().size() > 0;
            // Rearrange paths according to the API payload and save the OpenAPI definition
            APIDefinition apiDefinition = validationResponse.getParser();
            SwaggerData swaggerData = new SwaggerData(apiToAdd);
            definitionToAdd = apiDefinition.generateAPIDefinition(swaggerData,
                    validationResponse.getJsonContent(), syncOperations);
            Set<URITemplate> uriTemplates = apiDefinition.getURITemplates(swaggerData, definitionToAdd);
            Set<Scope> scopes = apiDefinition.getScopes(definitionToAdd);
            apiToAdd.setUriTemplates(uriTemplates);
            apiToAdd.setScopes(scopes);
            // adding the API and definition
            apiProvider.addAPI(apiToAdd);
            apiProvider.saveSwaggerDefinition(apiToAdd, definitionToAdd);
            // retrieving the added API for returning as the response
            API addedAPI = apiProvider.getAPI(apiToAdd.getId());
            APIDTO createdApiDTO = APIMappingUtil.fromAPItoDTO(addedAPI);
            // This URI used to set the location header of the POST response
            URI createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId());
            return Response.created(createdApiUri).entity(createdApiDTO).build();
        } catch (APIManagementException e) {
            String errorMessage = "Error while adding new API : " + apiDTOFromProperties.getProvider() + "-" +
                    apiDTOFromProperties.getName() + "-" + apiDTOFromProperties.getVersion() + " - " + e.getMessage();
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        } catch (URISyntaxException e) {
            String errorMessage = "Error while retrieving API location : " + apiDTOFromProperties.getProvider() + "-" +
                    apiDTOFromProperties.getName() + "-" + apiDTOFromProperties.getVersion();
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }
/**
* Validate a provided WSDL definition via a URL or a file/zip
*
* @param url WSDL URL
* @param fileInputStream file/zip input stream
* @param fileDetail file/zip details
* @param messageContext messageContext object
* @return WSDL validation response
* @throws APIManagementException when error occurred during validation
*/
@Override
public Response validateWSDLDefinition(String url, InputStream fileInputStream, Attachment fileDetail,
MessageContext messageContext) throws APIManagementException {
handleInvalidParams(fileInputStream, url);
WSDLValidationResponseDTO responseDTO;
WSDLValidationResponse validationResponse = new WSDLValidationResponse();
if (url != null) {
try {
URL wsdlUrl = new URL(url);
validationResponse = APIMWSDLReader.validateWSDLUrl(wsdlUrl);
} catch (MalformedURLException e) {
RestApiUtil.handleBadRequest("Invalid/Malformed URL : " + url, log);
}
} else if (fileInputStream != null) {
String filename = fileDetail.getContentDisposition().getFilename();
try {
if (filename.endsWith(".zip")) {
validationResponse =
APIMWSDLReader.extractAndValidateWSDLArchive(fileInputStream);
} else if (filename.endsWith(".wsdl")) {
validationResponse = APIMWSDLReader.validateWSDLFile(fileInputStream);
} else {
RestApiUtil.handleBadRequest("Unsupported extension type of file: " + filename, log);
}
} catch (APIManagementException e) {
String errorMessage = "Internal error while validating the WSDL from file:" + filename;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
responseDTO =
APIMappingUtil.fromWSDLValidationResponseToDTO(validationResponse);
return Response.ok().entity(responseDTO).build();
}
@Override
public Response importWSDLDefinition(InputStream fileInputStream, Attachment fileDetail, String url,
String additionalProperties, String implementationType, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
APIDTO additionalPropertiesAPI = null;
APIDTO createdApiDTO;
URI createdApiUri;
// Minimum requirement name, version, context and endpointConfig.
additionalPropertiesAPI = new ObjectMapper().readValue(additionalProperties, APIDTO.class);
additionalPropertiesAPI.setProvider(RestApiUtil.getLoggedInUsername());
additionalPropertiesAPI.setType(APIDTO.TypeEnum.SOAPTOREST);
API apiToAdd = prepareToCreateAPIByDTO(additionalPropertiesAPI);
//adding the api
apiProvider.addAPI(apiToAdd);
boolean isSoapToRestConvertedApi = APIDTO.TypeEnum.SOAPTOREST.equals(implementationType);
// TODO: First-cut only support URL SOAPToREST remove this todo if it's not
if (isSoapToRestConvertedApi && StringUtils.isNotBlank(url)) {
if (StringUtils.isNotBlank(url)) {
String swaggerStr = SOAPOperationBindingUtils.getSoapOperationMapping(url);
apiProvider.saveSwagger20Definition(apiToAdd.getId(), swaggerStr);
SequenceGenerator.generateSequencesFromSwagger(swaggerStr, new Gson().toJson(additionalPropertiesAPI));
} else {
String errorMessage =
"Error while generating the swagger since the wsdl url is null for: " + apiProvider;
RestApiUtil.handleInternalServerError(errorMessage, log);
}
}
APIIdentifier createdApiId = apiToAdd.getId();
//Retrieve the newly added API to send in the response payload
API createdApi = apiProvider.getAPI(createdApiId);
createdApiDTO = APIMappingUtil.fromAPItoDTO(createdApi);
//This URI used to set the location header of the POST response
createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId());
return Response.created(createdApiUri).entity(createdApiDTO).build();
} catch (APIManagementException | IOException | URISyntaxException e) {
return Response.serverError().entity(e.getMessage()).build();
}
}
@Override
public Response apisApiIdWsdlGet(String apiId, String ifNoneMatch, MessageContext messageContext) {
// do some magic!
return Response.ok().entity("magic!").build();
}
@Override
public Response apisApiIdWsdlPut(String apiId, InputStream fileInputStream, Attachment fileDetail,
String ifMatch, MessageContext messageContext) {
// do some magic!
return Response.ok().entity("magic!").build();
}
@Override
public Response apisChangeLifecyclePost(String action, String apiId, String lifecycleChecklist,
String ifMatch, MessageContext messageContext) {
//pre-processing
String[] checkListItems = lifecycleChecklist != null ? lifecycleChecklist.split(",") : new String[0];
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
Map<String, Object> apiLCData = apiProvider.getAPILifeCycleData(apiIdentifier);
String[] nextAllowedStates = (String[]) apiLCData.get(APIConstants.LC_NEXT_STATES);
if (!ArrayUtils.contains(nextAllowedStates, action)) {
RestApiUtil.handleBadRequest(
"Action '" + action + "' is not allowed. Allowed actions are " + Arrays
.toString(nextAllowedStates), log);
}
//check and set lifecycle check list items including "Deprecate Old Versions" and "Require Re-Subscription".
for (String checkListItem : checkListItems) {
String[] attributeValPair = checkListItem.split(":");
if (attributeValPair.length == 2) {
String checkListItemName = attributeValPair[0].trim();
boolean checkListItemValue = Boolean.valueOf(attributeValPair[1].trim());
apiProvider.checkAndChangeAPILCCheckListItem(apiIdentifier, checkListItemName, checkListItemValue);
}
}
//todo: check if API's tiers are properly set before Publishing
APIStateChangeResponse stateChangeResponse = apiProvider.changeLifeCycleStatus(apiIdentifier, action);
//returns the current lifecycle state
LifecycleStateDTO stateDTO = getLifecycleState(apiId);;
WorkflowResponseDTO workflowResponseDTO = APIMappingUtil
.toWorkflowResponseDTO(stateDTO, stateChangeResponse);
return Response.ok().entity(workflowResponseDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while updating the lifecycle of API " + apiId, e, log);
} else {
RestApiUtil.handleInternalServerError("Error while updating lifecycle of API " + apiId, e, log);
}
} catch (FaultGatewaysException e) {
String errorMessage = "Error while updating the API in Gateway " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response apisCopyApiPost(String newVersion, String apiId, Boolean defaultVersion,
MessageContext messageContext) {
URI newVersionedApiUri;
APIDTO newVersionedApi;
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
APIIdentifier apiIdentifier = api.getId();
if (defaultVersion) {
api.setAsDefaultVersion(true);
}
//creates the new version
apiProvider.createNewAPIVersion(api, newVersion);
//get newly created API to return as response
APIIdentifier apiNewVersionedIdentifier =
new APIIdentifier(apiIdentifier.getProviderName(), apiIdentifier.getApiName(), newVersion);
newVersionedApi = APIMappingUtil.fromAPItoDTO(apiProvider.getAPI(apiNewVersionedIdentifier));
//This URI used to set the location header of the POST response
newVersionedApiUri =
new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + newVersionedApi.getId());
return Response.created(newVersionedApiUri).entity(newVersionedApi).build();
} catch (APIManagementException | DuplicateAPIException e) {
if (RestApiUtil.isDueToResourceAlreadyExists(e)) {
String errorMessage = "Requested new version " + newVersion + " of API " + apiId + " already exists";
RestApiUtil.handleResourceAlreadyExistsError(errorMessage, e, log);
} else if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("Authorization failure while copying API : " + apiId, e, log);
} else {
String errorMessage = "Error while copying API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving API location of " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response apisHead(String query, String ifNoneMatch, MessageContext messageContext) {
// do some magic!
return Response.ok().entity("magic!").build();
}
/**
* Import a GraphQL Schema
* @param type APIType
* @param fileInputStream input file
* @param fileDetail file Detail
* @param additionalProperties api object as string format
* @param ifMatch If--Match header value
* @param messageContext messageContext
* @return Response with GraphQL API
*/
@Override
public Response apisImportGraphqlSchemaPost(String type, InputStream fileInputStream, Attachment fileDetail,
String additionalProperties, String ifMatch,
MessageContext messageContext) {
APIDTO additionalPropertiesAPI = null;
String schema = "";
try {
if (fileInputStream == null || StringUtils.isBlank(additionalProperties)) {
String errorMessage = "GraphQL schema and api details cannot be empty.";
RestApiUtil.handleBadRequest(errorMessage, log);
} else {
schema = IOUtils.toString(fileInputStream, RestApiConstants.CHARSET);
}
if (!StringUtils.isBlank(additionalProperties) && !StringUtils.isBlank(schema)) {
if (log.isDebugEnabled()) {
log.debug("Deseriallizing additionalProperties: " + additionalProperties + "/n"
+ "importing schema: " + schema);
}
}
additionalPropertiesAPI = new ObjectMapper().readValue(additionalProperties, APIDTO.class);
additionalPropertiesAPI.setType(APIDTO.TypeEnum.GRAPHQL);
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
API apiToAdd = prepareToCreateAPIByDTO(additionalPropertiesAPI);
//adding the api
apiProvider.addAPI(apiToAdd);
//Save swagger definition of graphQL
APIDefinitionFromOpenAPISpec apiDefinitionUsingOASParser = new APIDefinitionFromOpenAPISpec();
SwaggerData swaggerData = new SwaggerData(apiToAdd);
String apiDefinition = apiDefinitionUsingOASParser.generateAPIDefinition(swaggerData);
apiProvider.saveSwagger20Definition(apiToAdd.getId(), apiDefinition);
APIIdentifier createdApiId = apiToAdd.getId();
apiProvider.saveGraphqlSchemaDefinition(apiToAdd, schema);
//Retrieve the newly added API to send in the response payload
API createdApi = apiProvider.getAPI(createdApiId);
APIDTO createdApiDTO = APIMappingUtil.fromAPItoDTO(createdApi);
//This URI used to set the location header of the POST response
URI createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId());
return Response.created(createdApiUri).entity(createdApiDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Error while adding new API : " + additionalPropertiesAPI.getProvider() + "-" +
additionalPropertiesAPI.getName() + "-" + additionalPropertiesAPI.getVersion() + " - " + e.getMessage();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving API location : " + additionalPropertiesAPI.getProvider() + "-" +
additionalPropertiesAPI.getName() + "-" + additionalPropertiesAPI.getVersion();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (IOException e) {
String errorMessage = "Error while retrieving content from file : " + additionalPropertiesAPI.getProvider()
+ "-" + additionalPropertiesAPI.getName() + "-" + additionalPropertiesAPI.getVersion()
+ "-" /*+ body.getEndpointConfig()*/;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Validate graphQL Schema
* @param fileInputStream input file
* @param fileDetail file Detail
* @param messageContext messageContext
* @return Validation response
*/
@Override
public Response apisValidateGraphqlSchemaPost(InputStream fileInputStream, Attachment fileDetail, MessageContext messageContext) {
String errorMessage = "";
String schema;
TypeDefinitionRegistry typeRegistry;
Set<SchemaValidationError> validationErrors;
boolean isValid = false;
SchemaParser schemaParser = new SchemaParser();
GraphQLValidationResponseDTO validationResponse = new GraphQLValidationResponseDTO();
try {
schema = IOUtils.toString(fileInputStream, RestApiConstants.CHARSET);
if (schema.isEmpty()) {
errorMessage = "GraphQL Schema cannot be empty or null to validate it";
RestApiUtil.handleBadRequest(errorMessage, log);
}
typeRegistry = schemaParser.parse(schema);
GraphQLSchema graphQLSchema = UnExecutableSchemaGenerator.makeUnExecutableSchema(typeRegistry);
SchemaValidator schemaValidation = new SchemaValidator();
validationErrors = schemaValidation.validateSchema(graphQLSchema);
if (validationErrors.toArray().length > 0) {
errorMessage = "InValid Schema";
} else {
isValid = true;
validationResponse.setIsValid(isValid);
GraphQLValidationResponseGraphQLInfoDTO graphQLInfo = new GraphQLValidationResponseGraphQLInfoDTO();
List<APIOperationsDTO> operationArray = extractGraphQLOperationList(schema);
graphQLInfo.setOperations(operationArray);
GraphQLSchemaDTO schemaObj = new GraphQLSchemaDTO();
schemaObj.setSchemaDefinition(schema);
graphQLInfo.setGraphQLSchema(schemaObj);
validationResponse.setGraphQLInfo(graphQLInfo);
}
} catch (SchemaProblem | IOException e) {
errorMessage = e.getMessage();
}
if(!isValid) {
validationResponse.setIsValid(isValid);
validationResponse.setErrorMessage(errorMessage);
}
return Response.ok().entity(validationResponse).build();
}
/**
* Extract GraphQL Operations from given schema
* @param schema graphQL Schema
* @return the arrayList of APIOperationsDTO
*/
private List<APIOperationsDTO> extractGraphQLOperationList(String schema) {
List<APIOperationsDTO> operationArray = new ArrayList<>();
SchemaParser schemaParser = new SchemaParser();
TypeDefinitionRegistry typeRegistry = schemaParser.parse(schema);
Map<java.lang.String, graphql.language.TypeDefinition> operationList = typeRegistry.types();
for (Map.Entry<String, TypeDefinition> entry : operationList.entrySet()) {
if (entry.getValue().getName().equals(APIConstants.GRAPHQL_QUERY) ||
entry.getValue().getName().equals(APIConstants.GRAPHQL_MUTATION)
|| entry.getValue().getName().equals(APIConstants.GRAPHQL_SUBSCRIPTION)) {
for (FieldDefinition fieldDef : ((ObjectTypeDefinition) entry.getValue()).getFieldDefinitions()) {
APIOperationsDTO operation = new APIOperationsDTO();
operation.setVerb(entry.getKey());
operation.setTarget(fieldDef.getName());
operationArray.add(operation);
}
}
}
return operationArray;
}
@Override
public Response apisApiIdSubscriptionPoliciesGet(String apiId, String ifNoneMatch, String xWSO2Tenant,
MessageContext messageContext) {
APIDTO apiInfo = getAPIByID(apiId);
List<Tier> availableThrottlingPolicyList = new ThrottlingPoliciesApiServiceImpl()
.getThrottlingPolicyList(ThrottlingPolicyDTO.PolicyLevelEnum.SUBSCRIPTION.toString());
if (apiInfo != null ) {
List<String> apiPolicies = apiInfo.getPolicies();
if (apiPolicies != null && !apiPolicies.isEmpty()) {
List<Tier> apiThrottlingPolicies = new ArrayList<>();
for (Tier tier : availableThrottlingPolicyList) {
if (apiPolicies.contains(tier.getName())) {
apiThrottlingPolicies.add(tier);
}
}
return Response.ok().entity(apiThrottlingPolicies).build();
}
}
return null;
}
private APIDTO getAPIByID(String apiId) {
try {
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
return APIMappingUtil.fromAPItoDTO(api);
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("User is not authorized to access the API", e, log);
} else {
String errorMessage = "Error while retrieving API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
* Validate the provided OpenAPI definition (via file or url) and return the validation response DTO
*
* @param url OpenAPI definition url
* @param fileInputStream file as input stream
* @param returnContent whether to return the content of the definition in the response DTO
* @return Map with the validation response information. A value with key 'dto' will have the response DTO
* of type OpenAPIDefinitionValidationResponseDTO for the REST API. A value with key 'model' will have the
* validation response of type APIDefinitionValidationResponse coming from the impl level.
*/
private Map validateOpenAPIDefinition(String url, InputStream fileInputStream, Boolean returnContent)
throws APIManagementException {
handleInvalidParams(fileInputStream, url);
OpenAPIDefinitionValidationResponseDTO responseDTO;
APIDefinitionValidationResponse validationResponse = new APIDefinitionValidationResponse();
if (url != null) {
validationResponse = OASParserUtil.validateAPIDefinitionByURL(url, returnContent);
} else if (fileInputStream != null) {
try {
String openAPIContent = IOUtils.toString(fileInputStream, RestApiConstants.CHARSET);
validationResponse = OASParserUtil.validateAPIDefinition(openAPIContent, returnContent);
} catch (IOException e) {
RestApiUtil.handleInternalServerError("Error while reading file content", e, log);
}
}
responseDTO = APIMappingUtil.getOpenAPIDefinitionValidationResponseFromModel(validationResponse,
returnContent);
Map response = new HashMap();
response.put(RestApiConstants.RETURN_MODEL, validationResponse);
response.put(RestApiConstants.RETURN_DTO, responseDTO);
return response;
}
/**
* Validate API import definition/validate definition parameters
*
* @param fileInputStream file content stream
* @param url URL of the definition
*/
private void handleInvalidParams(InputStream fileInputStream, String url) {
String msg = "";
if (url == null && fileInputStream == null) {
msg = "Either 'file' or 'url' should be specified";
}
if (fileInputStream != null && url != null) {
msg = "Only one of 'file' and 'url' should be specified";
}
if (StringUtils.isNotBlank(msg)) {
RestApiUtil.handleBadRequest(msg, log);
}
}
/**
* This method is used to assign micro gateway labels to the DTO
*
* @param apiDTO API DTO
* @param api the API object
* @return the API object with labels
*/
private API assignLabelsToDTO(APIDTO apiDTO, API api) {
if (apiDTO.getLabels() != null) {
List<LabelDTO> dtoLabels = apiDTO.getLabels();
List<Label> labelList = new ArrayList<>();
for (LabelDTO labelDTO : dtoLabels) {
Label label = new Label();
label.setName(labelDTO.getName());
// label.setDescription(labelDTO.getDescription()); todo add description
labelList.add(label);
}
api.setGatewayLabels(labelList);
}
return api;
}
/**
* To check whether a particular exception is due to access control restriction.
*
* @param e Exception object.
* @return true if the the exception is caused due to authorization failure.
*/
private boolean isAuthorizationFailure(Exception e) {
String errorMessage = e.getMessage();
return errorMessage != null && errorMessage.contains(APIConstants.UN_AUTHORIZED_ERROR_MESSAGE);
}
}
| components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher.v1/src/main/java/org/wso2/carbon/apimgt/rest/api/publisher/v1/impl/ApisApiServiceImpl.java | /*
* Copyright (c) 2019 WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.rest.api.publisher.v1.impl;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import graphql.language.FieldDefinition;
import graphql.language.ObjectTypeDefinition;
import graphql.language.TypeDefinition;
import graphql.schema.GraphQLSchema;
import graphql.schema.idl.SchemaParser;
import graphql.schema.idl.TypeDefinitionRegistry;
import graphql.schema.idl.UnExecutableSchemaGenerator;
import graphql.schema.idl.errors.SchemaProblem;
import graphql.schema.validation.SchemaValidationError;
import graphql.schema.validation.SchemaValidator;
import org.apache.axiom.util.base64.Base64Utils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.cxf.jaxrs.ext.MessageContext;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.cxf.phase.PhaseInterceptorChain;
import org.apache.cxf.jaxrs.ext.multipart.Attachment;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.wso2.carbon.apimgt.api.APIDefinition;
import org.wso2.carbon.apimgt.api.APIDefinitionValidationResponse;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.APIProvider;
import org.wso2.carbon.apimgt.api.FaultGatewaysException;
import org.wso2.carbon.apimgt.api.MonetizationException;
import org.wso2.carbon.apimgt.api.model.API;
import org.wso2.carbon.apimgt.api.model.APIIdentifier;
import org.wso2.carbon.apimgt.api.model.APIStateChangeResponse;
import org.wso2.carbon.apimgt.api.model.APIStore;
import org.wso2.carbon.apimgt.api.model.AccessTokenInfo;
import org.wso2.carbon.apimgt.api.model.Documentation;
import org.wso2.carbon.apimgt.api.model.DuplicateAPIException;
import org.wso2.carbon.apimgt.api.model.KeyManager;
import org.wso2.carbon.apimgt.api.model.Label;
import org.wso2.carbon.apimgt.api.model.LifeCycleEvent;
import org.wso2.carbon.apimgt.api.model.Monetization;
import org.wso2.carbon.apimgt.api.model.ResourceFile;
import org.wso2.carbon.apimgt.api.model.ResourcePath;
import org.wso2.carbon.apimgt.api.model.Scope;
import org.wso2.carbon.apimgt.api.model.SubscribedAPI;
import org.wso2.carbon.apimgt.api.model.SwaggerData;
import org.wso2.carbon.apimgt.api.model.Tier;
import org.wso2.carbon.apimgt.api.model.URITemplate;
import org.wso2.carbon.apimgt.api.model.policy.APIPolicy;
import org.wso2.carbon.apimgt.api.model.policy.Policy;
import org.wso2.carbon.apimgt.api.model.policy.PolicyConstants;
import org.wso2.carbon.apimgt.impl.APIConstants;
import org.wso2.carbon.apimgt.impl.APIManagerConfiguration;
import org.wso2.carbon.apimgt.impl.GZIPUtils;
import org.wso2.carbon.apimgt.impl.dao.ApiMgtDAO;
import org.wso2.carbon.apimgt.impl.definitions.APIDefinitionFromOpenAPISpec;
import org.wso2.carbon.apimgt.impl.definitions.OAS2Parser;
import org.wso2.carbon.apimgt.impl.definitions.OAS3Parser;
import org.wso2.carbon.apimgt.impl.definitions.OASParserUtil;
import org.wso2.carbon.apimgt.impl.factory.KeyManagerHolder;
import org.wso2.carbon.apimgt.impl.internal.ServiceReferenceHolder;
import org.wso2.carbon.apimgt.impl.utils.APIVersionStringComparator;
import org.wso2.carbon.apimgt.impl.wsdl.SequenceGenerator;
import org.wso2.carbon.apimgt.impl.wsdl.model.WSDLValidationResponse;
import org.wso2.carbon.apimgt.impl.wsdl.util.SOAPOperationBindingUtils;
import org.wso2.carbon.apimgt.impl.utils.APIMWSDLReader;
import org.wso2.carbon.apimgt.impl.utils.APIUtil;
import org.wso2.carbon.apimgt.impl.wsdl.util.SequenceUtils;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.ApisApiService;
import java.io.*;
import java.lang.reflect.Field;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.net.URISyntaxException;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Map;
import org.apache.cxf.jaxrs.ext.multipart.Attachment;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.*;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.utils.RestApiPublisherUtils;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.utils.mappings.APIMappingUtil;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.utils.mappings.DocumentationMappingUtil;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.utils.mappings.ExternalStoreMappingUtil;
import org.wso2.carbon.apimgt.rest.api.util.RestApiConstants;
import org.wso2.carbon.apimgt.rest.api.util.dto.ErrorDTO;
import org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import java.util.Optional;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
public class ApisApiServiceImpl implements ApisApiService {
private static final Log log = LogFactory.getLog(ApisApiServiceImpl.class);
@Override
public Response apisGet(Integer limit, Integer offset, String xWSO2Tenant, String query,
String ifNoneMatch, Boolean expand, String accept ,String tenantDomain, MessageContext messageContext) {
List<API> allMatchedApis = new ArrayList<>();
APIListDTO apiListDTO;
//pre-processing
//setting default limit and offset values if they are not set
limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT;
offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT;
query = query == null ? "" : query;
expand = (expand != null && expand) ? true : false;
try {
String newSearchQuery = APIUtil.constructNewSearchQuery(query);
//revert content search back to normal search by name to avoid doc result complexity and to comply with REST api practices
if (newSearchQuery.startsWith(APIConstants.CONTENT_SEARCH_TYPE_PREFIX + "=")) {
newSearchQuery = newSearchQuery
.replace(APIConstants.CONTENT_SEARCH_TYPE_PREFIX + "=", APIConstants.NAME_TYPE_PREFIX + "=");
}
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
// We should send null as the provider, Otherwise searchAPIs will return all APIs of the provider
// instead of looking at type and query
String username = RestApiUtil.getLoggedInUsername();
tenantDomain = MultitenantUtils.getTenantDomain(APIUtil.replaceEmailDomainBack(username));
boolean migrationMode = Boolean.getBoolean(RestApiConstants.MIGRATION_MODE);
/*if (migrationMode) { // migration flow
if (!StringUtils.isEmpty(targetTenantDomain)) {
tenantDomain = targetTenantDomain;
}
RestApiUtil.handleMigrationSpecificPermissionViolations(tenantDomain, username);
}*/
Map<String, Object> result = apiProvider.searchPaginatedAPIs(newSearchQuery, tenantDomain,
offset, limit, false);
Set<API> apis = (Set<API>) result.get("apis");
allMatchedApis.addAll(apis);
apiListDTO = APIMappingUtil.fromAPIListToDTO(allMatchedApis, expand);
//Add pagination section in the response
Object totalLength = result.get("length");
Integer length = 0;
if (totalLength != null) {
length = (Integer) totalLength;
}
APIMappingUtil.setPaginationParams(apiListDTO, query, offset, limit, length);
if (APIConstants.APPLICATION_GZIP.equals(accept)) {
try {
File zippedResponse = GZIPUtils.constructZippedResponse(apiListDTO);
return Response.ok().entity(zippedResponse)
.header("Content-Disposition", "attachment").
header("Content-Encoding", "gzip").build();
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError(e.getMessage(), e, log);
}
} else {
return Response.ok().entity(apiListDTO).build();
}
} catch (APIManagementException e) {
String errorMessage = "Error while retrieving APIs";
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response apisPost(APIDTO body, String oasVersion, MessageContext messageContext) {
URI createdApiUri;
APIDTO createdApiDTO;
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String username = RestApiUtil.getLoggedInUsername();
boolean isGraphQL = APIDTO.TypeEnum.GRAPHQL == body.getType();
boolean isWSAPI = APIDTO.TypeEnum.WS == body.getType();
boolean isSoapToRestConvertedApi = APIDTO.TypeEnum.SOAPTOREST == body.getType();
// validate web socket api endpoint configurations
if (isWSAPI && !RestApiPublisherUtils.isValidWSAPI(body)) {
RestApiUtil.handleBadRequest("Endpoint URLs should be valid web socket URLs", log);
}
API apiToAdd = prepareToCreateAPIByDTO(body);
//adding the api
apiProvider.addAPI(apiToAdd);
if (isSoapToRestConvertedApi) {
if (StringUtils.isNotBlank(apiToAdd.getWsdlUrl())) {
String swaggerStr = SOAPOperationBindingUtils.getSoapOperationMapping(body.getWsdlUri());
apiProvider.saveSwaggerDefinition(apiToAdd, swaggerStr);
SequenceGenerator.generateSequencesFromSwagger(swaggerStr, new Gson().toJson(body));
} else {
String errorMessage =
"Error while generating the swagger since the wsdl url is null for: " + body.getProvider()
+ "-" + body.getName() + "-" + body.getVersion();
RestApiUtil.handleInternalServerError(errorMessage, log);
}
} else if (!isWSAPI) {
APIDefinition oasParser;
if(RestApiConstants.OAS_VERSION_2.equalsIgnoreCase(oasVersion)) {
oasParser = new OAS2Parser();
} else {
oasParser = new OAS3Parser();
}
SwaggerData swaggerData = new SwaggerData(apiToAdd);
String apiDefinition = oasParser.generateAPIDefinition(swaggerData);
apiProvider.saveSwaggerDefinition(apiToAdd, apiDefinition);
}
APIIdentifier createdApiId = apiToAdd.getId();
//Retrieve the newly added API to send in the response payload
API createdApi = apiProvider.getAPI(createdApiId);
createdApiDTO = APIMappingUtil.fromAPItoDTO(createdApi);
//This URI used to set the location header of the POST response
createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId());
return Response.created(createdApiUri).entity(createdApiDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Error while adding new API : " + body.getProvider() + "-" +
body.getName() + "-" + body.getVersion() + " - " + e.getMessage();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving API location : " + body.getProvider() + "-" +
body.getName() + "-" + body.getVersion();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
    /**
     * Prepares the API Model object to be created using the DTO object.
     *
     * Runs the full chain of create-time validations (security scheme, access-control roles,
     * additional properties, context, name/version/context uniqueness, tiers, throttling policy),
     * resolves the effective provider, and fills in defaults (status, owner, gateway labels,
     * API-level policy). Each validation delegates to a RestApiUtil handler that throws, so
     * reaching the end of this method means all checks passed.
     *
     * @param body APIDTO of the API
     * @return API object to be created
     * @throws APIManagementException Error while creating the API
     */
    private API prepareToCreateAPIByDTO(APIDTO body) throws APIManagementException {
        APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
        String username = RestApiUtil.getLoggedInUsername();
        List<String> apiSecuritySchemes = body.getSecurityScheme();//todo check list vs string
        // Mutual SSL may only be requested when the server has client-certificate auth configured
        if (!apiProvider.isClientCertificateBasedAuthenticationConfigured() && apiSecuritySchemes != null) {
            for (String apiSecurityScheme : apiSecuritySchemes) {
                if (apiSecurityScheme.contains(APIConstants.API_SECURITY_MUTUAL_SSL)) {
                    RestApiUtil.handleBadRequest("Mutual SSL Based authentication is not supported in this server", log);
                }
            }
        }
        // Access-control roles must exist in the user store
        if (body.getAccessControlRoles() != null) {
            String errorMessage = RestApiPublisherUtils.validateUserRoles(body.getAccessControlRoles());
            if (!errorMessage.isEmpty()) {
                RestApiUtil.handleBadRequest(errorMessage, log);
            }
        }
        if (body.getAdditionalProperties() != null) {
            String errorMessage = RestApiPublisherUtils
                    .validateAdditionalProperties(body.getAdditionalProperties());
            if (!errorMessage.isEmpty()) {
                RestApiUtil.handleBadRequest(errorMessage, log);
            }
        }
        if (body.getContext() == null) {
            RestApiUtil.handleBadRequest("Parameter: \"context\" cannot be null", log);
        } else if (body.getContext().endsWith("/")) {
            RestApiUtil.handleBadRequest("Context cannot end with '/' character", log);
        }
        // API names are unique case-insensitively
        if (apiProvider.isApiNameWithDifferentCaseExist(body.getName())) {
            RestApiUtil.handleBadRequest("Error occurred while adding API. API with name " + body.getName()
                    + " already exists.", log);
        }
        //Get all existing versions of api been adding
        List<String> apiVersions = apiProvider.getApiVersionsMatchingApiName(body.getName(), username);
        if (apiVersions.size() > 0) {
            //If any previous version exists
            for (String version : apiVersions) {
                if (version.equalsIgnoreCase(body.getVersion())) {
                    //If version already exists
                    if (apiProvider.isDuplicateContextTemplate(body.getContext())) {
                        RestApiUtil.handleResourceAlreadyExistsError("Error occurred while " +
                                "adding the API. A duplicate API already exists for "
                                + body.getName() + "-" + body.getVersion(), log);
                    } else {
                        RestApiUtil.handleBadRequest("Error occurred while adding API. API with name " +
                                body.getName() + " already exists with different " +
                                "context", log);
                    }
                }
            }
        } else {
            //If no any previous version exists
            if (apiProvider.isDuplicateContextTemplate(body.getContext())) {
                RestApiUtil.handleBadRequest("Error occurred while adding the API. A duplicate API context " +
                        "already exists for " + body.getContext(), log);
            }
        }
        //Check if the user has admin permission before applying a different provider than the current user
        String provider = body.getProvider();
        if (!StringUtils.isBlank(provider) && !provider.equals(username)) {
            if (!APIUtil.hasPermission(username, APIConstants.Permissions.APIM_ADMIN)) {
                // Non-admins cannot create APIs on behalf of someone else; silently fall back
                if (log.isDebugEnabled()) {
                    log.debug("User " + username + " does not have admin permission ("
                            + APIConstants.Permissions.APIM_ADMIN + ") hence provider (" +
                            provider + ") overridden with current user (" + username + ")");
                }
                provider = username;
            }
        } else {
            //Set username in case provider is null or empty
            provider = username;
        }
        List<String> tiersFromDTO = body.getPolicies();
        //check whether the added API's tiers are all valid
        Set<Tier> definedTiers = apiProvider.getTiers();
        List<String> invalidTiers = RestApiUtil.getInvalidTierNames(definedTiers, tiersFromDTO);
        if (invalidTiers.size() > 0) {
            RestApiUtil.handleBadRequest(
                    "Specified tier(s) " + Arrays.toString(invalidTiers.toArray()) + " are invalid", log);
        }
        // A declared API-level throttling policy must resolve to a known policy
        APIPolicy apiPolicy = apiProvider.getAPIPolicy(username, body.getApiThrottlingPolicy());
        if (apiPolicy == null && body.getApiThrottlingPolicy() != null) {
            RestApiUtil.handleBadRequest(
                    "Specified policy " + body.getApiThrottlingPolicy() + " is invalid", log);
        }
        API apiToAdd = APIMappingUtil.fromDTOtoAPI(body, provider);
        //Overriding some properties:
        //only allow CREATED as the stating state for the new api if not status is PROTOTYPED
        if (!APIConstants.PROTOTYPED.equals(apiToAdd.getStatus())) {
            apiToAdd.setStatus(APIConstants.CREATED);
        }
        //we are setting the api owner as the logged in user until we support checking admin privileges and assigning
        // the owner as a different user
        apiToAdd.setApiOwner(provider);
        //attach micro-geteway labels
        assignLabelsToDTO(body,apiToAdd);
        // set default API Level Policy: prefer the unlimited tier, else the first defined policy
        if (StringUtils.isBlank(apiToAdd.getApiLevelPolicy())) {
            Policy[] apiPolicies = apiProvider.getPolicies(username, PolicyConstants.POLICY_LEVEL_API);
            if (apiPolicies.length > 0) {
                for (Policy policy : apiPolicies) {
                    if (policy.getPolicyName().equals(APIConstants.UNLIMITED_TIER)) {
                        apiToAdd.setApiLevelPolicy(APIConstants.UNLIMITED_TIER);
                        break;
                    }
                }
                if (StringUtils.isBlank(apiToAdd.getApiLevelPolicy())) {
                    apiToAdd.setApiLevelPolicy(apiPolicies[0].getPolicyName());
                }
            }
        }
        return apiToAdd;
    }
@Override
public Response apisApiIdGet(String apiId, String xWSO2Tenant, String ifNoneMatch, MessageContext messageContext) {
APIDTO apiToReturn = getAPIByID(apiId);
return Response.ok().entity(apiToReturn).build();
}
/**
* Get GraphQL Schema of given API
*
* @param apiId apiId
* @param accept
* @param ifNoneMatch If--Match header value
* @param messageContext message context
* @return Response with GraphQL Schema
*/
@Override
public Response apisApiIdGraphqlSchemaGet(String apiId, String accept, String ifNoneMatch, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId,
tenantDomain);
String schemaContent = apiProvider.getGraphqlSchema(apiIdentifier);
GraphQLSchemaDTO dto = new GraphQLSchemaDTO();
dto.setSchemaDefinition(schemaContent);
dto.setName(apiIdentifier.getProviderName() + APIConstants.GRAPHQL_SCHEMA_PROVIDER_SEPERATOR +
apiIdentifier.getApiName() + apiIdentifier.getVersion() + APIConstants.GRAPHQL_SCHEMA_FILE_EXTENSION);
return Response.ok().entity(dto).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil
.handleAuthorizationFailure("Authorization failure while retrieving schema of API: " + apiId, e,
log);
} else {
String errorMessage = "Error while retrieving schema of API: " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
* Update GraphQL Schema
* @param apiId api Id
* @param schemaDefinition graphQL schema definition
* @param ifMatch
* @param messageContext
* @return
*/
@Override
public Response apisApiIdGraphqlSchemaPut(String apiId, String schemaDefinition, String ifMatch, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId,
tenantDomain);
API originalAPI = apiProvider.getAPIbyUUID(apiId, tenantDomain);
List<APIOperationsDTO> operationArray = extractGraphQLOperationList(schemaDefinition);
Set<URITemplate> uriTemplates = APIMappingUtil.getURITemplates(originalAPI, operationArray);
originalAPI.setUriTemplates(uriTemplates);
apiProvider.saveGraphqlSchemaDefinition(originalAPI, schemaDefinition);
apiProvider.updateAPI(originalAPI);
String schema = apiProvider.getGraphqlSchema(apiIdentifier);
return Response.ok().entity(schema).build();
} catch (APIManagementException | FaultGatewaysException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil
.handleAuthorizationFailure("Authorization failure while retrieving schema of API: " + apiId, e,
log);
} else {
String errorMessage = "Error while uploading schema of the API: " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
@Override
public Response apisApiIdPut(String apiId, APIDTO body, String ifMatch, MessageContext messageContext) {
APIDTO updatedApiDTO;
String[] tokenScopes =
(String[]) PhaseInterceptorChain.getCurrentMessage().getExchange().get(RestApiConstants.USER_REST_API_SCOPES);
// Validate if the USER_REST_API_SCOPES is not set in WebAppAuthenticator when scopes are validated
if (tokenScopes == null) {
RestApiUtil.handleInternalServerError("Error occurred while updating the API " + apiId +
" as the token information hasn't been correctly set internally", log);
return null;
}
try {
String username = RestApiUtil.getLoggedInUsername();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiUtil.getProvider(username);
API originalAPI = apiProvider.getAPIbyUUID(apiId, tenantDomain);
APIIdentifier apiIdentifier = originalAPI.getId();
boolean isWSAPI = originalAPI.getType() != null && APIConstants.APITransportType.WS == APIConstants.APITransportType
.valueOf(originalAPI.getType());
org.wso2.carbon.apimgt.rest.api.util.annotations.Scope[] apiDtoClassAnnotatedScopes =
APIDTO.class.getAnnotationsByType(org.wso2.carbon.apimgt.rest.api.util.annotations.Scope.class);
boolean hasClassLevelScope = checkClassScopeAnnotation(apiDtoClassAnnotatedScopes, tokenScopes);
if (!hasClassLevelScope) {
// Validate per-field scopes
body = getFieldOverriddenAPIDTO(body, originalAPI, tokenScopes);
}
//Overriding some properties:
body.setName(apiIdentifier.getApiName());
body.setVersion(apiIdentifier.getVersion());
body.setProvider(apiIdentifier.getProviderName());
body.setContext(originalAPI.getContextTemplate());
body.setLifeCycleStatus(originalAPI.getStatus());
body.setType(APIDTO.TypeEnum.fromValue(originalAPI.getType()));
// Validate API Security
List<String> apiSecurity = body.getSecurityScheme();
if (!apiProvider.isClientCertificateBasedAuthenticationConfigured() && apiSecurity != null && apiSecurity
.contains(APIConstants.API_SECURITY_MUTUAL_SSL)) {
RestApiUtil.handleBadRequest("Mutual SSL based authentication is not supported in this server.", log);
}
//validation for tiers
List<String> tiersFromDTO = body.getPolicies();
if (tiersFromDTO == null || tiersFromDTO.isEmpty()) {
RestApiUtil.handleBadRequest("No tier defined for the API", log);
}
//check whether the added API's tiers are all valid
Set<Tier> definedTiers = apiProvider.getTiers();
List<String> invalidTiers = RestApiUtil.getInvalidTierNames(definedTiers, tiersFromDTO);
if (invalidTiers.size() > 0) {
RestApiUtil.handleBadRequest(
"Specified tier(s) " + Arrays.toString(invalidTiers.toArray()) + " are invalid", log);
}
if (body.getAccessControlRoles() != null) {
String errorMessage = RestApiPublisherUtils.validateUserRoles(body.getAccessControlRoles());
if (!errorMessage.isEmpty()) {
RestApiUtil.handleBadRequest(errorMessage, log);
}
}
if (body.getAdditionalProperties() != null) {
String errorMessage = RestApiPublisherUtils
.validateAdditionalProperties(body.getAdditionalProperties());
if (!errorMessage.isEmpty()) {
RestApiUtil.handleBadRequest(errorMessage, log);
}
}
API apiToUpdate = APIMappingUtil.fromDTOtoAPI(body, apiIdentifier.getProviderName());
apiToUpdate.setThumbnailUrl(originalAPI.getThumbnailUrl());
            //attach micro-gateway labels
assignLabelsToDTO(body, apiToUpdate);
apiProvider.manageAPI(apiToUpdate);
if (!isWSAPI) {
String oldDefinition = apiProvider.getOpenAPIDefinition(apiIdentifier);
Optional<APIDefinition> definitionOptional = OASParserUtil.getOASParser(oldDefinition);
if(!definitionOptional.isPresent()) {
RestApiUtil.handleInternalServerError("Error occurred while getting swagger parser.", log);
return null;
}
APIDefinition apiDefinition = definitionOptional.get();
SwaggerData swaggerData = new SwaggerData(apiToUpdate);
String newDefinition = apiDefinition.generateAPIDefinition(swaggerData, oldDefinition,
true);
apiProvider.saveSwagger20Definition(apiToUpdate.getId(), newDefinition);
}
API updatedApi = apiProvider.getAPI(apiIdentifier);
updatedApiDTO = APIMappingUtil.fromAPItoDTO(updatedApi);
return Response.ok().entity(updatedApiDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("Authorization failure while updating API : " + apiId, e, log);
} else {
String errorMessage = "Error while updating API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (FaultGatewaysException e) {
String errorMessage = "Error while updating API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response apisApiIdAuditapiGet(String apiId, String accept, MessageContext messageContext) {
boolean isDebugEnabled = log.isDebugEnabled();
try {
String username = RestApiUtil.getLoggedInUsername();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiUtil.getProvider(username);
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
APIIdentifier apiIdentifier = api.getId();
String apiDefinition = apiProvider.getOpenAPIDefinition(apiIdentifier);
// Get configuration file and retrieve API token
APIManagerConfiguration config = ServiceReferenceHolder.getInstance()
.getAPIManagerConfigurationService().getAPIManagerConfiguration();
String apiToken = config.getFirstProperty(APIConstants.API_SECURITY_AUDIT_API_TOKEN);
// Retrieve the uuid from the database
String uuid = ApiMgtDAO.getInstance().getAuditApiId(apiIdentifier);
// Initiate JSON Parser
JSONParser parser = new JSONParser();
// TODO - Remove if not needed
// JSONObject jsonObject;
//
// // Parse JSON String of API Definition
// jsonObject = (JSONObject) parser.parse(apiDefinition);
if (uuid != null) {
// PUT Request
// Set the property to be attached in the body of the request
// Attach API Definition to property called specfile to be sent in the request
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("{\n");
stringBuilder.append(" \"specfile\": \"").append(Base64Utils.encode(apiDefinition.getBytes("UTF-8"))).append("\" \n");
stringBuilder.append("}");
// Logic for HTTP Request
String putUrl = "https://platform.42crunch.com/api/v1/apis/" + uuid;
org.apache.axis2.util.URL updateApiUrl = new org.apache.axis2.util.URL(putUrl);
try (CloseableHttpClient httpClient = (CloseableHttpClient) APIUtil.getHttpClient(updateApiUrl.getPort(), updateApiUrl.getProtocol())) {
HttpPut httpPut = new HttpPut(putUrl);
// Set the header properties of the request
httpPut.setHeader(APIConstants.HEADER_ACCEPT, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
httpPut.setHeader(APIConstants.HEADER_CONTENT_TYPE, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
httpPut.setHeader(APIConstants.HEADER_API_TOKEN, apiToken);
httpPut.setEntity(new StringEntity(stringBuilder.toString()));
// Code block for processing the response
try (CloseableHttpResponse response = httpClient.execute(httpPut)) {
if (isDebugEnabled) {
log.debug("HTTP status " + response.getStatusLine().getStatusCode());
}
if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
// BufferedReader reader = new BufferedReader(
// new InputStreamReader(response.getEntity().getContent()));
// String inputLine;
// StringBuilder responseString = new StringBuilder();
//
// while ((inputLine = reader.readLine()) != null) {
// responseString.append(inputLine);
// }
log.info("API Definition successfully updated");
} else {
throw new APIManagementException("Error while sending data to " + putUrl +
". Found http status " + response.getStatusLine());
}
} finally {
httpPut.releaseConnection();
}
}
}
// Logic for the HTTP request
String getUrl = "https://platform.42crunch.com/api/v1/apis/" + uuid + "/assessmentreport";
org.apache.axis2.util.URL getReportUrl = new org.apache.axis2.util.URL(getUrl);
try (CloseableHttpClient getHttpClient = (CloseableHttpClient) APIUtil.getHttpClient(getReportUrl.getPort(), getReportUrl.getProtocol())) {
HttpGet httpGet = new HttpGet(getUrl);
// Set the header properties of the request
httpGet.setHeader(APIConstants.HEADER_ACCEPT, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
httpGet.setHeader(APIConstants.HEADER_API_TOKEN, apiToken);
// Code block for the processing of the response
try (CloseableHttpResponse response = getHttpClient.execute(httpGet)) {
if (isDebugEnabled) {
log.debug("HTTP status " + response.getStatusLine().getStatusCode());
}
if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
BufferedReader reader = new BufferedReader(
new InputStreamReader(response.getEntity().getContent()));
String inputLine;
StringBuilder responseString = new StringBuilder();
while ((inputLine = reader.readLine()) != null) {
responseString.append(inputLine);
}
JSONObject responseJson = (JSONObject) new JSONParser().parse(responseString.toString());
String report = responseJson.get("data").toString();
String grade = (String) ((JSONObject) ((JSONObject) responseJson.get("attr")).get("data")).get("grade");
Integer numErrors = Integer.valueOf((String) ((JSONObject) ((JSONObject) responseJson.get("attr")).get("data")).get("numErrors"));
String decodedReport = new String(Base64Utils.decode(report));
AuditReportDTO auditReportDTO = new AuditReportDTO();
auditReportDTO.setReport(decodedReport);
auditReportDTO.setGrade(grade);
auditReportDTO.setNumErrors(numErrors);
return Response.ok().entity(auditReportDTO).build();
}
}
}
} catch (IOException e) {
log.error("Error occurred while getting HttpClient instance");
} catch (ParseException e) {
log.error("API Definition String could not be parsed into JSONObject.");
} catch (APIManagementException e) {
String errorMessage = "Error while Auditing API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response apisApiIdAuditapiPost(String apiId, APISecurityAuditInfoDTO body, String accept, MessageContext messageContext) {
// TODO - This method is to be removed in favour of apisApiIdAuditapiGet
// boolean isDebugEnabled = log.isDebugEnabled();
// try {
// String username = RestApiUtil.getLoggedInUsername();
// String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
// APIProvider apiProvider = RestApiUtil.getProvider(username);
// API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
// APIIdentifier apiIdentifier = api.getId();
// String apiDefinition = apiProvider.getOpenAPIDefinition(apiIdentifier);
//
// // Get configuration file and retrieve API token and Collection ID
// APIManagerConfiguration config = ServiceReferenceHolder.getInstance()
// .getAPIManagerConfigurationService().getAPIManagerConfiguration();
// String apiToken = config.getFirstProperty(APIConstants.API_SECURITY_AUDIT_API_TOKEN);
// String collectionId = config.getFirstProperty(APIConstants.API_SECURITY_AUDIT_CID);
//
// // Initiate JSON parser.
// JSONParser parser = new JSONParser();
// JSONObject jsonObject;
//
// // Parse JSON String of API Definition
// jsonObject = (JSONObject) parser.parse(apiDefinition);
//
// // Set properties to be attached in the body of the request
// body.setName(apiIdentifier.getApiName());
// body.setCid(collectionId);
// body.setSpecfile(jsonObject);
//
// // Logic for HTTP Request
// URL auditUrl = new URL("https://platform.42crunch.com/api/v1/apis");
// try (CloseableHttpClient httpClient = (CloseableHttpClient) APIUtil.getHttpClient(auditUrl.getPort(), auditUrl.getProtocol())) {
// HttpPost httpPost = new HttpPost(String.valueOf(auditUrl));
//
// // Construct the JSON String to be passed in the request
// StringBuilder bodyString = new StringBuilder();
// bodyString.append("{ \n");
// bodyString.append(" \"specfile\": ").append(body.getSpecfile()).append("\n");
// bodyString.append(" \"cid\": ").append(body.getCid()).append("\n");
// bodyString.append(" \"name\": ").append(body.getName()).append("\n");
// bodyString.append("}");
//
// // Set the header properties of the request
// httpPost.setHeader(APIConstants.HEADER_ACCEPT, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
// httpPost.setHeader(APIConstants.HEADER_CONTENT_TYPE, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
// httpPost.setHeader(APIConstants.HEADER_API_TOKEN, apiToken);
// httpPost.setEntity(new StringEntity(bodyString.toString()));
//
// // Code block for the processing of the response
// try(CloseableHttpResponse response = httpClient.execute(httpPost)) {
// if (isDebugEnabled) {
// log.debug("HTTP status " + response.getStatusLine().getStatusCode());
// }
// if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
// BufferedReader reader = new BufferedReader(
// new InputStreamReader(response.getEntity().getContent()));
// String inputLine;
// StringBuilder responseString = new StringBuilder();
//
// while((inputLine = reader.readLine()) != null) {
// responseString.append(inputLine);
// }
// JSONObject responseObject;
// responseObject = (JSONObject) parser.parse(responseString.toString());
// String newAuditAPIId = (String)((JSONObject) responseObject.get("desc")).get("id");
// ApiMgtDAO.getInstance().addAuditApiMapping(apiIdentifier, newAuditAPIId);
//
// return Response.ok().entity(newAuditAPIId).build();
// } else {
// throw new APIManagementException(
// "Error while retrieving data from " + auditUrl + ". Found http status " + response
// .getStatusLine());
// }
// } finally {
// httpPost.releaseConnection();
// }
// } catch (IOException e) {
// log.error("Error occurred while getting HttpClient instance");
// }
// } catch (APIManagementException e) {
// String errorMessage = "Error while creating new Audit API : " + apiId;
// RestApiUtil.handleInternalServerError(errorMessage, e, log);
// } catch (ParseException e) {
// log.error("API Definition String could not be parsed into JSONObject.");
// }
return null;
}
@Override
public Response apisApiIdAuditapiPut(String apiId, String accept, MessageContext messageContext) {
// TODO - This method is to be removed in favour of apisApiIdAuditapiGet
// boolean isDebugEnabled = log.isDebugEnabled();
// try {
// String username = RestApiUtil.getLoggedInUsername();
// String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
// APIProvider apiProvider = RestApiUtil.getProvider(username);
// API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
// APIIdentifier apiIdentifier = api.getId();
// String apiDefinition = apiProvider.getOpenAPIDefinition(apiIdentifier);
//
// // Get configuration file and retrieve API token
// APIManagerConfiguration config = ServiceReferenceHolder.getInstance()
// .getAPIManagerConfigurationService().getAPIManagerConfiguration();
// String apiToken = config.getFirstProperty(APIConstants.API_SECURITY_AUDIT_API_TOKEN);
//
// // Initiate JSON Parser
// JSONParser parser = new JSONParser();
// JSONObject jsonObject;
//
// // Parse JSON String of API Definition
// jsonObject = (JSONObject) parser.parse(apiDefinition);
//
// // Set the property to be attached in the body of the request
// // Attach API Definition to property called specfile to be sent in the request
// StringBuilder stringBuilder = new StringBuilder();
// stringBuilder.append("{\n");
// stringBuilder.append(" \"specfile\": ").append(jsonObject).append("\n");
// stringBuilder.append("}");
//
// // Retrieve the uuid from the database
// String uuid = ApiMgtDAO.getInstance().getAuditApiId(apiIdentifier);
//
// // Logic for HTTP Request
// URL auditURL = new URL("https://platform.42crunch.com/api/v1/apis/" + uuid);
// try (CloseableHttpClient httpClient = (CloseableHttpClient) APIUtil.getHttpClient(auditURL.getPort(), auditURL.getProtocol())) {
// HttpPut httpPut = new HttpPut(String.valueOf(auditURL));
//
// // Set the header properties of the request
// httpPut.setHeader(APIConstants.HEADER_ACCEPT, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
// httpPut.setHeader(APIConstants.HEADER_CONTENT_TYPE, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
// httpPut.setHeader(APIConstants.HEADER_API_TOKEN, apiToken);
// httpPut.setEntity(new StringEntity(stringBuilder.toString()));
//
// // Code block for processing the response
// try (CloseableHttpResponse response = httpClient.execute(httpPut)) {
// if (isDebugEnabled) {
// log.debug("HTTP status " + response.getStatusLine().getStatusCode());
// }
// if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
// BufferedReader reader = new BufferedReader(
// new InputStreamReader(response.getEntity().getContent()));
// String inputLine;
// StringBuilder responseString = new StringBuilder();
//
// while ((inputLine = reader.readLine()) != null) {
// responseString.append(inputLine);
// }
//
// return Response.ok().entity(responseString.toString()).build();
// } else {
// throw new APIManagementException("Error while sending data to " + auditURL +
// ". Found http status " + response.getStatusLine());
// }
// } finally {
// httpPut.releaseConnection();
// }
// } catch (IOException e) {
// log.error("Error occurred while getting HttpClient instance");
// }
// } catch (APIManagementException e) {
// String errorMessage = "Error while updating Audit API : " + apiId;
// RestApiUtil.handleInternalServerError(errorMessage, e, log);
// } catch (ParseException e) {
// log.error("API Definition String could not be parsed into JSONObject");
// }
return null;
}
/**
* Check whether the token has APIDTO class level Scope annotation
* @return true if the token has APIDTO class level Scope annotation
*/
private boolean checkClassScopeAnnotation(org.wso2.carbon.apimgt.rest.api.util.annotations.Scope[] apiDtoClassAnnotatedScopes, String[] tokenScopes) {
for (org.wso2.carbon.apimgt.rest.api.util.annotations.Scope classAnnotation : apiDtoClassAnnotatedScopes) {
for (String tokenScope : tokenScopes) {
if (classAnnotation.name().equals(tokenScope)) {
return true;
}
}
}
return false;
}
    /**
     * Get the API DTO object in which the API field values are overridden with the user passed new values.
     * <p>
     * Both DTOs are serialized to JSON and compared field by field; only fields whose value the
     * caller actually changed are checked against the field-level Scope annotations (via
     * {@code overrideDTOValues}), so a token lacking a scope can still update unrelated fields.
     *
     * @param apidto      API DTO carrying the user-supplied (possibly partial) updates
     * @param originalAPI the currently persisted API whose values act as the base
     * @param tokenScopes scopes of the invoking token, used to authorize per-field overrides
     * @return an APIDTO based on the original API with only the permitted fields overridden
     * @throws APIManagementException if JSON (de)serialization fails, or if a changed field is
     *                                not covered by any of the token scopes
     */
    private APIDTO getFieldOverriddenAPIDTO(APIDTO apidto, API originalAPI,
                                            String[] tokenScopes) throws APIManagementException {
        APIDTO originalApiDTO;
        APIDTO updatedAPIDTO;
        try {
            originalApiDTO = APIMappingUtil.fromAPItoDTO(originalAPI);
            Field[] fields = APIDTO.class.getDeclaredFields();
            ObjectMapper mapper = new ObjectMapper();
            // Serialize both DTOs to JSON so fields can be compared generically by name
            String newApiDtoJsonString = mapper.writeValueAsString(apidto);
            JSONParser parser = new JSONParser();
            JSONObject newApiDtoJson = (JSONObject) parser.parse(newApiDtoJsonString);
            String originalApiDtoJsonString = mapper.writeValueAsString(originalApiDTO);
            JSONObject originalApiDtoJson = (JSONObject) parser.parse(originalApiDtoJsonString);
            for (Field field : fields) {
                org.wso2.carbon.apimgt.rest.api.util.annotations.Scope[] fieldAnnotatedScopes =
                        field.getAnnotationsByType(org.wso2.carbon.apimgt.rest.api.util.annotations.Scope.class);
                // Compare the serialized forms; only fields the user actually changed need a
                // scope check and an override
                String originalElementValue = mapper.writeValueAsString(originalApiDtoJson.get(field.getName()));
                String newElementValue = mapper.writeValueAsString(newApiDtoJson.get(field.getName()));
                if (!StringUtils.equals(originalElementValue, newElementValue)) {
                    // Throws when none of the token scopes permits updating this field
                    originalApiDtoJson = overrideDTOValues(originalApiDtoJson, newApiDtoJson, field, tokenScopes,
                            fieldAnnotatedScopes);
                }
            }
            // Deserialize the merged JSON back into a typed DTO
            updatedAPIDTO = mapper.readValue(originalApiDtoJson.toJSONString(), APIDTO.class);
        } catch (IOException | ParseException e) {
            String msg = "Error while processing API DTO json strings";
            log.error(msg, e);
            throw new APIManagementException(msg, e);
        }
        return updatedAPIDTO;
    }
/**
* Override the API DTO field values with the user passed new values considering the field-wise scopes defined as
* allowed to update in REST API definition yaml
*/
private JSONObject overrideDTOValues(JSONObject originalApiDtoJson, JSONObject newApiDtoJson, Field field, String[]
tokenScopes, org.wso2.carbon.apimgt.rest.api.util.annotations.Scope[] fieldAnnotatedScopes) throws
APIManagementException {
for (String tokenScope : tokenScopes) {
for (org.wso2.carbon.apimgt.rest.api.util.annotations.Scope scopeAnt : fieldAnnotatedScopes) {
if (scopeAnt.name().equals(tokenScope)) {
// do the overriding
originalApiDtoJson.put(field.getName(), newApiDtoJson.get(field.getName()));
return originalApiDtoJson;
}
}
}
throw new APIManagementException("User is not authorized to update one or more API fields. None of the " +
"required scopes found in user token to update the field. So the request will be failed.");
}
/**
* Delete API
*
* @param apiId API Id
* @param ifMatch If-Match header value
* @return Status of API Deletion
*/
@Override
public Response apisApiIdDelete(String apiId, String ifMatch, MessageContext messageContext) {
try {
String username = RestApiUtil.getLoggedInUsername();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiUtil.getProvider(username);
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
//check if the API has subscriptions
//Todo : need to optimize this check. This method seems too costly to check if subscription exists
List<SubscribedAPI> apiUsages = apiProvider.getAPIUsageByAPIId(apiIdentifier);
if (apiUsages != null && apiUsages.size() > 0) {
RestApiUtil.handleConflict("Cannot remove the API " + apiId + " as active subscriptions exist", log);
}
//deletes the API
apiProvider.deleteAPI(apiIdentifier);
KeyManager keyManager = KeyManagerHolder.getKeyManagerInstance();
keyManager.deleteRegisteredResourceByAPIId(apiId);
return Response.ok().build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("Authorization failure while deleting API : " + apiId, e, log);
} else {
String errorMessage = "Error while deleting API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
    /**
     * Retrieves the content of a document.
     * <p>
     * The response shape depends on the document's source type: FILE answers with the raw file
     * as an attachment, INLINE/MARKDOWN answers with the stored text content, and URL answers
     * with a redirect (303 See Other) to the external location.
     *
     * @param apiId API identifier
     * @param documentId document identifier
     * @param ifNoneMatch If-None-Match header value
     * @return Content of the document/ either inline/file or source url as a redirection
     */
    @Override
    public Response apisApiIdDocumentsDocumentIdContentGet(String apiId, String documentId,
            String ifNoneMatch, MessageContext messageContext) {
        Documentation documentation;
        try {
            String username = RestApiUtil.getLoggedInUsername();
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            //this will fail if user does not have access to the API or the API does not exist
            APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
            documentation = apiProvider.getDocumentation(documentId, tenantDomain);
            if (documentation == null) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
                return null;
            }
            //gets the content depending on the type of the document
            if (documentation.getSourceType().equals(Documentation.DocumentSourceType.FILE)) {
                String resource = documentation.getFilePath();
                Map<String, Object> docResourceMap = APIUtil.getDocument(username, resource, tenantDomain);
                Object fileDataStream = docResourceMap.get(APIConstants.DOCUMENTATION_RESOURCE_MAP_DATA);
                Object contentType = docResourceMap.get(APIConstants.DOCUMENTATION_RESOURCE_MAP_CONTENT_TYPE);
                // Fall back to octet-stream when the registry did not record a content type
                contentType = contentType == null ? RestApiConstants.APPLICATION_OCTET_STREAM : contentType;
                String name = docResourceMap.get(APIConstants.DOCUMENTATION_RESOURCE_MAP_NAME).toString();
                // Serve the file as a download attachment with its original name
                return Response.ok(fileDataStream)
                        .header(RestApiConstants.HEADER_CONTENT_TYPE, contentType)
                        .header(RestApiConstants.HEADER_CONTENT_DISPOSITION, "attachment; filename=\"" + name + "\"")
                        .build();
            } else if (documentation.getSourceType().equals(Documentation.DocumentSourceType.INLINE) || documentation.getSourceType().equals(Documentation.DocumentSourceType.MARKDOWN)) {
                // Inline and markdown documents both store their content in the registry
                String content = apiProvider.getDocumentationContent(apiIdentifier, documentation.getName());
                return Response.ok(content)
                        .header(RestApiConstants.HEADER_CONTENT_TYPE, APIConstants.DOCUMENTATION_INLINE_CONTENT_TYPE)
                        .build();
            } else if (documentation.getSourceType().equals(Documentation.DocumentSourceType.URL)) {
                // URL-sourced documents answer with a redirect to the external location
                String sourceUrl = documentation.getSourceUrl();
                return Response.seeOther(new URI(sourceUrl)).build();
            }
        } catch (APIManagementException e) {
            //Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil.handleAuthorizationFailure(
                        "Authorization failure while retrieving document : " + documentId + " of API " + apiId, e, log);
            } else {
                String errorMessage = "Error while retrieving document " + documentId + " of the API " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        } catch (URISyntaxException e) {
            // Only reachable from the URL branch when the stored source URL is malformed
            String errorMessage = "Error while retrieving source URI location of " + documentId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }
    /**
     * Add content to a document. Content can be inline or File.
     * <p>
     * Exactly one of {@code inputStream} (with {@code fileDetail}) or {@code inlineContent} must
     * be supplied, and it must match the document's configured source type (FILE vs
     * INLINE/MARKDOWN). The input stream is always closed before returning.
     *
     * @param apiId API identifier
     * @param documentId document identifier
     * @param inputStream file input stream
     * @param fileDetail file details as Attachment
     * @param inlineContent inline content for the document
     * @param ifMatch If-match header value
     * @return updated document as DTO
     */
    @Override
    public Response apisApiIdDocumentsDocumentIdContentPost(String apiId, String documentId,
            InputStream inputStream, Attachment fileDetail, String inlineContent, String ifMatch,
            MessageContext messageContext) {
        try {
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            API api = APIMappingUtil.getAPIInfoFromUUID(apiId, tenantDomain);
            // 'file' and 'inlineContent' are mutually exclusive inputs
            if (inputStream != null && inlineContent != null) {
                RestApiUtil.handleBadRequest("Only one of 'file' and 'inlineContent' should be specified", log);
            }
            //retrieves the document and send 404 if not found
            Documentation documentation = apiProvider.getDocumentation(documentId, tenantDomain);
            if (documentation == null) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
                return null;
            }
            //add content depending on the availability of either input stream or inline content
            if (inputStream != null) {
                // A file upload is only valid for documents declared with source type FILE
                if (!documentation.getSourceType().equals(Documentation.DocumentSourceType.FILE)) {
                    RestApiUtil.handleBadRequest("Source type of document " + documentId + " is not FILE", log);
                }
                RestApiPublisherUtils.attachFileToDocument(apiId, documentation, inputStream, fileDetail);
            } else if (inlineContent != null) {
                // Inline text is only valid for INLINE or MARKDOWN documents
                if (!documentation.getSourceType().equals(Documentation.DocumentSourceType.INLINE) &&
                        !documentation.getSourceType().equals(Documentation.DocumentSourceType.MARKDOWN)) {
                    RestApiUtil.handleBadRequest("Source type of document " + documentId + " is not INLINE " +
                            "or MARKDOWN", log);
                }
                apiProvider.addDocumentationContent(api, documentation.getName(), inlineContent);
            } else {
                RestApiUtil.handleBadRequest("Either 'file' or 'inlineContent' should be specified", log);
            }
            //retrieving the updated doc and the URI
            Documentation updatedDoc = apiProvider.getDocumentation(documentId, tenantDomain);
            DocumentDTO documentDTO = DocumentationMappingUtil.fromDocumentationToDTO(updatedDoc);
            String uriString = RestApiConstants.RESOURCE_PATH_DOCUMENT_CONTENT
                    .replace(RestApiConstants.APIID_PARAM, apiId)
                    .replace(RestApiConstants.DOCUMENTID_PARAM, documentId);
            URI uri = new URI(uriString);
            return Response.created(uri).entity(documentDTO).build();
        } catch (APIManagementException e) {
            //Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil.handleAuthorizationFailure(
                        "Authorization failure while adding content to the document: " + documentId + " of API "
                                + apiId, e, log);
            } else {
                RestApiUtil.handleInternalServerError("Failed to add content to the document " + documentId, e, log);
            }
        } catch (URISyntaxException e) {
            String errorMessage = "Error while retrieving document content location : " + documentId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        } finally {
            // Always release the upload stream, even on the error paths
            IOUtils.closeQuietly(inputStream);
        }
        return null;
    }
/**
* Deletes an existing document of an API
*
* @param apiId API identifier
* @param documentId document identifier
* @param ifMatch If-match header value
* @return 200 response if deleted successfully
*/
@Override
public Response apisApiIdDocumentsDocumentIdDelete(String apiId, String documentId, String ifMatch,
MessageContext messageContext) {
Documentation documentation;
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
documentation = apiProvider.getDocumentation(documentId, tenantDomain);
if (documentation == null) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
}
apiProvider.removeDocumentation(apiIdentifier, documentId);
return Response.ok().build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while deleting : " + documentId + " of API " + apiId, e, log);
} else {
String errorMessage = "Error while retrieving API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
    /**
     * Retrieves a document of an API by document id.
     *
     * @param apiId       API identifier
     * @param documentId  document identifier
     * @param ifNoneMatch If-None-Match header value
     * @return the matched document as a DocumentDTO, or {@code null} after an error response
     *         has been raised
     */
    @Override
    public Response apisApiIdDocumentsDocumentIdGet(String apiId, String documentId, String ifNoneMatch,
            MessageContext messageContext) {
        Documentation documentation;
        try {
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            documentation = apiProvider.getDocumentation(documentId, tenantDomain);
            // NOTE(review): the API access check (getAPIIdentifierFromUUID) runs after the
            // document fetch here, unlike the sibling methods - confirm the ordering is intentional
            APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
            if (documentation == null) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
            }
            DocumentDTO documentDTO = DocumentationMappingUtil.fromDocumentationToDTO(documentation);
            return Response.ok().entity(documentDTO).build();
        } catch (APIManagementException e) {
            //Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil.handleAuthorizationFailure(
                        "Authorization failure while retrieving document : " + documentId + " of API " + apiId, e, log);
            } else {
                String errorMessage = "Error while retrieving document : " + documentId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        }
        return null;
    }
    /**
     * Updates an existing document of an API.
     * <p>
     * The document name is immutable and is carried over from the existing document; OTHER-typed
     * documents must supply {@code otherTypeName}, and URL-sourced documents must supply a valid
     * {@code sourceUrl}.
     *
     * @param apiId API identifier
     * @param documentId document identifier
     * @param body updated document DTO
     * @param ifMatch If-match header value
     * @return updated document DTO as response
     */
    @Override
    public Response apisApiIdDocumentsDocumentIdPut(String apiId, String documentId, DocumentDTO body,
            String ifMatch, MessageContext messageContext) {
        try {
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            String sourceUrl = body.getSourceUrl();
            Documentation oldDocument = apiProvider.getDocumentation(documentId, tenantDomain);
            //validation checks for existence of the document
            if (oldDocument == null) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
                return null;
            }
            if (body.getType() == DocumentDTO.TypeEnum.OTHER && org.apache.commons.lang3.StringUtils.isBlank(body.getOtherTypeName())) {
                //check otherTypeName for not null if doc type is OTHER
                RestApiUtil.handleBadRequest("otherTypeName cannot be empty if type is OTHER.", log);
                return null;
            }
            if (body.getSourceType() == DocumentDTO.SourceTypeEnum.URL &&
                    (org.apache.commons.lang3.StringUtils.isBlank(sourceUrl) || !RestApiUtil.isURL(sourceUrl))) {
                RestApiUtil.handleBadRequest("Invalid document sourceUrl Format", log);
                return null;
            }
            //overriding some properties
            // The document name cannot be changed via this endpoint
            body.setName(oldDocument.getName());
            Documentation newDocumentation = DocumentationMappingUtil.fromDTOtoDocumentation(body);
            //this will fail if user does not have access to the API or the API does not exist
            APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
            // Preserve the existing file path so FILE-sourced content survives the update
            newDocumentation.setFilePath(oldDocument.getFilePath());
            apiProvider.updateDocumentation(apiIdentifier, newDocumentation);
            //retrieve the updated documentation
            newDocumentation = apiProvider.getDocumentation(documentId, tenantDomain);
            return Response.ok().entity(DocumentationMappingUtil.fromDocumentationToDTO(newDocumentation)).build();
        } catch (APIManagementException e) {
            //Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil.handleAuthorizationFailure(
                        "Authorization failure while updating document : " + documentId + " of API " + apiId, e, log);
            } else {
                String errorMessage = "Error while updating the document " + documentId + " for API : " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        }
        return null;
    }
/**
* Returns all the documents of the given API identifier that matches to the search condition
*
* @param apiId API identifier
* @param limit max number of records returned
* @param offset starting index
* @param ifNoneMatch If-None-Match header value
* @return matched documents as a list if DocumentDTOs
*/
@Override
public Response apisApiIdDocumentsGet(String apiId, Integer limit, Integer offset, String ifNoneMatch,
MessageContext messageContext) {
// do some magic!
//pre-processing
//setting default limit and offset values if they are not set
limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT;
offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT;
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
List<Documentation> allDocumentation = apiProvider.getAllDocumentation(apiIdentifier);
DocumentListDTO documentListDTO = DocumentationMappingUtil.fromDocumentationListToDTO(allDocumentation,
offset, limit);
DocumentationMappingUtil
.setPaginationParams(documentListDTO, apiId, offset, limit, allDocumentation.size());
return Response.ok().entity(documentListDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving documents of API : " + apiId, e, log);
} else {
String msg = "Error while retrieving documents of API " + apiId;
RestApiUtil.handleInternalServerError(msg, e, log);
}
}
return null;
}
    /**
     * Add a documentation to an API
     *
     * @param apiId api identifier
     * @param body Documentation DTO as request body
     * @param ifMatch If-Match header value (not referenced by this implementation)
     * @param messageContext CXF message context
     * @return created document DTO as response (201 Created with a Location URI)
     */
    @Override
    public Response apisApiIdDocumentsPost(String apiId, DocumentDTO body, String ifMatch, MessageContext messageContext) {
        try {
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            Documentation documentation = DocumentationMappingUtil.fromDTOtoDocumentation(body);
            String documentName = body.getName();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            if (body.getType() == DocumentDTO.TypeEnum.OTHER && org.apache.commons.lang3.StringUtils.isBlank(body.getOtherTypeName())) {
                //check otherTypeName for not null if doc type is OTHER
                RestApiUtil.handleBadRequest("otherTypeName cannot be empty if type is OTHER.", log);
            }
            // URL-sourced documents must carry a syntactically valid sourceUrl.
            String sourceUrl = body.getSourceUrl();
            if (body.getSourceType() == DocumentDTO.SourceTypeEnum.URL &&
                    (org.apache.commons.lang3.StringUtils.isBlank(sourceUrl) || !RestApiUtil.isURL(sourceUrl))) {
                RestApiUtil.handleBadRequest("Invalid document sourceUrl Format", log);
            }
            //this will fail if user does not have access to the API or the API does not exist
            APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
            // Document names must be unique per API; reject duplicates with 409.
            if (apiProvider.isDocumentationExist(apiIdentifier, documentName)) {
                String errorMessage = "Requested document '" + documentName + "' already exists";
                RestApiUtil.handleResourceAlreadyExistsError(errorMessage, log);
            }
            apiProvider.addDocumentation(apiIdentifier, documentation);
            //retrieve the newly added document
            String newDocumentId = documentation.getId();
            documentation = apiProvider.getDocumentation(newDocumentId, tenantDomain);
            DocumentDTO newDocumentDTO = DocumentationMappingUtil.fromDocumentationToDTO(documentation);
            // Build the Location header pointing at the newly created document resource.
            String uriString = RestApiConstants.RESOURCE_PATH_DOCUMENTS_DOCUMENT_ID
                    .replace(RestApiConstants.APIID_PARAM, apiId)
                    .replace(RestApiConstants.DOCUMENTID_PARAM, newDocumentId);
            URI uri = new URI(uriString);
            return Response.created(uri).entity(newDocumentDTO).build();
        } catch (APIManagementException e) {
            //Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil
                        .handleAuthorizationFailure("Authorization failure while adding documents of API : " + apiId, e,
                                log);
            } else {
                String errorMessage = "Error while adding the document for API : " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        } catch (URISyntaxException e) {
            String errorMessage = "Error while retrieving location for document " + body.getName() + " of API " + apiId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }
    /**
     * Get external store list which the given API is already published to.
     * @param apiId API Identifier
     * @param ifNoneMatch If-None-Match header value (not referenced by this implementation)
     * @param messageContext CXF Message Context
     * @return External Store list of published API
     */
    @Override
    public Response getAllPublishedExternalStoresByAPI(String apiId, String ifNoneMatch, MessageContext messageContext)
            throws APIManagementException {
        APIIdentifier apiIdentifier = null;
        String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
        APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
        try {
            // Resolve the UUID; fails if the user lacks access or the API is missing.
            apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
        } catch (APIManagementException e) {
            if (RestApiUtil.isDueToResourceNotFound(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else {
                String errorMessage = "Error while getting API: " + apiId;
                log.error(errorMessage, e);
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        }
        // NOTE(review): execution only reaches here when the catch branch did not fire;
        // the handle* helpers are presumed to throw — TODO confirm.
        Set<APIStore> publishedStores = apiProvider.getPublishedExternalAPIStores(apiIdentifier);
        ExternalStoreListDTO externalStoreListDTO =
                ExternalStoreMappingUtil.fromExternalStoreCollectionToDTO(publishedStores);
        return Response.ok().entity(externalStoreListDTO).build();
    }
/**
* Retrieves API Lifecycle history information
*
* @param apiId API Id
* @param ifNoneMatch If-None-Match header value
* @return API Lifecycle history information
*/
@Override
public Response apisApiIdLifecycleHistoryGet(String apiId, String ifNoneMatch, MessageContext messageContext) {
try {
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
List<LifeCycleEvent> lifeCycleEvents = apiProvider.getLifeCycleEvents(apiIdentifier);
LifecycleHistoryDTO historyDTO = APIMappingUtil.fromLifecycleHistoryModelToDTO(lifeCycleEvents);
return Response.ok().entity(historyDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("Authorization failure while deleting API : " + apiId, e, log);
} else {
String errorMessage = "Error while deleting API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
* Retrieves API Lifecycle state information
*
* @param apiId API Id
* @param ifNoneMatch If-None-Match header value
* @return API Lifecycle state information
*/
@Override
public Response apisApiIdLifecycleStateGet(String apiId, String ifNoneMatch, MessageContext messageContext) {
LifecycleStateDTO lifecycleStateDTO = getLifecycleState(apiId);
return Response.ok().entity(lifecycleStateDTO).build();
}
/**
* Retrieves API Lifecycle state information
*
* @param apiId API Id
* @return API Lifecycle state information
*/
private LifecycleStateDTO getLifecycleState(String apiId) {
try {
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
Map<String, Object> apiLCData = apiProvider.getAPILifeCycleData(apiIdentifier);
if (apiLCData == null) {
String errorMessage = "Error while getting lifecycle state for API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, log);
}
return APIMappingUtil.fromLifecycleModelToDTO(apiLCData);
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("Authorization failure while deleting API : " + apiId, e, log);
} else {
String errorMessage = "Error while deleting API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
    // TODO: unimplemented stub — returns a placeholder body instead of deleting pending lifecycle tasks.
    @Override
    public Response apisApiIdLifecycleStatePendingTasksDelete(String apiId, MessageContext messageContext) {
        // do some magic!
        return Response.ok().entity("magic!").build();
    }
    // TODO: unimplemented stub — mediation policy listing is not wired up yet.
    @Override
    public Response apisApiIdMediationPoliciesGet(String apiId, Integer limit, Integer offset, String query,
            String ifNoneMatch, MessageContext messageContext) {
        // do some magic!
        return Response.ok().entity("magic!").build();
    }
    // TODO: unimplemented stub — mediation policy deletion is not wired up yet.
    @Override
    public Response apisApiIdMediationPoliciesMediationPolicyIdDelete(String apiId, String mediationPolicyId,
            String ifMatch, MessageContext messageContext) {
        // do some magic!
        return Response.ok().entity("magic!").build();
    }
    // TODO: unimplemented stub — mediation policy retrieval is not wired up yet.
    @Override
    public Response apisApiIdMediationPoliciesMediationPolicyIdGet(String apiId, String mediationPolicyId,
            String ifNoneMatch, MessageContext messageContext) {
        // do some magic!
        return Response.ok().entity("magic!").build();
    }
    // TODO: unimplemented stub — mediation policy update is not wired up yet.
    @Override
    public Response apisApiIdMediationPoliciesMediationPolicyIdPut(String apiId, String mediationPolicyId,
            MediationDTO body, String ifMatch, MessageContext messageContext) {
        // do some magic!
        return Response.ok().entity("magic!").build();
    }
    // TODO: unimplemented stub — mediation policy creation is not wired up yet.
    @Override
    public Response apisApiIdMediationPoliciesPost(MediationDTO body, String apiId, String ifMatch,
            MessageContext messageContext) {
        // do some magic!
        return Response.ok().entity("magic!").build();
    }
/**
* Get API monetization status and monetized tier to billing plan mapping
*
* @param apiId API ID
* @param messageContext message context
* @return API monetization status and monetized tier to billing plan mapping
*/
@Override
public Response apisApiIdMonetizationGet(String apiId, MessageContext messageContext) {
try {
if (StringUtils.isBlank(apiId)) {
String errorMessage = "API ID cannot be empty or null when retrieving monetized plans.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
API api = apiProvider.getAPI(apiIdentifier);
Monetization monetizationImplementation = apiProvider.getMonetizationImplClass();
Map<String, String> monetizedPoliciesToPlanMapping = monetizationImplementation.
getMonetizedPoliciesToPlanMapping(api);
APIMonetizationInfoDTO monetizationInfoDTO = APIMappingUtil.getMonetizedTiersDTO
(apiIdentifier, monetizedPoliciesToPlanMapping);
return Response.ok().entity(monetizationInfoDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Failed to retrieve monetized plans for API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, log);
} catch (MonetizationException e) {
String errorMessage = "Failed to fetch monetized plans of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, log);
}
return Response.serverError().build();
}
    /**
     * Monetize (enable or disable) for a given API
     *
     * @param apiId API ID
     * @param body request body
     * @param messageContext message context
     * @return monetizationDTO
     */
    @Override
    public Response apisApiIdMonetizePost(String apiId, APIMonetizationInfoDTO body, MessageContext messageContext) {
        try {
            if (StringUtils.isBlank(apiId)) {
                String errorMessage = "API ID cannot be empty or null when configuring monetization.";
                RestApiUtil.handleBadRequest(errorMessage, log);
            }
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
            API api = apiProvider.getAPI(apiIdentifier);
            // Monetization can only be toggled while the API is in PUBLISHED state.
            if (!APIConstants.PUBLISHED.equalsIgnoreCase(api.getStatus())) {
                String errorMessage = "API " + apiIdentifier.getApiName() +
                        " should be in published state to configure monetization.";
                RestApiUtil.handleBadRequest(errorMessage, log);
            }
            //set the monetization status
            boolean monetizationEnabled = body.isEnabled();
            api.setMonetizationStatus(monetizationEnabled);
            //clear the existing properties related to monetization
            // The request body fully replaces any previously stored monetization properties.
            api.getMonetizationProperties().clear();
            Map<String, String> monetizationProperties = body.getProperties();
            if (MapUtils.isNotEmpty(monetizationProperties)) {
                String errorMessage = RestApiPublisherUtils.validateMonetizationProperties(monetizationProperties);
                if (!errorMessage.isEmpty()) {
                    RestApiUtil.handleBadRequest(errorMessage, log);
                }
                for (Map.Entry<String, String> currentEntry : monetizationProperties.entrySet()) {
                    api.addMonetizationProperty(currentEntry.getKey(), currentEntry.getValue());
                }
            }
            // Persist the monetization flag/properties into the API artifact before calling the impl.
            apiProvider.configureMonetizationInAPIArtifact(api);
            Monetization monetizationImplementation = apiProvider.getMonetizationImplClass();
            HashMap monetizationDataMap = new Gson().fromJson(api.getMonetizationProperties().toString(), HashMap.class);
            boolean isMonetizationStateChangeSuccessful = false;
            if (MapUtils.isEmpty(monetizationDataMap)) {
                String errorMessage = "Monetization data map is empty for API ID " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, log);
            }
            try {
                // Delegate the actual billing-side enable/disable to the configured monetization impl.
                if (monetizationEnabled) {
                    isMonetizationStateChangeSuccessful = monetizationImplementation.enableMonetization
                            (tenantDomain, api, monetizationDataMap);
                } else {
                    isMonetizationStateChangeSuccessful = monetizationImplementation.disableMonetization
                            (tenantDomain, api, monetizationDataMap);
                }
            } catch (MonetizationException e) {
                String errorMessage = "Error while changing monetization status for API ID : " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
            if (isMonetizationStateChangeSuccessful) {
                APIMonetizationInfoDTO monetizationInfoDTO = APIMappingUtil.getMonetizationInfoDTO(apiIdentifier);
                return Response.ok().entity(monetizationInfoDTO).build();
            } else {
                String errorMessage = "Unable to change monetization status for API : " + apiId;
                RestApiUtil.handleBadRequest(errorMessage, log);
            }
        } catch (APIManagementException e) {
            String errorMessage = "Error while configuring monetization for API ID : " + apiId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return Response.serverError().build();
    }
    /**
     * Publish API to given external stores.
     *
     * @param apiId API Id
     * @param externalStoreIds External Store Ids
     * @param ifMatch If-match header value (not referenced by this implementation)
     * @param messageContext CXF Message Context
     * @return Response of published external store list
     */
    @Override
    public Response publishAPIToExternalStores(String apiId, List<String> externalStoreIds, String ifMatch,
            MessageContext messageContext) throws APIManagementException {
        String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
        APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
        API api = null;
        try {
            // Fails if the user lacks access to the API or it does not exist.
            api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
        } catch (APIManagementException e) {
            if (RestApiUtil.isDueToResourceNotFound(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else {
                String errorMessage = "Error while getting API: " + apiId;
                log.error(errorMessage, e);
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        }
        // NOTE(review): execution only reaches here when the catch branch did not fire;
        // the handle* helpers are presumed to throw — TODO confirm.
        if (apiProvider.publishToExternalAPIStores(api, externalStoreIds)) {
            // Re-read the published store set so the response reflects the post-publish state.
            Set<APIStore> publishedStores = apiProvider.getPublishedExternalAPIStores(api.getId());
            ExternalStoreListDTO externalStoreListDTO =
                    ExternalStoreMappingUtil.fromExternalStoreCollectionToDTO(publishedStores);
            return Response.ok().entity(externalStoreListDTO).build();
        }
        return Response.serverError().build();
    }
    /**
     * Get the resource policies(inflow/outflow).
     *
     * @param apiId API ID
     * @param sequenceType sequence type('in' or 'out')
     * @param resourcePath api resource path
     * @param verb http verb
     * @param ifNoneMatch If-None-Match header value (not referenced by this implementation)
     * @return json response of the resource policies according to the resource path
     */
    @Override
    public Response apisApiIdResourcePoliciesGet(String apiId, String sequenceType, String resourcePath,
            String verb, String ifNoneMatch, MessageContext messageContext) {
        try {
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
            // Resource policies only exist for SOAP-to-REST converted APIs.
            boolean isSoapToRESTApi = SOAPOperationBindingUtils
                    .isSOAPToRESTApi(apiIdentifier.getApiName(), apiIdentifier.getVersion(),
                            apiIdentifier.getProviderName());
            if (isSoapToRESTApi) {
                if (StringUtils.isEmpty(sequenceType) || !(RestApiConstants.IN_SEQUENCE.equals(sequenceType)
                        || RestApiConstants.OUT_SEQUENCE.equals(sequenceType))) {
                    String errorMessage = "Sequence type should be either of the values from 'in' or 'out'";
                    RestApiUtil.handleBadRequest(errorMessage, log);
                }
                String resourcePolicy = SequenceUtils
                        .getRestToSoapConvertedSequence(apiIdentifier.getApiName(), apiIdentifier.getVersion(),
                                apiIdentifier.getProviderName(), sequenceType);
                // No path/verb filter supplied: return every policy for the sequence.
                if (StringUtils.isEmpty(resourcePath) && StringUtils.isEmpty(verb)) {
                    ResourcePolicyListDTO resourcePolicyListDTO = APIMappingUtil
                            .fromResourcePolicyStrToDTO(resourcePolicy);
                    return Response.ok().entity(resourcePolicyListDTO).build();
                }
                // Both path and verb supplied: look up the single "<path>_<verb>" entry.
                if (StringUtils.isNotEmpty(resourcePath) && StringUtils.isNotEmpty(verb)) {
                    JSONObject sequenceObj = (JSONObject) new JSONParser().parse(resourcePolicy);
                    JSONObject resultJson = new JSONObject();
                    String key = resourcePath + "_" + verb;
                    JSONObject sequenceContent = (JSONObject) sequenceObj.get(key);
                    if (sequenceContent == null) {
                        String errorMessage = "Cannot find any resource policy for Resource path : " + resourcePath +
                                " with type: " + verb;
                        RestApiUtil.handleResourceNotFoundError(errorMessage, log);
                    }
                    resultJson.put(key, sequenceObj.get(key));
                    ResourcePolicyListDTO resourcePolicyListDTO = APIMappingUtil
                            .fromResourcePolicyStrToDTO(resultJson.toJSONString());
                    return Response.ok().entity(resourcePolicyListDTO).build();
                } else if (StringUtils.isEmpty(resourcePath)) {
                    // Only one of the pair was given — both are required together.
                    String errorMessage = "Resource path cannot be empty for the defined verb: " + verb;
                    RestApiUtil.handleBadRequest(errorMessage, log);
                } else if (StringUtils.isEmpty(verb)) {
                    String errorMessage = "HTTP verb cannot be empty for the defined resource path: " + resourcePath;
                    RestApiUtil.handleBadRequest(errorMessage, log);
                }
            } else {
                String errorMessage = "The provided api with id: " + apiId + " is not a soap to rest converted api.";
                RestApiUtil.handleBadRequest(errorMessage, log);
            }
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving the API : " + apiId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        } catch (ParseException e) {
            String errorMessage = "Error while retrieving the resource policies for the API : " + apiId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }
/**
* Get the resource policy given the resource id.
*
* @param apiId API ID
* @param resourcePolicyId resource policy id
* @param ifNoneMatch If-None-Match header value
* @return json response of the resource policy for the resource id given
*/
@Override
public Response apisApiIdResourcePoliciesResourcePolicyIdGet(String apiId, String resourcePolicyId,
String ifNoneMatch, MessageContext messageContext) {
try {
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
boolean isSoapToRESTApi = SOAPOperationBindingUtils
.isSOAPToRESTApi(apiIdentifier.getApiName(), apiIdentifier.getVersion(),
apiIdentifier.getProviderName());
if (isSoapToRESTApi) {
if (StringUtils.isEmpty(resourcePolicyId)) {
String errorMessage = "Resource id should not be empty to update a resource policy.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
String policyContent = SequenceUtils
.getResourcePolicyFromRegistryResourceId(apiIdentifier, resourcePolicyId);
ResourcePolicyInfoDTO resourcePolicyInfoDTO = APIMappingUtil
.fromResourcePolicyStrToInfoDTO(policyContent);
return Response.ok().entity(resourcePolicyInfoDTO).build();
} else {
String errorMessage = "The provided api with id: " + apiId + " is not a soap to rest converted api.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
} catch (APIManagementException e) {
String errorMessage = "Error while retrieving the API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
    /**
     * Update the resource policies(inflow/outflow) given the resource id.
     *
     * @param apiId API ID
     * @param resourcePolicyId resource policy id
     * @param body resource policy content
     * @param ifMatch If-Match header value (not referenced by this implementation)
     * @return json response of the updated sequence content
     */
    @Override
    public Response apisApiIdResourcePoliciesResourcePolicyIdPut(String apiId, String resourcePolicyId,
            ResourcePolicyInfoDTO body, String ifMatch, MessageContext messageContext) {
        try {
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
            // Resource policies only exist for SOAP-to-REST converted APIs.
            boolean isSoapToRESTApi = SOAPOperationBindingUtils
                    .isSOAPToRESTApi(apiIdentifier.getApiName(), apiIdentifier.getVersion(),
                            apiIdentifier.getProviderName());
            if (isSoapToRESTApi) {
                if (StringUtils.isEmpty(resourcePolicyId)) {
                    String errorMessage = "Resource id should not be empty to update a resource policy.";
                    RestApiUtil.handleBadRequest(errorMessage, log);
                }
                // Validate the submitted policy is well-formed XML before touching the registry.
                boolean isValidSchema = RestApiPublisherUtils.validateXMLSchema(body.getContent());
                if (isValidSchema) {
                    SequenceUtils
                            .updateResourcePolicyFromRegistryResourceId(apiIdentifier, resourcePolicyId, body.getContent());
                    // Re-read the stored policy so the response reflects what was actually persisted.
                    String updatedPolicyContent = SequenceUtils
                            .getResourcePolicyFromRegistryResourceId(apiIdentifier, resourcePolicyId);
                    ResourcePolicyInfoDTO resourcePolicyInfoDTO = APIMappingUtil
                            .fromResourcePolicyStrToInfoDTO(updatedPolicyContent);
                    return Response.ok().entity(resourcePolicyInfoDTO).build();
                } else {
                    String errorMessage =
                            "Error while validating the resource policy xml content for the API : " + apiId;
                    RestApiUtil.handleInternalServerError(errorMessage, log);
                }
            } else {
                String errorMessage = "The provided api with id: " + apiId + " is not a soap to rest converted api.";
                RestApiUtil.handleBadRequest(errorMessage, log);
            }
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving the API : " + apiId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }
/**
* Get total revenue for a given API from all its' subscriptions
*
* @param apiId API ID
* @param messageContext message context
* @return revenue data for a given API
*/
@Override
public Response apisApiIdRevenueGet(String apiId, MessageContext messageContext) {
if (StringUtils.isBlank(apiId)) {
String errorMessage = "API ID cannot be empty or null when getting revenue details.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
Monetization monetizationImplementation = apiProvider.getMonetizationImplClass();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
API api = apiProvider.getAPI(apiIdentifier);
if (!APIConstants.PUBLISHED.equalsIgnoreCase(api.getStatus())) {
String errorMessage = "API " + apiIdentifier.getApiName() +
" should be in published state to get total revenue.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
Map<String, String> revenueUsageData = monetizationImplementation.getTotalRevenue(api, apiProvider);
APIRevenueDTO apiRevenueDTO = new APIRevenueDTO();
apiRevenueDTO.setProperties(revenueUsageData);
return Response.ok().entity(apiRevenueDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Failed to retrieve revenue data for API ID : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, log);
} catch (MonetizationException e) {
String errorMessage = "Failed to get current revenue data for API ID : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, log);
}
return null;
}
    // TODO: unimplemented stub — scope listing is not wired up yet.
    @Override
    public Response apisApiIdScopesGet(String apiId, String ifNoneMatch, MessageContext messageContext) {
        // do some magic!
        return Response.ok().entity("magic!").build();
    }
    // TODO: unimplemented stub — scope deletion is not wired up yet.
    @Override
    public Response apisApiIdScopesNameDelete(String apiId, String name, String ifMatch, MessageContext messageContext) {
        // do some magic!
        return Response.ok().entity("magic!").build();
    }
    // TODO: unimplemented stub — scope retrieval is not wired up yet.
    @Override
    public Response apisApiIdScopesNameGet(String apiId, String name, String ifNoneMatch, MessageContext messageContext) {
        // do some magic!
        return Response.ok().entity("magic!").build();
    }
    // TODO: unimplemented stub — scope update is not wired up yet.
    @Override
    public Response apisApiIdScopesNamePut(String apiId, String name, ScopeDTO body, String ifMatch,
            MessageContext messageContext) {
        // do some magic!
        return Response.ok().entity("magic!").build();
    }
    // TODO: unimplemented stub — scope creation is not wired up yet.
    @Override
    public Response apisApiIdScopesPost(String apiId, ScopeDTO body, String ifMatch, MessageContext messageContext) {
        // do some magic!
        return Response.ok().entity("magic!").build();
    }
/**
* Retrieves the swagger document of an API
*
* @param apiId API identifier
* @param ifNoneMatch If-None-Match header value
* @return Swagger document of the API
*/
@Override
public Response apisApiIdSwaggerGet(String apiId, String ifNoneMatch, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
String apiSwagger = apiProvider.getOpenAPIDefinition(apiIdentifier);
return Response.ok().entity(apiSwagger).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil
.handleAuthorizationFailure("Authorization failure while retrieving swagger of API : " + apiId,
e, log);
} else {
String errorMessage = "Error while retrieving swagger of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
    /**
     * Updates the swagger definition of an existing API
     *
     * @param apiId API identifier
     * @param apiDefinition Swagger definition
     * @param ifMatch If-match header value (not referenced by this implementation)
     * @return updated swagger document of the API
     */
    @Override
    public Response apisApiIdSwaggerPut(String apiId, String apiDefinition, String ifMatch, MessageContext messageContext) {
        try {
            // Reject the request up front if the submitted definition is not a valid OpenAPI document.
            APIDefinitionValidationResponse response = OASParserUtil
                    .validateAPIDefinition(apiDefinition, true);
            if (!response.isValid()) {
                RestApiUtil.handleBadRequest(response.getErrorItems(), log);
            }
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            //this will fail if user does not have access to the API or the API does not exist
            API existingAPI = apiProvider.getAPIbyUUID(apiId, tenantDomain);
            APIDefinition oasParser = response.getParser();
            Set<URITemplate> uriTemplates = null;
            try {
                SwaggerData swaggerData = new SwaggerData(existingAPI);
                uriTemplates = oasParser.getURITemplates(swaggerData, response.getJsonContent());
            } catch (APIManagementException e) {
                // catch APIManagementException inside again to capture validation error
                RestApiUtil.handleBadRequest(e.getMessage(), log);
            }
            Set<Scope> scopes = oasParser.getScopes(apiDefinition);
            //validating scope roles
            // Every role attached to a scope must exist in the user store before the update is applied.
            for (Scope scope : scopes) {
                String roles = scope.getRoles();
                if (roles != null) {
                    for (String aRole : roles.split(",")) {
                        boolean isValidRole = APIUtil.isRoleNameExist(RestApiUtil.getLoggedInUsername(), aRole);
                        if (!isValidRole) {
                            String error = "Role '" + aRole + "' Does not exist.";
                            RestApiUtil.handleBadRequest(error, log);
                        }
                    }
                }
            }
            existingAPI.setUriTemplates(uriTemplates);
            existingAPI.setScopes(scopes);
            //Update API is called to update URITemplates and scopes of the API
            apiProvider.updateAPI(existingAPI);
            // Re-embed managed info into the definition, then persist it as the API's swagger.
            SwaggerData swaggerData = new SwaggerData(existingAPI);
            String updatedApiDefinition = oasParser.populateCustomManagementInfo(apiDefinition, swaggerData);
            apiProvider.saveSwagger20Definition(existingAPI.getId(), updatedApiDefinition);
            //retrieves the updated swagger definition
            String apiSwagger = apiProvider.getOpenAPIDefinition(existingAPI.getId());
            return Response.ok().entity(apiSwagger).build();
        } catch (APIManagementException e) {
            //Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
            // to expose the existence of the resource
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil.handleAuthorizationFailure(
                        "Authorization failure while updating swagger definition of API: " + apiId, e, log);
            } else {
                String errorMessage = "Error while retrieving API : " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        } catch (FaultGatewaysException e) {
            String errorMessage = "Error while updating API : " + apiId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }
/**
* Retrieves the thumbnail image of an API specified by API identifier
*
* @param apiId API Id
* @param ifNoneMatch If-None-Match header value
* @param messageContext If-Modified-Since header value
* @return Thumbnail image of the API
*/
@Override
public Response apisApiIdThumbnailGet(String apiId, String ifNoneMatch, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
ResourceFile thumbnailResource = apiProvider.getIcon(apiIdentifier);
if (thumbnailResource != null) {
return Response
.ok(thumbnailResource.getContent(), MediaType.valueOf(thumbnailResource.getContentType()))
.build();
} else {
return Response.noContent().build();
}
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the
// existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving thumbnail of API : " + apiId, e, log);
} else {
String errorMessage = "Error while retrieving thumbnail of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
    /**
     * Updates the thumbnail image of the API identified by the given UUID.
     * <p>
     * The uploaded file is stored as a registry resource, resource permissions are aligned with
     * the API provider, and the API is updated so the new thumbnail URL is persisted. Because the
     * API object returned by the provider keeps only a single URI template per resource path, the
     * URI templates and scopes are re-read from the swagger definition before calling updateAPI.
     *
     * @param apiId           UUID of the API
     * @param fileInputStream content stream of the thumbnail image (closed in the finally block)
     * @param fileDetail      attachment meta-data of the uploaded file
     * @param ifMatch         If-Match header value (currently unused)
     * @param messageContext  CXF message context
     * @return 201 Created with a FileInfoDTO pointing at the new thumbnail, or null when an
     *         error was already handled
     */
    @Override
    public Response updateAPIThumbnail(String apiId, InputStream fileInputStream, Attachment fileDetail,
            String ifMatch, MessageContext messageContext) {
        try {
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            String fileName = fileDetail.getDataHandler().getName();
            // Prefer the content type guessed from the file name; fall back to the type the client sent.
            String fileContentType = URLConnection.guessContentTypeFromName(fileName);
            if (org.apache.commons.lang3.StringUtils.isBlank(fileContentType)) {
                fileContentType = fileDetail.getContentType().toString();
            }
            //this will fail if user does not have access to the API or the API does not exist
            API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
            ResourceFile apiImage = new ResourceFile(fileInputStream, fileContentType);
            String thumbPath = APIUtil.getIconPath(api.getId());
            String thumbnailUrl = apiProvider.addResourceFile(thumbPath, apiImage);
            api.setThumbnailUrl(APIUtil.prependTenantPrefix(thumbnailUrl, api.getId().getProviderName()));
            APIUtil.setResourcePermissions(api.getId().getProviderName(), null, null, thumbPath);
            //Creating URI templates due to available uri templates in returned api object only kept single template
            //for multiple http methods
            String apiSwaggerDefinition = apiProvider.getOpenAPIDefinition(api.getId());
            if (!org.apache.commons.lang3.StringUtils.isEmpty(apiSwaggerDefinition)) {
                Optional<APIDefinition> definitionOptional = OASParserUtil.getOASParser(apiSwaggerDefinition);
                if(!definitionOptional.isPresent()) {
                    RestApiUtil.handleInternalServerError("Error occurred while getting swagger parser.", log);
                    return null;
                }
                APIDefinition apiDefinition = definitionOptional.get();
                SwaggerData swaggerData = new SwaggerData(api);
                Set<URITemplate> uriTemplates = apiDefinition.getURITemplates(swaggerData, apiSwaggerDefinition);
                api.setUriTemplates(uriTemplates);
                // scopes
                Set<Scope> scopes = apiDefinition.getScopes(apiSwaggerDefinition);
                api.setScopes(scopes);
            }
            apiProvider.updateAPI(api);
            // Build the relative thumbnail location used both as Location header and in the DTO.
            String uriString = RestApiConstants.RESOURCE_PATH_THUMBNAIL
                    .replace(RestApiConstants.APIID_PARAM, apiId);
            URI uri = new URI(uriString);
            FileInfoDTO infoDTO = new FileInfoDTO();
            infoDTO.setRelativePath(uriString);
            infoDTO.setMediaType(apiImage.getContentType());
            return Response.created(uri).entity(infoDTO).build();
        } catch (APIManagementException e) {
            //Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the
            // existence of the resource
            if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
            } else if (isAuthorizationFailure(e)) {
                RestApiUtil
                        .handleAuthorizationFailure("Authorization failure while adding thumbnail for API : " + apiId,
                                e, log);
            } else {
                String errorMessage = "Error while retrieving thumbnail of API : " + apiId;
                RestApiUtil.handleInternalServerError(errorMessage, e, log);
            }
        } catch (URISyntaxException e) {
            String errorMessage = "Error while retrieving thumbnail location of API: " + apiId;
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        } catch (FaultGatewaysException e) {
            //This is logged and process is continued because icon is optional for an API
            log.error("Failed to update API after adding icon. ", e);
        } finally {
            IOUtils.closeQuietly(fileInputStream);
        }
        return null;
    }
@Override
public Response apisApiIdResourcePathsGet(String apiId, Integer limit, Integer offset, String ifNoneMatch,
MessageContext messageContext) {
try {
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
List<ResourcePath> apiResourcePaths = apiProvider.getResourcePathsOfAPI(apiIdentifier);
ResourcePathListDTO dto = APIMappingUtil.fromResourcePathListToDTO(apiResourcePaths, limit, offset);
APIMappingUtil.setPaginationParamsForAPIResourcePathList(dto, offset, limit, apiResourcePaths.size());
return Response.ok().entity(dto).build();
} catch (APIManagementException e) {
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving resource paths of API : " + apiId, e, log);
} else {
String errorMessage = "Error while retrieving resource paths of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
* Validate API Definition and retrieve as the response
*
* @param url URL of the OpenAPI definition
* @param fileInputStream InputStream for the provided file
* @param fileDetail File meta-data
* @param returnContent Whether to return the definition content
* @param messageContext CXF message context
* @return API Definition validation response
*/
@Override
public Response validateOpenAPIDefinition(String url, InputStream fileInputStream, Attachment fileDetail,
Boolean returnContent, MessageContext messageContext) {
// Validate and retrieve the OpenAPI definition
Map validationResponseMap = null;
try {
validationResponseMap = validateOpenAPIDefinition(url, fileInputStream, returnContent);
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError("Error occurred while validating API Definition", e, log);
}
OpenAPIDefinitionValidationResponseDTO validationResponseDTO =
(OpenAPIDefinitionValidationResponseDTO)validationResponseMap.get(RestApiConstants.RETURN_DTO);
return Response.ok().entity(validationResponseDTO).build();
}
    /**
     * Importing an OpenAPI definition and create an API
     *
     * @param fileInputStream InputStream for the provided file
     * @param fileDetail File meta-data
     * @param url URL of the OpenAPI definition
     * @param additionalProperties API object (json) including additional properties like name, version, context
     * @param messageContext CXF message context
     * @return API Import using OpenAPI definition response (201 Created with the new API), or null
     *         when an error was already handled
     */
    @Override
    public Response importOpenAPIDefinition(InputStream fileInputStream, Attachment fileDetail, String url,
            String additionalProperties, MessageContext messageContext) {
        // Validate and retrieve the OpenAPI definition
        Map validationResponseMap = null;
        try {
            validationResponseMap = validateOpenAPIDefinition(url, fileInputStream, true);
        } catch (APIManagementException e) {
            RestApiUtil.handleInternalServerError("Error occurred while validating API Definition", e, log);
        }
        OpenAPIDefinitionValidationResponseDTO validationResponseDTO =
                (OpenAPIDefinitionValidationResponseDTO) validationResponseMap.get(RestApiConstants.RETURN_DTO);
        APIDefinitionValidationResponse validationResponse =
                (APIDefinitionValidationResponse) validationResponseMap.get(RestApiConstants.RETURN_MODEL);
        // Reject the import immediately when the definition itself is invalid.
        if (!validationResponseDTO.isIsValid()) {
            ErrorDTO errorDTO = APIMappingUtil.getErrorDTOFromErrorListItems(validationResponseDTO.getErrors());
            throw RestApiUtil.buildBadRequestException(errorDTO);
        }
        // Convert the 'additionalProperties' json into an APIDTO object
        ObjectMapper objectMapper = new ObjectMapper();
        APIDTO apiDTOFromProperties;
        try {
            apiDTOFromProperties = objectMapper.readValue(additionalProperties, APIDTO.class);
        } catch (IOException e) {
            throw RestApiUtil.buildBadRequestException("Error while parsing 'additionalProperties'", e);
        }
        // Only HTTP type APIs should be allowed
        if (!APIDTO.TypeEnum.HTTP.equals(apiDTOFromProperties.getType())) {
            throw RestApiUtil.buildBadRequestException("The API's type should only be HTTP when " +
                    "importing an OpenAPI definition");
        }
        // Import the API and Definition
        try {
            APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
            API apiToAdd = prepareToCreateAPIByDTO(apiDTOFromProperties);
            String definitionToAdd;
            // When the payload declares operations, they take precedence over the paths in the definition.
            boolean syncOperations = apiDTOFromProperties.getOperations().size() > 0;
            // Rearrange paths according to the API payload and save the OpenAPI definition
            APIDefinition apiDefinition = validationResponse.getParser();
            SwaggerData swaggerData = new SwaggerData(apiToAdd);
            definitionToAdd = apiDefinition.generateAPIDefinition(swaggerData,
                    validationResponse.getJsonContent(), syncOperations);
            Set<URITemplate> uriTemplates = apiDefinition.getURITemplates(swaggerData, definitionToAdd);
            Set<Scope> scopes = apiDefinition.getScopes(definitionToAdd);
            apiToAdd.setUriTemplates(uriTemplates);
            apiToAdd.setScopes(scopes);
            // adding the API and definition
            apiProvider.addAPI(apiToAdd);
            apiProvider.saveSwaggerDefinition(apiToAdd, definitionToAdd);
            // retrieving the added API for returning as the response
            API addedAPI = apiProvider.getAPI(apiToAdd.getId());
            APIDTO createdApiDTO = APIMappingUtil.fromAPItoDTO(addedAPI);
            // This URI used to set the location header of the POST response
            URI createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId());
            return Response.created(createdApiUri).entity(createdApiDTO).build();
        } catch (APIManagementException e) {
            String errorMessage = "Error while adding new API : " + apiDTOFromProperties.getProvider() + "-" +
                    apiDTOFromProperties.getName() + "-" + apiDTOFromProperties.getVersion() + " - " + e.getMessage();
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        } catch (URISyntaxException e) {
            String errorMessage = "Error while retrieving API location : " + apiDTOFromProperties.getProvider() + "-" +
                    apiDTOFromProperties.getName() + "-" + apiDTOFromProperties.getVersion();
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }
/**
* Validate a provided WSDL definition via a URL or a file/zip
*
* @param url WSDL URL
* @param fileInputStream file/zip input stream
* @param fileDetail file/zip details
* @param messageContext messageContext object
* @return WSDL validation response
* @throws APIManagementException when error occurred during validation
*/
@Override
public Response validateWSDLDefinition(String url, InputStream fileInputStream, Attachment fileDetail,
MessageContext messageContext) throws APIManagementException {
handleInvalidParams(fileInputStream, url);
WSDLValidationResponseDTO responseDTO;
WSDLValidationResponse validationResponse = new WSDLValidationResponse();
if (url != null) {
try {
URL wsdlUrl = new URL(url);
validationResponse = APIMWSDLReader.validateWSDLUrl(wsdlUrl);
} catch (MalformedURLException e) {
RestApiUtil.handleBadRequest("Invalid/Malformed URL : " + url, log);
}
} else if (fileInputStream != null) {
String filename = fileDetail.getContentDisposition().getFilename();
try {
if (filename.endsWith(".zip")) {
validationResponse =
APIMWSDLReader.extractAndValidateWSDLArchive(fileInputStream);
} else if (filename.endsWith(".wsdl")) {
validationResponse = APIMWSDLReader.validateWSDLFile(fileInputStream);
} else {
RestApiUtil.handleBadRequest("Unsupported extension type of file: " + filename, log);
}
} catch (APIManagementException e) {
String errorMessage = "Internal error while validating the WSDL from file:" + filename;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
responseDTO =
APIMappingUtil.fromWSDLValidationResponseToDTO(validationResponse);
return Response.ok().entity(responseDTO).build();
}
@Override
public Response importWSDLDefinition(InputStream fileInputStream, Attachment fileDetail, String url,
String additionalProperties, String implementationType, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
APIDTO additionalPropertiesAPI = null;
APIDTO createdApiDTO;
URI createdApiUri;
// Minimum requirement name, version, context and endpointConfig.
additionalPropertiesAPI = new ObjectMapper().readValue(additionalProperties, APIDTO.class);
additionalPropertiesAPI.setProvider(RestApiUtil.getLoggedInUsername());
additionalPropertiesAPI.setType(APIDTO.TypeEnum.SOAPTOREST);
API apiToAdd = prepareToCreateAPIByDTO(additionalPropertiesAPI);
//adding the api
apiProvider.addAPI(apiToAdd);
boolean isSoapToRestConvertedApi = APIDTO.TypeEnum.SOAPTOREST.equals(implementationType);
// TODO: First-cut only support URL SOAPToREST remove this todo if it's not
if (isSoapToRestConvertedApi && StringUtils.isNotBlank(url)) {
if (StringUtils.isNotBlank(url)) {
String swaggerStr = SOAPOperationBindingUtils.getSoapOperationMapping(url);
apiProvider.saveSwagger20Definition(apiToAdd.getId(), swaggerStr);
SequenceGenerator.generateSequencesFromSwagger(swaggerStr, new Gson().toJson(additionalPropertiesAPI));
} else {
String errorMessage =
"Error while generating the swagger since the wsdl url is null for: " + apiProvider;
RestApiUtil.handleInternalServerError(errorMessage, log);
}
}
APIIdentifier createdApiId = apiToAdd.getId();
//Retrieve the newly added API to send in the response payload
API createdApi = apiProvider.getAPI(createdApiId);
createdApiDTO = APIMappingUtil.fromAPItoDTO(createdApi);
//This URI used to set the location header of the POST response
createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId());
return Response.created(createdApiUri).entity(createdApiDTO).build();
} catch (APIManagementException | IOException | URISyntaxException e) {
return Response.serverError().entity(e.getMessage()).build();
}
}
@Override
public Response apisApiIdWsdlGet(String apiId, String ifNoneMatch, MessageContext messageContext) {
// do some magic!
return Response.ok().entity("magic!").build();
}
@Override
public Response apisApiIdWsdlPut(String apiId, InputStream fileInputStream, Attachment fileDetail,
String ifMatch, MessageContext messageContext) {
// do some magic!
return Response.ok().entity("magic!").build();
}
@Override
public Response apisChangeLifecyclePost(String action, String apiId, String lifecycleChecklist,
String ifMatch, MessageContext messageContext) {
//pre-processing
String[] checkListItems = lifecycleChecklist != null ? lifecycleChecklist.split(",") : new String[0];
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
Map<String, Object> apiLCData = apiProvider.getAPILifeCycleData(apiIdentifier);
String[] nextAllowedStates = (String[]) apiLCData.get(APIConstants.LC_NEXT_STATES);
if (!ArrayUtils.contains(nextAllowedStates, action)) {
RestApiUtil.handleBadRequest(
"Action '" + action + "' is not allowed. Allowed actions are " + Arrays
.toString(nextAllowedStates), log);
}
//check and set lifecycle check list items including "Deprecate Old Versions" and "Require Re-Subscription".
for (String checkListItem : checkListItems) {
String[] attributeValPair = checkListItem.split(":");
if (attributeValPair.length == 2) {
String checkListItemName = attributeValPair[0].trim();
boolean checkListItemValue = Boolean.valueOf(attributeValPair[1].trim());
apiProvider.checkAndChangeAPILCCheckListItem(apiIdentifier, checkListItemName, checkListItemValue);
}
}
//todo: check if API's tiers are properly set before Publishing
APIStateChangeResponse stateChangeResponse = apiProvider.changeLifeCycleStatus(apiIdentifier, action);
//returns the current lifecycle state
LifecycleStateDTO stateDTO = getLifecycleState(apiId);;
WorkflowResponseDTO workflowResponseDTO = APIMappingUtil
.toWorkflowResponseDTO(stateDTO, stateChangeResponse);
return Response.ok().entity(workflowResponseDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while updating the lifecycle of API " + apiId, e, log);
} else {
RestApiUtil.handleInternalServerError("Error while updating lifecycle of API " + apiId, e, log);
}
} catch (FaultGatewaysException e) {
String errorMessage = "Error while updating the API in Gateway " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response apisCopyApiPost(String newVersion, String apiId, Boolean defaultVersion,
MessageContext messageContext) {
URI newVersionedApiUri;
APIDTO newVersionedApi;
try {
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
APIIdentifier apiIdentifier = api.getId();
if (defaultVersion) {
api.setAsDefaultVersion(true);
}
//creates the new version
apiProvider.createNewAPIVersion(api, newVersion);
//get newly created API to return as response
APIIdentifier apiNewVersionedIdentifier =
new APIIdentifier(apiIdentifier.getProviderName(), apiIdentifier.getApiName(), newVersion);
newVersionedApi = APIMappingUtil.fromAPItoDTO(apiProvider.getAPI(apiNewVersionedIdentifier));
//This URI used to set the location header of the POST response
newVersionedApiUri =
new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + newVersionedApi.getId());
return Response.created(newVersionedApiUri).entity(newVersionedApi).build();
} catch (APIManagementException | DuplicateAPIException e) {
if (RestApiUtil.isDueToResourceAlreadyExists(e)) {
String errorMessage = "Requested new version " + newVersion + " of API " + apiId + " already exists";
RestApiUtil.handleResourceAlreadyExistsError(errorMessage, e, log);
} else if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("Authorization failure while copying API : " + apiId, e, log);
} else {
String errorMessage = "Error while copying API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving API location of " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response apisHead(String query, String ifNoneMatch, MessageContext messageContext) {
// do some magic!
return Response.ok().entity("magic!").build();
}
/**
* Import a GraphQL Schema
* @param type APIType
* @param fileInputStream input file
* @param fileDetail file Detail
* @param additionalProperties api object as string format
* @param ifMatch If--Match header value
* @param messageContext messageContext
* @return Response with GraphQL API
*/
@Override
public Response apisImportGraphqlSchemaPost(String type, InputStream fileInputStream, Attachment fileDetail,
String additionalProperties, String ifMatch,
MessageContext messageContext) {
APIDTO additionalPropertiesAPI = null;
String schema = "";
try {
if (fileInputStream == null || StringUtils.isBlank(additionalProperties)) {
String errorMessage = "GraphQL schema and api details cannot be empty.";
RestApiUtil.handleBadRequest(errorMessage, log);
} else {
schema = IOUtils.toString(fileInputStream, RestApiConstants.CHARSET);
}
if (!StringUtils.isBlank(additionalProperties) && !StringUtils.isBlank(schema)) {
if (log.isDebugEnabled()) {
log.debug("Deseriallizing additionalProperties: " + additionalProperties + "/n"
+ "importing schema: " + schema);
}
}
additionalPropertiesAPI = new ObjectMapper().readValue(additionalProperties, APIDTO.class);
additionalPropertiesAPI.setType(APIDTO.TypeEnum.GRAPHQL);
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
API apiToAdd = prepareToCreateAPIByDTO(additionalPropertiesAPI);
//adding the api
apiProvider.addAPI(apiToAdd);
//Save swagger definition of graphQL
APIDefinitionFromOpenAPISpec apiDefinitionUsingOASParser = new APIDefinitionFromOpenAPISpec();
SwaggerData swaggerData = new SwaggerData(apiToAdd);
String apiDefinition = apiDefinitionUsingOASParser.generateAPIDefinition(swaggerData);
apiProvider.saveSwagger20Definition(apiToAdd.getId(), apiDefinition);
APIIdentifier createdApiId = apiToAdd.getId();
apiProvider.saveGraphqlSchemaDefinition(apiToAdd, schema);
//Retrieve the newly added API to send in the response payload
API createdApi = apiProvider.getAPI(createdApiId);
APIDTO createdApiDTO = APIMappingUtil.fromAPItoDTO(createdApi);
//This URI used to set the location header of the POST response
URI createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId());
return Response.created(createdApiUri).entity(createdApiDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Error while adding new API : " + additionalPropertiesAPI.getProvider() + "-" +
additionalPropertiesAPI.getName() + "-" + additionalPropertiesAPI.getVersion() + " - " + e.getMessage();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving API location : " + additionalPropertiesAPI.getProvider() + "-" +
additionalPropertiesAPI.getName() + "-" + additionalPropertiesAPI.getVersion();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (IOException e) {
String errorMessage = "Error while retrieving content from file : " + additionalPropertiesAPI.getProvider()
+ "-" + additionalPropertiesAPI.getName() + "-" + additionalPropertiesAPI.getVersion()
+ "-" /*+ body.getEndpointConfig()*/;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Validate graphQL Schema
* @param fileInputStream input file
* @param fileDetail file Detail
* @param messageContext messageContext
* @return Validation response
*/
@Override
public Response apisValidateGraphqlSchemaPost(InputStream fileInputStream, Attachment fileDetail, MessageContext messageContext) {
String errorMessage = "";
String schema;
TypeDefinitionRegistry typeRegistry;
Set<SchemaValidationError> validationErrors;
boolean isValid = false;
SchemaParser schemaParser = new SchemaParser();
GraphQLValidationResponseDTO validationResponse = new GraphQLValidationResponseDTO();
try {
schema = IOUtils.toString(fileInputStream, RestApiConstants.CHARSET);
if (schema.isEmpty()) {
errorMessage = "GraphQL Schema cannot be empty or null to validate it";
RestApiUtil.handleBadRequest(errorMessage, log);
}
typeRegistry = schemaParser.parse(schema);
GraphQLSchema graphQLSchema = UnExecutableSchemaGenerator.makeUnExecutableSchema(typeRegistry);
SchemaValidator schemaValidation = new SchemaValidator();
validationErrors = schemaValidation.validateSchema(graphQLSchema);
if (validationErrors.toArray().length > 0) {
errorMessage = "InValid Schema";
} else {
isValid = true;
validationResponse.setIsValid(isValid);
GraphQLValidationResponseGraphQLInfoDTO graphQLInfo = new GraphQLValidationResponseGraphQLInfoDTO();
List<APIOperationsDTO> operationArray = extractGraphQLOperationList(schema);
graphQLInfo.setOperations(operationArray);
GraphQLSchemaDTO schemaObj = new GraphQLSchemaDTO();
schemaObj.setSchemaDefinition(schema);
graphQLInfo.setGraphQLSchema(schemaObj);
validationResponse.setGraphQLInfo(graphQLInfo);
}
} catch (SchemaProblem | IOException e) {
errorMessage = e.getMessage();
}
if(!isValid) {
validationResponse.setIsValid(isValid);
validationResponse.setErrorMessage(errorMessage);
}
return Response.ok().entity(validationResponse).build();
}
/**
* Extract GraphQL Operations from given schema
* @param schema graphQL Schema
* @return the arrayList of APIOperationsDTO
*/
private List<APIOperationsDTO> extractGraphQLOperationList(String schema) {
List<APIOperationsDTO> operationArray = new ArrayList<>();
SchemaParser schemaParser = new SchemaParser();
TypeDefinitionRegistry typeRegistry = schemaParser.parse(schema);
Map<java.lang.String, graphql.language.TypeDefinition> operationList = typeRegistry.types();
for (Map.Entry<String, TypeDefinition> entry : operationList.entrySet()) {
if (entry.getValue().getName().equals(APIConstants.GRAPHQL_QUERY) ||
entry.getValue().getName().equals(APIConstants.GRAPHQL_MUTATION)
|| entry.getValue().getName().equals(APIConstants.GRAPHQL_SUBSCRIPTION)) {
for (FieldDefinition fieldDef : ((ObjectTypeDefinition) entry.getValue()).getFieldDefinitions()) {
APIOperationsDTO operation = new APIOperationsDTO();
operation.setVerb(entry.getKey());
operation.setTarget(fieldDef.getName());
operationArray.add(operation);
}
}
}
return operationArray;
}
    /**
     * Retrieves the subscription-level throttling policies that are attached to the API
     * identified by the given UUID, filtered from the full list of available subscription tiers.
     *
     * @param apiId          UUID of the API
     * @param ifNoneMatch    If-None-Match header value (currently unused)
     * @param xWSO2Tenant    requested tenant domain (currently unused)
     * @param messageContext CXF message context
     * @return the API's subscription tiers, or null when the API has no policies or could not be resolved
     */
    @Override
    public Response apisApiIdSubscriptionPoliciesGet(String apiId, String ifNoneMatch, String xWSO2Tenant,
            MessageContext messageContext) {
        APIDTO apiInfo = getAPIByID(apiId);
        List<Tier> availableThrottlingPolicyList = new ThrottlingPoliciesApiServiceImpl()
                .getThrottlingPolicyList(ThrottlingPolicyDTO.PolicyLevelEnum.SUBSCRIPTION.toString());
        if (apiInfo != null ) {
            List<String> apiPolicies = apiInfo.getPolicies();
            if (apiPolicies != null && !apiPolicies.isEmpty()) {
                // Keep only the tiers the API actually declares in its policy list.
                List<Tier> apiThrottlingPolicies = new ArrayList<>();
                for (Tier tier : availableThrottlingPolicyList) {
                    if (apiPolicies.contains(tier.getName())) {
                        apiThrottlingPolicies.add(tier);
                    }
                }
                return Response.ok().entity(apiThrottlingPolicies).build();
            }
        }
        // NOTE(review): returns null (no Response) when the API has no policies — confirm callers
        // expect this rather than a 200 with an empty list.
        return null;
    }
private APIDTO getAPIByID(String apiId) {
try {
String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
return APIMappingUtil.fromAPItoDTO(api);
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("User is not authorized to access the API", e, log);
} else {
String errorMessage = "Error while retrieving API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
* Validate the provided OpenAPI definition (via file or url) and return the validation response DTO
*
* @param url OpenAPI definition url
* @param fileInputStream file as input stream
* @param returnContent whether to return the content of the definition in the response DTO
* @return Map with the validation response information. A value with key 'dto' will have the response DTO
* of type OpenAPIDefinitionValidationResponseDTO for the REST API. A value with key 'model' will have the
* validation response of type APIDefinitionValidationResponse coming from the impl level.
*/
private Map validateOpenAPIDefinition(String url, InputStream fileInputStream, Boolean returnContent)
throws APIManagementException {
handleInvalidParams(fileInputStream, url);
OpenAPIDefinitionValidationResponseDTO responseDTO;
APIDefinitionValidationResponse validationResponse = new APIDefinitionValidationResponse();
if (url != null) {
validationResponse = OASParserUtil.validateAPIDefinitionByURL(url, returnContent);
} else if (fileInputStream != null) {
try {
String openAPIContent = IOUtils.toString(fileInputStream, RestApiConstants.CHARSET);
validationResponse = OASParserUtil.validateAPIDefinition(openAPIContent, returnContent);
} catch (IOException e) {
RestApiUtil.handleInternalServerError("Error while reading file content", e, log);
}
}
responseDTO = APIMappingUtil.getOpenAPIDefinitionValidationResponseFromModel(validationResponse,
returnContent);
Map response = new HashMap();
response.put(RestApiConstants.RETURN_MODEL, validationResponse);
response.put(RestApiConstants.RETURN_DTO, responseDTO);
return response;
}
/**
* Validate API import definition/validate definition parameters
*
* @param fileInputStream file content stream
* @param url URL of the definition
*/
private void handleInvalidParams(InputStream fileInputStream, String url) {
String msg = "";
if (url == null && fileInputStream == null) {
msg = "Either 'file' or 'url' should be specified";
}
if (fileInputStream != null && url != null) {
msg = "Only one of 'file' and 'url' should be specified";
}
if (StringUtils.isNotBlank(msg)) {
RestApiUtil.handleBadRequest(msg, log);
}
}
/**
* This method is used to assign micro gateway labels to the DTO
*
* @param apiDTO API DTO
* @param api the API object
* @return the API object with labels
*/
private API assignLabelsToDTO(APIDTO apiDTO, API api) {
if (apiDTO.getLabels() != null) {
List<LabelDTO> dtoLabels = apiDTO.getLabels();
List<Label> labelList = new ArrayList<>();
for (LabelDTO labelDTO : dtoLabels) {
Label label = new Label();
label.setName(labelDTO.getName());
// label.setDescription(labelDTO.getDescription()); todo add description
labelList.add(label);
}
api.setGatewayLabels(labelList);
}
return api;
}
/**
* To check whether a particular exception is due to access control restriction.
*
* @param e Exception object.
* @return true if the the exception is caused due to authorization failure.
*/
private boolean isAuthorizationFailure(Exception e) {
String errorMessage = e.getMessage();
return errorMessage != null && errorMessage.contains(APIConstants.UN_AUTHORIZED_ERROR_MESSAGE);
}
}
| Added Create New Audit API feature
| components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher.v1/src/main/java/org/wso2/carbon/apimgt/rest/api/publisher/v1/impl/ApisApiServiceImpl.java | Added Create New Audit API feature |
|
Java | bsd-2-clause | 9c507a548c50cf2c94558299a4cb4f2eecdd4ea9 | 0 | chototsu/MikuMikuStudio,chototsu/MikuMikuStudio,chototsu/MikuMikuStudio,chototsu/MikuMikuStudio | /*
* Copyright (c) 2003-2006 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme.scene;
import java.io.IOException;
import java.util.Stack;
import com.jme.app.SimpleGame;
import com.jme.image.Texture;
import com.jme.intersection.CollisionResults;
import com.jme.renderer.ColorRGBA;
import com.jme.renderer.Renderer;
import com.jme.scene.state.AlphaState;
import com.jme.scene.state.RenderState;
import com.jme.scene.state.TextureState;
import com.jme.system.DisplaySystem;
import com.jme.util.TextureManager;
import com.jme.util.export.InputCapsule;
import com.jme.util.export.JMEExporter;
import com.jme.util.export.JMEImporter;
import com.jme.util.export.OutputCapsule;
/**
*
* <code>Text</code> allows text to be displayed on the screen. The
* renderstate of this Geometry must be a valid font texture.
*
* @author Mark Powell
* @version $Id: Text.java,v 1.26 2006-07-20 16:54:18 nca Exp $
*/
public class Text extends Geometry {

    private static final long serialVersionUID = 1L;

    /** Buffer holding the characters rendered on the next draw pass; mutated in place by print(). */
    private StringBuffer text;

    /** Color applied to the rendered text. */
    private ColorRGBA textColor = new ColorRGBA();

    /**
     * The compiled list of renderstates for this geometry, taking into account
     * ancestors' states - updated with updateRenderStates()
     */
    public RenderState[] states = new RenderState[RenderState.RS_MAX_STATE];

    /** Default constructor; fields are populated later (e.g. by {@link #read}). */
    public Text() {}

    /**
     * Creates a texture object that starts with the given text.
     *
     * @see com.jme.util.TextureManager
     * @param name
     *            the name of the scene element. This is required for
     *            identification and comparison purposes.
     * @param text
     *            The text to show.
     */
    public Text(String name, String text) {
        super(name);
        setCullMode(SceneElement.CULL_NEVER); // text is never frustum-culled
        this.text = new StringBuffer(text);
        setRenderQueueMode(Renderer.QUEUE_ORTHO); // drawn in screen space via the ortho queue
    }

    /**
     * <code>print</code> sets the text to be rendered on the next render
     * pass.
     *
     * @param text
     *            the text to display.
     */
    public void print(String text) {
        // overwrite the existing buffer contents in place rather than allocating a new buffer
        this.text.replace(0, this.text.length(), text);
    }

    /**
     * Sets the text to be rendered on the next render. This function is a more
     * efficient version of print(String).
     *
     * @param text
     *            The text to display.
     */
    public void print(StringBuffer text) {
        this.text.setLength(0);
        this.text.append(text);
    }

    /**
     * <code>getText</code> retrieves the text string of this
     * <code>Text</code> object.
     *
     * @return the text string of this object (the live buffer, not a copy).
     */
    public StringBuffer getText() {
        return text;
    }

    /**
     * <code>draw</code> calls super to set the render state then calls the
     * renderer to display the text string.
     *
     * @param r
     *            the renderer used to display the text.
     */
    public void draw(Renderer r) {
        if (!r.isProcessingQueue()) {
            // defer to the render queue when possible; checkAndAdd returns true once queued
            if (r.checkAndAdd(this)) return;
        }
        super.draw(r);
        r.draw(this);
    }

    /**
     * Sets the color of the text.
     *
     * @param color
     *            Color to set (copied into this object's color).
     */
    public void setTextColor(ColorRGBA color) {
        textColor.set(color);
    }

    /**
     * Returns the current text color.
     *
     * @return Current text color (the live instance, not a copy).
     */
    public ColorRGBA getTextColor() {
        return textColor;
    }

    /*
     * (non-Javadoc)
     *
     * @see com.jme.scene.Spatial#hasCollision(com.jme.scene.Spatial,
     * com.jme.intersection.CollisionResults)
     */
    public void findCollisions(Spatial scene, CollisionResults results) {
        // Do nothing -- text never participates in collision detection.
    }

    /** Text never collides with anything, so this always reports false. */
    public boolean hasCollision(Spatial scene, boolean checkTriangles) {
        return false;
    }

    /**
     * @return the on-screen width of this text, assuming a fixed glyph width of
     *         10 units per character (presumably matches the default font --
     *         TODO confirm for other font textures).
     */
    public float getWidth() {
        float rVal = 10f * text.length() * worldScale.x;
        return rVal;
    }

    /**
     * @return the on-screen height of this text, assuming a fixed glyph height
     *         of 16 units (presumably matches the default font -- TODO confirm).
     */
    public float getHeight() {
        float rVal = 16f * worldScale.y;
        return rVal;
    }

    /**
     * @return a Text with {@link #DEFAULT_FONT} and correct alpha state
     * @param name name of the spatial
     */
    public static Text createDefaultTextLabel( String name ) {
        return createDefaultTextLabel( name, "" );
    }

    /**
     * @return a Text with {@link #DEFAULT_FONT} and correct alpha state
     * @param name name of the spatial
     * @param initialText initial string to display
     */
    public static Text createDefaultTextLabel( String name, String initialText ) {
        Text text = new Text( name, initialText );
        text.setCullMode( SceneElement.CULL_NEVER );
        text.setRenderState( getDefaultFontTextureState() );
        text.setRenderState( getFontAlpha() );
        return text;
    }

    /**
     * @return an alpha state for allowing 'black' to be transparent
     */
    private static AlphaState getFontAlpha() {
        AlphaState as1 = DisplaySystem.getDisplaySystem().getRenderer().createAlphaState();
        as1.setBlendEnabled( true );
        // SRC_ALPHA + ONE: additive blending, so black background texels contribute nothing
        as1.setSrcFunction( AlphaState.SB_SRC_ALPHA );
        as1.setDstFunction( AlphaState.DB_ONE );
        as1.setTestEnabled( true );
        as1.setTestFunction( AlphaState.TF_GREATER );
        return as1;
    }

    /**
     * texture state for the default font; built lazily by
     * {@link #getDefaultFontTextureState()} and cached here.
     */
    private static TextureState defaultFontTextureState;

    /**
     * Drops the cached default-font texture state so it is recreated on next
     * use (presumably needed after the display/renderer is recreated -- TODO
     * confirm intended usage).
     */
    public static final void resetFontTexture() {
        defaultFontTextureState = null;
    }

    /**
     * A default font contained in the jME library.
     */
    public static final String DEFAULT_FONT = "com/jme/app/defaultfont.tga";

    /**
     * Compiles the inherited render state stacks into this Text's own
     * {@link #states} array, falling back to the renderer's default state for
     * any state type with no ancestor entry.
     */
    protected void applyRenderState(Stack[] states) {
        for (int x = 0; x < states.length; x++) {
            if (states[x].size() > 0) {
                this.states[x] = ((RenderState) states[x].peek()).extract(
                        states[x], this);
            } else {
                this.states[x] = Renderer.defaultStateList[x];
            }
        }
    }

    /**
     * Creates the texture state if not created before.
     * @return texture state for the default font
     */
    private static TextureState getDefaultFontTextureState() {
        if ( defaultFontTextureState == null ) {
            defaultFontTextureState = DisplaySystem.getDisplaySystem().getRenderer().createTextureState();
            defaultFontTextureState.setTexture( TextureManager.loadTexture( SimpleGame.class
                    .getClassLoader().getResource( DEFAULT_FONT ), Texture.MM_LINEAR,
                    Texture.FM_LINEAR ) );
            defaultFontTextureState.setEnabled( true );
        }
        return defaultFontTextureState;
    }

    /** Serializes this Text's string and color through the jME export system. */
    public void write(JMEExporter e) throws IOException {
        super.write(e);
        OutputCapsule capsule = e.getCapsule(this);
        capsule.write(text.toString(), "textString", "");
        capsule.write(textColor, "textColor", new ColorRGBA());
    }

    /** Restores this Text's string and color through the jME import system. */
    public void read(JMEImporter e) throws IOException {
        super.read(e);
        InputCapsule capsule = e.getCapsule(this);
        text = new StringBuffer(capsule.readString("textString", ""));
        textColor = (ColorRGBA)capsule.readSavable("textColor", new ColorRGBA());
    }
}
} | src/com/jme/scene/Text.java | /*
* Copyright (c) 2003-2006 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme.scene;
import java.io.IOException;
import java.util.Stack;
import com.jme.app.SimpleGame;
import com.jme.image.Texture;
import com.jme.intersection.CollisionResults;
import com.jme.renderer.ColorRGBA;
import com.jme.renderer.Renderer;
import com.jme.scene.state.AlphaState;
import com.jme.scene.state.RenderState;
import com.jme.scene.state.TextureState;
import com.jme.system.DisplaySystem;
import com.jme.util.TextureManager;
import com.jme.util.export.InputCapsule;
import com.jme.util.export.JMEExporter;
import com.jme.util.export.JMEImporter;
import com.jme.util.export.OutputCapsule;
/**
*
* <code>Text</code> allows text to be displayed on the screen. The
* renderstate of this Geometry must be a valid font texture.
*
* @author Mark Powell
* @version $Id: Text.java,v 1.25 2006-05-12 21:19:21 nca Exp $
*/
public class Text extends Geometry {
private static final long serialVersionUID = 1L;
private StringBuffer text;
private ColorRGBA textColor = new ColorRGBA();
/**
* The compiled list of renderstates for this geometry, taking into account
* ancestors' states - updated with updateRenderStates()
*/
public RenderState[] states = new RenderState[RenderState.RS_MAX_STATE];
public Text() {}
/**
* Creates a texture object that starts with the given text.
*
* @see com.jme.util.TextureManager
* @param name
* the name of the scene element. This is required for
* identification and comparision purposes.
* @param text
* The text to show.
*/
public Text(String name, String text) {
super(name);
setCullMode(SceneElement.CULL_NEVER);
this.text = new StringBuffer(text);
setRenderQueueMode(Renderer.QUEUE_ORTHO);
}
/**
*
* <code>print</code> sets the text to be rendered on the next render
* pass.
*
* @param text
* the text to display.
*/
public void print(String text) {
this.text.replace(0, this.text.length(), text);
}
/**
* Sets the text to be rendered on the next render. This function is a more
* efficient version of print(String).
*
* @param text
* The text to display.
*/
public void print(StringBuffer text) {
this.text.setLength(0);
this.text.append(text);
}
/**
*
* <code>getText</code> retrieves the text string of this
* <code>Text</code> object.
*
* @return the text string of this object.
*/
public StringBuffer getText() {
return text;
}
/**
* <code>draw</code> calls super to set the render state then calls the
* renderer to display the text string.
*
* @param r
* the renderer used to display the text.
*/
public void draw(Renderer r) {
if (!r.isProcessingQueue()) {
if (r.checkAndAdd(this)) return;
}
super.draw(r);
r.draw(this);
}
/**
* Sets the color of the text.
*
* @param color
* Color to set.
*/
public void setTextColor(ColorRGBA color) {
textColor.set(color);
}
/**
* Returns the current text color.
*
* @return Current text color.
*/
public ColorRGBA getTextColor() {
return textColor;
}
/*
* (non-Javadoc)
*
* @see com.jme.scene.Spatial#hasCollision(com.jme.scene.Spatial,
* com.jme.intersection.CollisionResults)
*/
public void findCollisions(Spatial scene, CollisionResults results) {
//Do nothing.
}
public boolean hasCollision(Spatial scene, boolean checkTriangles) {
return false;
}
public float getWidth() {
float rVal = 10f * text.length() * worldScale.x;
return rVal;
}
public float getHeight() {
float rVal = 16f * worldScale.y;
return rVal;
}
/**
* @return a Text with {@link #DEFAULT_FONT} and correct alpha state
* @param name name of the spatial
*/
public static Text createDefaultTextLabel( String name ) {
return createDefaultTextLabel( name, "" );
}
/**
* @return a Text with {@link #DEFAULT_FONT} and correct alpha state
* @param name name of the spatial
*/
public static Text createDefaultTextLabel( String name, String initialText ) {
Text text = new Text( name, initialText );
text.setCullMode( SceneElement.CULL_NEVER );
text.setRenderState( getDefaultFontTextureState() );
text.setRenderState( getFontAlpha() );
return text;
}
/*
* @return an alpha states for allowing 'black' to be transparent
*/
private static AlphaState getFontAlpha() {
AlphaState as1 = DisplaySystem.getDisplaySystem().getRenderer().createAlphaState();
as1.setBlendEnabled( true );
as1.setSrcFunction( AlphaState.SB_SRC_ALPHA );
as1.setDstFunction( AlphaState.DB_ONE );
as1.setTestEnabled( true );
as1.setTestFunction( AlphaState.TF_GREATER );
return as1;
}
/**
* texture state for the default font.
*/
private static TextureState defaultFontTextureState;
/**
* A default font cantained in the jME library.
*/
public static final String DEFAULT_FONT = "com/jme/app/defaultfont.tga";
protected void applyRenderState(Stack[] states) {
for (int x = 0; x < states.length; x++) {
if (states[x].size() > 0) {
this.states[x] = ((RenderState) states[x].peek()).extract(
states[x], this);
} else {
this.states[x] = Renderer.defaultStateList[x];
}
}
}
/**
* Creates the texture state if not created before.
* @return texture state for the default font
*/
private static TextureState getDefaultFontTextureState() {
if ( defaultFontTextureState == null ) {
defaultFontTextureState = DisplaySystem.getDisplaySystem().getRenderer().createTextureState();
defaultFontTextureState.setTexture( TextureManager.loadTexture( SimpleGame.class
.getClassLoader().getResource( DEFAULT_FONT ), Texture.MM_LINEAR,
Texture.FM_LINEAR ) );
defaultFontTextureState.setEnabled( true );
}
return defaultFontTextureState;
}
public void write(JMEExporter e) throws IOException {
super.write(e);
OutputCapsule capsule = e.getCapsule(this);
capsule.write(text.toString(), "textString", "");
capsule.write(textColor, "textColor", new ColorRGBA());
}
public void read(JMEImporter e) throws IOException {
super.read(e);
InputCapsule capsule = e.getCapsule(this);
text = new StringBuffer(capsule.readString("textString", ""));
textColor = (ColorRGBA)capsule.readSavable("textColor", new ColorRGBA());
}
} | ISSUE MINOR:
Ability to reset the font texture.
git-svn-id: 5afc437a751a4ff2ced778146f5faadda0b504ab@3105 75d07b2b-3a1a-0410-a2c5-0572b91ccdca
| src/com/jme/scene/Text.java | ISSUE MINOR: |
|
Java | bsd-2-clause | df9df7bae16386d58c28c48a19680ae76995456c | 0 | scifio/scifio | //
// FluoviewReader.java
//
/*
LOCI Bio-Formats package for reading and converting biological file formats.
Copyright (C) 2005-@year@ Melissa Linkert, Curtis Rueden, Chris Allan,
Eric Kjellman and Brian Loranger.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Library General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Library General Public License for more details.
You should have received a copy of the GNU Library General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package loci.formats.in;
import java.awt.image.BufferedImage;
import java.io.*;
import java.util.*;
import loci.formats.*;
import loci.formats.meta.FilterMetadata;
import loci.formats.meta.MetadataStore;
/**
* FluoviewReader is the file format reader for
* Olympus Fluoview TIFF files AND Andor Bio-imaging Division (ABD) TIFF files.
*
* <dl><dt><b>Source code:</b></dt>
* <dd><a href="https://skyking.microscopy.wisc.edu/trac/java/browser/trunk/loci/formats/in/FluoviewReader.java">Trac</a>,
* <a href="https://skyking.microscopy.wisc.edu/svn/java/trunk/loci/formats/in/FluoviewReader.java">SVN</a></dd></dl>
*
* @author Eric Kjellman egkjellman at wisc.edu
* @author Melissa Linkert linkert at wisc.edu
* @author Curtis Rueden ctrueden at wisc.edu
*/
public class FluoviewReader extends BaseTiffReader {

    // -- Constants --

    /** Maximum number of bytes to check for Fluoview header information. */
    private static final int BLOCK_CHECK_LEN = 16384;

    /** String identifying a Fluoview file. */
    private static final String FLUOVIEW_MAGIC_STRING = "FLUOVIEW";

    /** Private TIFF tags */
    private static final int MMHEADER = 34361;
    private static final int MMSTAMP = 34362;

    // -- Fields --

    /** Pixel dimensions for this file. */
    private float voxelX = 1f, voxelY = 1f, voxelZ = 1f, voxelC = 1f, voxelT = 1f;

    /** First image. */
    private BufferedImage zeroImage = null;

    // hardware settings; the array fields hold one entry per channel,
    // filled in first-empty-slot order while parsing the comment
    private String[] gains, voltages, offsets, channelNames, lensNA;
    private String mag, detManu, objManu, comment;
    private Double gamma;

    // -- Constructor --

    /** Constructs a new Fluoview TIFF reader. */
    public FluoviewReader() {
        super("Olympus Fluoview/ABD TIFF", new String[] {"tif", "tiff"});
        blockCheckLen = 524288;
        // a .tif/.tiff extension alone is not enough to identify a Fluoview file
        suffixSufficient = false;
    }

    // -- IFormatReader API methods --

    /* @see loci.formats.IFormatReader#isThisType(byte[]) */
    public boolean isThisType(byte[] block) {
        try {
            RandomAccessStream stream = new RandomAccessStream(block);
            Hashtable ifd = TiffTools.getFirstIFD(stream);
            stream.close();
            String com = TiffTools.getComment(ifd);
            if (com == null) com = "";
            // NOTE(review): && binds tighter than ||, so a file carrying an MMSTAMP
            // tag matches even without the FLUOVIEW magic string -- confirm this is
            // intended (it may be what admits ABD TIFFs).
            return com.indexOf(FLUOVIEW_MAGIC_STRING) != -1 &&
                ifd.containsKey(new Integer(MMHEADER)) ||
                ifd.containsKey(new Integer(MMSTAMP));
        }
        catch (IOException e) {
            if (debug) LogTools.trace(e);
        }
        catch (ArrayIndexOutOfBoundsException e) {
            if (debug) LogTools.trace(e);
        }
        return false;
    }

    /**
     * @see loci.formats.IFormatReader#openBytes(int, byte[], int, int, int, int)
     */
    public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h)
        throws FormatException, IOException
    {
        // when sizeY matches the IFD image length, planes map directly onto IFDs
        if (core.sizeY[0] == TiffTools.getImageLength(ifds[0])) {
            return super.openBytes(no, buf, x, y, w, h);
        }
        FormatTools.assertId(currentId, true, 1);
        FormatTools.checkPlaneNumber(this, no);
        FormatTools.checkBufferSize(this, buf.length, w, h);
        // NOTE(review): in this branch plane index 'no' is ignored and plane 0 is
        // always read -- presumably all planes live in a single IFD here; confirm.
        byte[] b = new byte[w * h *
            getRGBChannelCount() * FormatTools.getBytesPerPixel(core.pixelType[0])];
        super.openBytes(0, b, x, y, w, h);
        System.arraycopy(b, 0, buf, 0, buf.length);
        return buf;
    }

    // -- IFormatHandler API methods --

    /* @see loci.formats.IFormatReader#close() */
    public void close() throws IOException {
        super.close();
        // reset per-file state so the reader can be reused for another file
        voxelX = voxelY = voxelZ = voxelC = voxelT = 1f;
        zeroImage = null;
    }

    // -- Internal BaseTiffReader API methods --

    /* @see loci.formats.BaseTiffReader#initStandardMetadata() */
    protected void initStandardMetadata() throws FormatException, IOException {
        super.initStandardMetadata();
        // First, we want to determine whether this file is a Fluoview TIFF.
        // Originally, Andor TIFF had its own reader; however, the two formats are
        // very similar, so it made more sense to merge the two formats into one
        // reader.
        short[] s = TiffTools.getIFDShortArray(ifds[0], MMHEADER, true);
        // the MMHEADER tag stores bytes as shorts; convert back to signed bytes
        byte[] mmheader = new byte[s.length];
        for (int i=0; i<mmheader.length; i++) {
            mmheader[i] = (byte) s[i];
            if (mmheader[i] < 0) mmheader[i]++;
        }
        RandomAccessStream ras = new RandomAccessStream(mmheader);
        ras.order(isLittleEndian());
        put("Header Flag", ras.readShort());
        put("Image Type", ras.readChar());
        put("Image name", ras.readString(257));
        ras.skipBytes(4); // skip pointer to data field
        put("Number of colors", ras.readInt());
        ras.skipBytes(4); // skip pointer to palette field
        ras.skipBytes(4); // skip pointer to other palette field
        put("Comment size", ras.readInt());
        ras.skipBytes(4); // skip pointer to comment field
        // read dimension information (10 fixed slots in the header)
        String[] names = new String[10];
        int[] sizes = new int[10];
        double[] resolutions = new double[10];
        for (int i=0; i<10; i++) {
            names[i] = ras.readString(16);
            sizes[i] = ras.readInt();
            double origin = ras.readDouble();
            resolutions[i] = ras.readDouble();
            put("Dimension " + (i+1) + " Name", names[i]);
            put("Dimension " + (i+1) + " Size", sizes[i]);
            put("Dimension " + (i+1) + " Origin", origin);
            put("Dimension " + (i+1) + " Resolution", resolutions[i]);
            put("Dimension " + (i+1) + " Units", ras.readString(64));
        }
        ras.skipBytes(4); // skip pointer to spatial position data
        put("Map type", ras.readShort());
        put("Map min", ras.readDouble());
        put("Map max", ras.readDouble());
        put("Min value", ras.readDouble());
        put("Max value", ras.readDouble());
        ras.skipBytes(4); // skip pointer to map data
        put("Gamma", ras.readDouble());
        put("Offset", ras.readDouble());
        // read gray channel data
        put("Gray Channel Name", ras.readString(16));
        put("Gray Channel Size", ras.readInt());
        put("Gray Channel Origin", ras.readDouble());
        put("Gray Channel Resolution", ras.readDouble());
        put("Gray Channel Units", ras.readString(64));
        ras.skipBytes(4); // skip pointer to thumbnail data
        put("Voice field", ras.readInt());
        ras.skipBytes(4); // skip pointer to voice field
        // now we need to read the MMSTAMP data to determine dimension order
        double[][] stamps = new double[8][ifds.length];
        for (int i=0; i<ifds.length; i++) {
            s = TiffTools.getIFDShortArray(ifds[i], MMSTAMP, true);
            byte[] stamp = new byte[s.length];
            for (int j=0; j<s.length; j++) {
                stamp[j] = (byte) s[j];
                if (stamp[j] < 0) stamp[j]++;
            }
            ras = new RandomAccessStream(stamp);
            // each stamp is 8 doubles, representing the position on dimensions 3-10
            for (int j=0; j<8; j++) {
                stamps[j][i] = ras.readDouble();
            }
        }
        // NOTE(review): the stamp values read above are never consulted below --
        // confirm whether this is dead work or groundwork for future dimension
        // ordering logic.
        // calculate the dimension order and axis sizes
        core.sizeZ[0] = core.sizeC[0] = core.sizeT[0] = 1;
        core.currentOrder[0] = "XY";
        core.metadataComplete[0] = true;
        for (int i=0; i<10; i++) {
            String name = names[i];
            int size = sizes[i];
            float voxel = (float) resolutions[i];
            if (name == null || size == 0) continue;
            name = name.toLowerCase().trim();
            if (name.length() == 0) continue;
            if (name.equals("x")) {
                if (core.sizeX[0] == 0) core.sizeX[0] = size;
                voxelX = voxel;
            }
            else if (name.equals("y")) {
                if (core.sizeY[0] == 0) core.sizeY[0] = size;
                voxelY = voxel;
            }
            else if (name.equals("z") || name.equals("event")) {
                core.sizeZ[0] *= size;
                if (core.currentOrder[0].indexOf("Z") == -1) {
                    core.currentOrder[0] += "Z";
                }
                voxelZ = voxel;
            }
            else if (name.equals("ch") || name.equals("wavelength")) {
                core.sizeC[0] *= size;
                if (core.currentOrder[0].indexOf("C") == -1) {
                    core.currentOrder[0] += "C";
                }
                voxelC = voxel;
            }
            else {
                // any other named dimension is folded into the time axis
                core.sizeT[0] *= size;
                if (core.currentOrder[0].indexOf("T") == -1) {
                    core.currentOrder[0] += "T";
                }
                voxelT = voxel;
            }
        }
        // guarantee all three of Z, T, C appear in the dimension order string
        if (core.currentOrder[0].indexOf("Z") == -1) core.currentOrder[0] += "Z";
        if (core.currentOrder[0].indexOf("T") == -1) core.currentOrder[0] += "T";
        if (core.currentOrder[0].indexOf("C") == -1) core.currentOrder[0] += "C";
        core.imageCount[0] = ifds.length;
        // axis sizes cannot exceed the number of IFDs actually present
        if (core.sizeZ[0] > ifds.length) core.sizeZ[0] = ifds.length;
        if (core.sizeT[0] > ifds.length) core.sizeT[0] = ifds.length;
        // special case: a single IFD whose height equals the Z or T extent is
        // treated as a stack of 1-pixel-tall planes
        if (core.imageCount[0] == 1 && (core.sizeT[0] == core.sizeY[0] ||
            core.sizeZ[0] == core.sizeY[0]) && (core.sizeT[0] > core.imageCount[0] ||
            core.sizeZ[0] > core.imageCount[0]))
        {
            core.sizeY[0] = 1;
            core.imageCount[0] = core.sizeZ[0] * core.sizeT[0] * core.sizeC[0];
        }
        // cut up the comment, if necessary
        comment = TiffTools.getComment(ifds[0]);
        gains = new String[core.sizeC[0]];
        offsets = new String[core.sizeC[0]];
        voltages = new String[core.sizeC[0]];
        channelNames = new String[core.sizeC[0]];
        lensNA = new String[core.sizeC[0]];
        if (comment != null && comment.startsWith("[")) {
            int start = comment.indexOf("[Acquisition Parameters]");
            int end = comment.indexOf("[Acquisition Parameters End]");
            if (start != -1 && end != -1 && end > start) {
                String parms = comment.substring(start + 24, end).trim();
                // this is an INI-style comment, with one key/value pair per line
                StringTokenizer st = new StringTokenizer(parms, "\n");
                while (st.hasMoreTokens()) {
                    String token = st.nextToken();
                    int eq = token.indexOf("=");
                    if (eq != -1) {
                        String key = token.substring(0, eq);
                        String value = token.substring(eq + 1);
                        addMeta(key, value);
                        // per-channel values are stored into the first empty slot
                        if (key.startsWith("Gain Ch")) {
                            for (int i=0; i<gains.length; i++) {
                                if (gains[i] == null) {
                                    gains[i] = value;
                                    break;
                                }
                            }
                        }
                        else if (key.startsWith("PMT Voltage Ch")) {
                            for (int i=0; i<voltages.length; i++) {
                                if (voltages[i] == null) {
                                    voltages[i] = value;
                                    break;
                                }
                            }
                        }
                        else if (key.startsWith("Offset Ch")) {
                            for (int i=0; i<offsets.length; i++) {
                                if (offsets[i] == null) {
                                    offsets[i] = value;
                                    break;
                                }
                            }
                        }
                        else if (key.equals("Magnification")) mag = value;
                        else if (key.equals("System Configuration")) detManu = value;
                        else if (key.equals("Objective Lens")) objManu = value;
                        else if (key.equals("Gamma")) gamma = new Double(value);
                        else if (key.startsWith("Channel ") && key.endsWith("Dye")) {
                            for (int i=0; i<channelNames.length; i++) {
                                if (channelNames[i] == null) {
                                    channelNames[i] = value;
                                    break;
                                }
                            }
                        }
                        else if (key.startsWith("Confocal Aperture-Ch")) {
                            for (int i=0; i<lensNA.length; i++) {
                                if (lensNA[i] == null) {
                                    // drop the last two characters (presumably a unit
                                    // suffix -- TODO confirm against sample files)
                                    lensNA[i] = value.substring(0, value.length() - 2);
                                    break;
                                }
                            }
                        }
                    }
                }
            }
            // reduce the stored comment to the first value in [Version Info]
            start = comment.indexOf("[Version Info]");
            end = comment.indexOf("[Version Info End]");
            if (start != -1 && end != -1 && end > start) {
                comment = comment.substring(start + 14, end).trim();
                start = comment.indexOf("=") + 1;
                end = comment.indexOf("\n");
                if (end > start) comment = comment.substring(start, end).trim();
                else comment = comment.substring(start).trim();
            }
            else comment = "";
        }
        addMeta("Comment", comment);
    }

    /* @see loci.formats.in.BaseTiffReader#initMetadataStore() */
    protected void initMetadataStore() {
        super.initMetadataStore();
        MetadataStore store =
            new FilterMetadata(getMetadataStore(), isMetadataFiltered());
        store.setImageName("", 0);
        store.setImageDescription(comment, 0);
        store.setImageCreationDate(
            DataTools.convertDate(System.currentTimeMillis(), DataTools.UNIX), 0);
        // populate Dimensions
        store.setDimensionsPhysicalSizeX(new Float(voxelX), 0, 0);
        store.setDimensionsPhysicalSizeY(new Float(voxelY), 0, 0);
        store.setDimensionsPhysicalSizeZ(new Float(voxelZ), 0, 0);
        store.setDimensionsTimeIncrement(new Float(voxelT), 0, 0);
        if ((int) voxelC > 0) {
            store.setDimensionsWaveIncrement(new Integer((int) voxelC), 0, 0);
        }
        // per-channel metadata gathered from the TIFF comment
        for (int i=0; i<core.sizeC[0]; i++) {
            if (channelNames[i] != null) {
                store.setLogicalChannelName(channelNames[i].trim(), 0, i);
            }
            if (lensNA[i] != null) {
                store.setObjectiveLensNA(new Float(lensNA[i]), 0, i);
            }
            //if (gains[i] != null) {
            //  store.setDetectorSettingsGain(new Float(gains[i]), 0, i);
            //}
            //if (offsets[i] != null) {
            //  store.setDetectorSettingsOffset(new Float(offsets[i]), 0, i);
            //}
        }
        /*
        for (int i=0; i<core.sizeC[0]; i++) {
            // CTR CHECK
            //  store.setDisplayChannel(new Integer(i), null, null,
            //    gamma == null ? null : new Float(gamma.floatValue()), null);
            if (voltages[i] != null) {
                if (detManu != null) store.setDetectorManufacturer(detManu, 0, 0);
                store.setDetectorVoltage(new Float(voltages[i]), 0, 0);
            }
        }
        if (mag != null && mag.toLowerCase().endsWith("x")) {
            mag = mag.substring(0, mag.length() - 1);
        }
        else if (mag == null) mag = "1";
        if (objManu != null) store.setObjectiveManufacturer(objManu, 0, 0);
        if (mag != null) {
            store.setObjectiveCalibratedMagnification(new Float(mag), 0, 0);
        }
        */
    }
}
| loci/formats/in/FluoviewReader.java | //
// FluoviewReader.java
//
/*
LOCI Bio-Formats package for reading and converting biological file formats.
Copyright (C) 2005-@year@ Melissa Linkert, Curtis Rueden, Chris Allan,
Eric Kjellman and Brian Loranger.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Library General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Library General Public License for more details.
You should have received a copy of the GNU Library General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package loci.formats.in;
import java.awt.image.BufferedImage;
import java.io.*;
import java.util.*;
import loci.formats.*;
import loci.formats.meta.FilterMetadata;
import loci.formats.meta.MetadataStore;
/**
* FluoviewReader is the file format reader for
* Olympus Fluoview TIFF files AND Andor Bio-imaging Division (ABD) TIFF files.
*
* <dl><dt><b>Source code:</b></dt>
* <dd><a href="https://skyking.microscopy.wisc.edu/trac/java/browser/trunk/loci/formats/in/FluoviewReader.java">Trac</a>,
* <a href="https://skyking.microscopy.wisc.edu/svn/java/trunk/loci/formats/in/FluoviewReader.java">SVN</a></dd></dl>
*
* @author Eric Kjellman egkjellman at wisc.edu
* @author Melissa Linkert linkert at wisc.edu
* @author Curtis Rueden ctrueden at wisc.edu
*/
public class FluoviewReader extends BaseTiffReader {
// -- Constants --
/** Maximum number of bytes to check for Fluoview header information. */
private static final int BLOCK_CHECK_LEN = 16384;
/** String identifying a Fluoview file. */
private static final String FLUOVIEW_MAGIC_STRING = "FLUOVIEW";
/** Private TIFF tags */
private static final int MMHEADER = 34361;
private static final int MMSTAMP = 34362;
// -- Fields --
/** Pixel dimensions for this file. */
private float voxelX = 1f, voxelY = 1f, voxelZ = 1f, voxelC = 1f, voxelT = 1f;
/** First image. */
private BufferedImage zeroImage = null;
// hardware settings
private String[] gains, voltages, offsets, channelNames, lensNA;
private String mag, detManu, objManu, comment;
private Double gamma;
// -- Constructor --
/** Constructs a new Fluoview TIFF reader. */
public FluoviewReader() {
super("Olympus Fluoview/ABD TIFF", new String[] {"tif", "tiff"});
blockCheckLen = 524288;
suffixSufficient = false;
}
// -- IFormatReader API methods --
/* @see loci.formats.IFormatReader#isThisType(byte[]) */
public boolean isThisType(byte[] block) {
try {
RandomAccessStream stream = new RandomAccessStream(block);
Hashtable ifd = TiffTools.getFirstIFD(stream);
stream.close();
String comment = TiffTools.getComment(ifd);
if (comment == null) comment = "";
return comment.indexOf(FLUOVIEW_MAGIC_STRING) != -1 &&
ifd.containsKey(new Integer(MMHEADER)) ||
ifd.containsKey(new Integer(MMSTAMP));
}
catch (IOException e) {
if (debug) LogTools.trace(e);
}
catch (ArrayIndexOutOfBoundsException e) {
if (debug) LogTools.trace(e);
}
return false;
}
/**
* @see loci.formats.IFormatReader#openBytes(int, byte[], int, int, int, int)
*/
public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h)
throws FormatException, IOException
{
if (core.sizeY[0] == TiffTools.getImageLength(ifds[0])) {
return super.openBytes(no, buf, x, y, w, h);
}
FormatTools.assertId(currentId, true, 1);
FormatTools.checkPlaneNumber(this, no);
FormatTools.checkBufferSize(this, buf.length, w, h);
byte[] b = new byte[w * h *
getRGBChannelCount() * FormatTools.getBytesPerPixel(core.pixelType[0])];
super.openBytes(0, b, x, y, w, h);
System.arraycopy(b, 0, buf, 0, buf.length);
return buf;
}
// -- IFormatHandler API methods --
/* @see loci.formats.IFormatReader#close() */
public void close() throws IOException {
super.close();
voxelX = voxelY = voxelZ = voxelC = voxelT = 1f;
zeroImage = null;
}
// -- Internal BaseTiffReader API methods --
/* @see loci.formats.BaseTiffReader#initStandardMetadata() */
/**
 * Parses the Fluoview-specific MMHEADER and MMSTAMP private TIFF tags and
 * derives the core dimension metadata (axis sizes, dimension order, voxel
 * calibrations) plus the per-channel acquisition parameters embedded in the
 * TIFF comment.
 */
protected void initStandardMetadata() throws FormatException, IOException {
  super.initStandardMetadata();

  // First, we want to determine whether this file is a Fluoview TIFF.
  // Originally, Andor TIFF had its own reader; however, the two formats are
  // very similar, so it made more sense to merge the two formats into one
  // reader.

  // Pull the raw MMHEADER tag bytes out of the first IFD.  The tag is stored
  // as a short array, so narrow each value back down into a byte.
  short[] s = TiffTools.getIFDShortArray(ifds[0], MMHEADER, true);
  byte[] mmheader = new byte[s.length];
  for (int i=0; i<mmheader.length; i++) {
    mmheader[i] = (byte) s[i];
    if (mmheader[i] < 0) mmheader[i]++;
  }

  // Walk the MMHEADER structure with a stream that honors the file's
  // byte ordering.
  RandomAccessStream ras = new RandomAccessStream(mmheader);
  ras.order(isLittleEndian());
  put("Header Flag", ras.readShort());
  put("Image Type", ras.readChar());
  put("Image name", ras.readString(257));
  ras.skipBytes(4); // skip pointer to data field
  put("Number of colors", ras.readInt());
  ras.skipBytes(4); // skip pointer to palette field
  ras.skipBytes(4); // skip pointer to other palette field
  put("Comment size", ras.readInt());
  ras.skipBytes(4); // skip pointer to comment field

  // read dimension information: up to 10 fixed-size records, each carrying
  // a name, size, origin, resolution and a units string
  String[] names = new String[10];
  int[] sizes = new int[10];
  double[] resolutions = new double[10];
  for (int i=0; i<10; i++) {
    names[i] = ras.readString(16);
    sizes[i] = ras.readInt();
    double origin = ras.readDouble();
    resolutions[i] = ras.readDouble();
    put("Dimension " + (i+1) + " Name", names[i]);
    put("Dimension " + (i+1) + " Size", sizes[i]);
    put("Dimension " + (i+1) + " Origin", origin);
    put("Dimension " + (i+1) + " Resolution", resolutions[i]);
    put("Dimension " + (i+1) + " Units", ras.readString(64));
  }

  ras.skipBytes(4); // skip pointer to spatial position data
  put("Map type", ras.readShort());
  put("Map min", ras.readDouble());
  put("Map max", ras.readDouble());
  put("Min value", ras.readDouble());
  put("Max value", ras.readDouble());
  ras.skipBytes(4); // skip pointer to map data
  put("Gamma", ras.readDouble());
  put("Offset", ras.readDouble());

  // read gray channel data
  put("Gray Channel Name", ras.readString(16));
  put("Gray Channel Size", ras.readInt());
  put("Gray Channel Origin", ras.readDouble());
  put("Gray Channel Resolution", ras.readDouble());
  put("Gray Channel Units", ras.readString(64));
  ras.skipBytes(4); // skip pointer to thumbnail data
  put("Voice field", ras.readInt());
  ras.skipBytes(4); // skip pointer to voice field

  // now we need to read the MMSTAMP data to determine dimension order
  double[][] stamps = new double[8][ifds.length];
  for (int i=0; i<ifds.length; i++) {
    s = TiffTools.getIFDShortArray(ifds[i], MMSTAMP, true);
    byte[] stamp = new byte[s.length];
    for (int j=0; j<s.length; j++) {
      stamp[j] = (byte) s[j];
      if (stamp[j] < 0) stamp[j]++;
    }
    ras = new RandomAccessStream(stamp);
    // each stamp is 8 doubles, representing the position on dimensions 3-10
    for (int j=0; j<8; j++) {
      stamps[j][i] = ras.readDouble();
    }
  }

  // calculate the dimension order and axis sizes
  core.sizeZ[0] = core.sizeC[0] = core.sizeT[0] = 1;
  core.currentOrder[0] = "XY";
  core.metadataComplete[0] = true;
  for (int i=0; i<10; i++) {
    String name = names[i];
    int size = sizes[i];
    float voxel = (float) resolutions[i];
    // skip unused dimension slots
    if (name == null || size == 0) continue;
    name = name.toLowerCase().trim();
    if (name.length() == 0) continue;
    if (name.equals("x")) {
      if (core.sizeX[0] == 0) core.sizeX[0] = size;
      voxelX = voxel;
    }
    else if (name.equals("y")) {
      if (core.sizeY[0] == 0) core.sizeY[0] = size;
      voxelY = voxel;
    }
    else if (name.equals("z") || name.equals("event")) {
      core.sizeZ[0] *= size;
      if (core.currentOrder[0].indexOf("Z") == -1) {
        core.currentOrder[0] += "Z";
      }
      voxelZ = voxel;
    }
    else if (name.equals("ch") || name.equals("wavelength")) {
      core.sizeC[0] *= size;
      if (core.currentOrder[0].indexOf("C") == -1) {
        core.currentOrder[0] += "C";
      }
      voxelC = voxel;
    }
    else {
      // anything unrecognized is folded into the time axis
      core.sizeT[0] *= size;
      if (core.currentOrder[0].indexOf("T") == -1) {
        core.currentOrder[0] += "T";
      }
      voxelT = voxel;
    }
  }

  // make sure the dimension order names all five axes
  if (core.currentOrder[0].indexOf("Z") == -1) core.currentOrder[0] += "Z";
  if (core.currentOrder[0].indexOf("T") == -1) core.currentOrder[0] += "T";
  if (core.currentOrder[0].indexOf("C") == -1) core.currentOrder[0] += "C";

  // clamp axis sizes to the number of available planes
  core.imageCount[0] = ifds.length;
  if (core.sizeZ[0] > ifds.length) core.sizeZ[0] = ifds.length;
  if (core.sizeT[0] > ifds.length) core.sizeT[0] = ifds.length;
  if (core.imageCount[0] == 1 && (core.sizeT[0] == core.sizeY[0] ||
    core.sizeZ[0] == core.sizeY[0]) && (core.sizeT[0] > core.imageCount[0] ||
    core.sizeZ[0] > core.imageCount[0]))
  {
    core.sizeY[0] = 1;
    core.imageCount[0] = core.sizeZ[0] * core.sizeT[0] * core.sizeC[0];
  }

  // cut up the comment, if necessary
  comment = TiffTools.getComment(ifds[0]);
  gains = new String[core.sizeC[0]];
  offsets = new String[core.sizeC[0]];
  voltages = new String[core.sizeC[0]];
  channelNames = new String[core.sizeC[0]];
  lensNA = new String[core.sizeC[0]];
  if (comment != null && comment.startsWith("[")) {
    int start = comment.indexOf("[Acquisition Parameters]");
    int end = comment.indexOf("[Acquisition Parameters End]");
    if (start != -1 && end != -1 && end > start) {
      // 24 == "[Acquisition Parameters]".length()
      String parms = comment.substring(start + 24, end).trim();

      // this is an INI-style comment, with one key/value pair per line

      StringTokenizer st = new StringTokenizer(parms, "\n");
      while (st.hasMoreTokens()) {
        String token = st.nextToken();
        int eq = token.indexOf("=");
        if (eq != -1) {
          String key = token.substring(0, eq);
          String value = token.substring(eq + 1);
          addMeta(key, value);
          // channel-indexed values are stored into the first free slot
          if (key.startsWith("Gain Ch")) {
            for (int i=0; i<gains.length; i++) {
              if (gains[i] == null) {
                gains[i] = value;
                break;
              }
            }
          }
          else if (key.startsWith("PMT Voltage Ch")) {
            for (int i=0; i<voltages.length; i++) {
              if (voltages[i] == null) {
                voltages[i] = value;
                break;
              }
            }
          }
          else if (key.startsWith("Offset Ch")) {
            for (int i=0; i<offsets.length; i++) {
              if (offsets[i] == null) {
                offsets[i] = value;
                break;
              }
            }
          }
          else if (key.equals("Magnification")) mag = value;
          else if (key.equals("System Configuration")) detManu = value;
          else if (key.equals("Objective Lens")) objManu = value;
          else if (key.equals("Gamma")) gamma = new Double(value);
          else if (key.startsWith("Channel ") && key.endsWith("Dye")) {
            for (int i=0; i<channelNames.length; i++) {
              if (channelNames[i] == null) {
                channelNames[i] = value;
                break;
              }
            }
          }
          else if (key.startsWith("Confocal Aperture-Ch")) {
            for (int i=0; i<lensNA.length; i++) {
              if (lensNA[i] == null) {
                // strip the trailing two characters (presumably a unit
                // suffix - TODO confirm against sample files)
                lensNA[i] = value.substring(0, value.length() - 2);
                break;
              }
            }
          }
        }
      }
    }
    // reduce the comment to just the version info value, if present
    start = comment.indexOf("[Version Info]");
    end = comment.indexOf("[Version Info End]");
    if (start != -1 && end != -1 && end > start) {
      // 14 == "[Version Info]".length()
      comment = comment.substring(start + 14, end).trim();
      start = comment.indexOf("=") + 1;
      end = comment.indexOf("\n");
      if (end > start) comment = comment.substring(start, end).trim();
      else comment = comment.substring(start).trim();
    }
    else comment = "";
  }
  addMeta("Comment", comment);
}
/* @see loci.formats.in.BaseTiffReader#initMetadataStore() */
/**
 * Pushes the parsed Fluoview metadata (voxel calibrations, per-channel
 * names and lens NA values) into the OME metadata store.  Detector and
 * objective population is currently disabled (see commented block below).
 */
protected void initMetadataStore() {
  super.initMetadataStore();
  MetadataStore store =
    new FilterMetadata(getMetadataStore(), isMetadataFiltered());
  store.setImageName("", 0);
  store.setImageDescription(comment, 0);
  // creation date is the current time, not a value from the file
  store.setImageCreationDate(
    DataTools.convertDate(System.currentTimeMillis(), DataTools.UNIX), 0);

  // populate Dimensions
  store.setDimensionsPhysicalSizeX(new Float(voxelX), 0, 0);
  store.setDimensionsPhysicalSizeY(new Float(voxelY), 0, 0);
  store.setDimensionsPhysicalSizeZ(new Float(voxelZ), 0, 0);
  store.setDimensionsTimeIncrement(new Float(voxelT), 0, 0);
  // wave increment only makes sense as a positive integer
  if ((int) voxelC > 0) {
    store.setDimensionsWaveIncrement(new Integer((int) voxelC), 0, 0);
  }

  // per-channel logical channel name and objective lens NA
  for (int i=0; i<core.sizeC[0]; i++) {
    if (channelNames[i] != null) {
      store.setLogicalChannelName(channelNames[i].trim(), 0, i);
    }
    if (lensNA[i] != null) {
      store.setObjectiveLensNA(new Float(lensNA[i]), 0, i);
    }
    //if (gains[i] != null) {
    //  store.setDetectorSettingsGain(new Float(gains[i]), 0, i);
    //}
    //if (offsets[i] != null) {
    //  store.setDetectorSettingsOffset(new Float(offsets[i]), 0, i);
    //}
  }

  // detector/objective population, intentionally disabled pending review
  /*
  for (int i=0; i<core.sizeC[0]; i++) {
    // CTR CHECK
  //  store.setDisplayChannel(new Integer(i), null, null,
  //    gamma == null ? null : new Float(gamma.floatValue()), null);
    if (voltages[i] != null) {
      if (detManu != null) store.setDetectorManufacturer(detManu, 0, 0);
      store.setDetectorVoltage(new Float(voltages[i]), 0, 0);
    }
  }
  if (mag != null && mag.toLowerCase().endsWith("x")) {
    mag = mag.substring(0, mag.length() - 1);
  }
  else if (mag == null) mag = "1";
  if (objManu != null) store.setObjectiveManufacturer(objManu, 0, 0);
  if (mag != null) {
    store.setObjectiveCalibratedMagnification(new Float(mag), 0, 0);
  }
  */
}
}
| Fix jikes warning.
| loci/formats/in/FluoviewReader.java | Fix jikes warning. |
|
Java | bsd-3-clause | 9e110d319088b7635de8098cea7eaf53e140a20c | 0 | bdezonia/zorbage,bdezonia/zorbage | // A zorbage example: visualizing the Lorenz system using jMonkeyEngine.
//
// See https://en.wikipedia.org/wiki/Lorenz_system
//
// This code is in the public domain. Use however you wish.
package lorenz;
import com.jme3.app.*;
import com.jme3.material.Material;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Vector3f;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.shape.Line;
import nom.bdezonia.zorbage.algebras.G;
import nom.bdezonia.zorbage.algorithm.ClassicRungeKutta;
import nom.bdezonia.zorbage.procedure.Procedure3;
import nom.bdezonia.zorbage.type.data.float32.real.Float32Member;
import nom.bdezonia.zorbage.type.data.float32.real.Float32VectorMember;
import nom.bdezonia.zorbage.type.storage.datasource.ArrayDataSource;
import nom.bdezonia.zorbage.type.storage.datasource.IndexedDataSource;
/**
*
* @author Barry DeZonia
*
*/
public class Main extends SimpleApplication{

	// Lorenz system parameters (the classic chaotic choice).
	private static final float SIGMA = 10;
	private static final float RHO = 28;
	private static final float BETA = 8f/3;

	/**
	 * Integrates the Lorenz system with a classic Runge-Kutta scheme and
	 * renders the resulting trajectory as a chain of blue line segments.
	 */
	@Override
	public void simpleInitApp() {

		// Derivative of the Lorenz system; the state vector is [x, y, z, t].
		Procedure3<Float32Member,Float32VectorMember, Float32VectorMember> lorenz =
			new Procedure3<Float32Member, Float32VectorMember, Float32VectorMember>()
		{
			// scratch members reused across calls to avoid per-step allocation
			private Float32Member xc = G.FLT.construct();
			private Float32Member yc = G.FLT.construct();
			private Float32Member zc = G.FLT.construct();

			@Override
			public void call(Float32Member t, Float32VectorMember y, Float32VectorMember result) {
				if (y.length() != 4)
					throw new IllegalArgumentException("oops");
				result.alloc(4);
				y.v(0, xc);
				y.v(1, yc);
				y.v(2, zc);
				Float32Member v = G.FLT.construct();
				v.setV(SIGMA * (yc.v()-xc.v()));
				result.setV(0, v);
				v.setV(xc.v()*(RHO-zc.v()) - yc.v());
				result.setV(1, v);
				v.setV(xc.v()*yc.v() - BETA*zc.v());
				result.setV(2, v);
				// the 4th component carries time, so dt/dt == 1
				v.setV(1);
				result.setV(3, v);
			}
		};
		Float32VectorMember value = G.FLT_VEC.construct();
		Float32Member t0 = G.FLT.construct();
		Float32VectorMember y0 = G.FLT_VEC.construct("[0.5,0.5,0.1,0]");
		int numSteps = 50000;
		Float32Member dt = G.FLT.construct(((Double)(1.0 / 64)).toString());
		IndexedDataSource<Float32VectorMember> results = ArrayDataSource.construct(G.FLT_VEC, numSteps);
		ClassicRungeKutta.compute(G.FLT_VEC, G.FLT, lorenz, t0, y0, numSteps, dt, results);
		// Unpack the spatial components of each step.  The time component
		// (index 3) is not needed for rendering, so it is not extracted.
		// (The previous revision filled a ts[] array that was never read.)
		float[] xs = new float[numSteps];
		float[] ys = new float[numSteps];
		float[] zs = new float[numSteps];
		Float32Member component = G.FLT.construct();
		for (int i = 0; i < numSteps; i++) {
			results.get(i, value);
			value.v(0, component);
			xs[i] = component.v();
			value.v(1, component);
			ys[i] = component.v();
			value.v(2, component);
			zs[i] = component.v();
		}
		Node origin = new Node("origin");
		Material mat = new Material(assetManager,
			"Common/MatDefs/Misc/Unshaded.j3md"); // create a simple material
		mat.setColor("Color", ColorRGBA.Blue); // set color of material to blue
		// One line segment per consecutive pair of trajectory points.
		for (int i = 1; i < numSteps; i++) {
			Vector3f start = new Vector3f(xs[i-1], ys[i-1], zs[i-1]);
			Vector3f end = new Vector3f(xs[i], ys[i], zs[i]);
			Line l = new Line(start, end);
			Geometry geom = new Geometry("Line", l); // create geometry from the line
			geom.setMaterial(mat); // set the line's material
			origin.attachChild(geom);
		}
		rootNode.attachChild(origin); // make the geometry appear in the scene
	}

	public static void main(String[] args){
		Main app = new Main();
		app.start(); // start the viewer
	}
}
| example/lorenz/Main.java | // A zorbage example: visualizing the Lorenz system using jMonkeyEngine.
//
// See https://en.wikipedia.org/wiki/Lorenz_system
//
// This code is in the public domain. Use however you wish.
package lorenz;
import com.jme3.app.*;
import com.jme3.material.Material;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Vector3f;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.shape.Line;
import nom.bdezonia.zorbage.algebras.G;
import nom.bdezonia.zorbage.algorithm.ClassicRungeKutta;
import nom.bdezonia.zorbage.procedure.Procedure3;
import nom.bdezonia.zorbage.type.data.float32.real.Float32Member;
import nom.bdezonia.zorbage.type.data.float32.real.Float32VectorMember;
import nom.bdezonia.zorbage.type.storage.datasource.ArrayDataSource;
import nom.bdezonia.zorbage.type.storage.datasource.IndexedDataSource;
/**
*
* @author Barry DeZonia
*
*/
public class Main extends SimpleApplication{

	// Lorenz system parameters (the classic chaotic choice).
	private static final float SIGMA = 10;
	private static final float RHO = 28;
	private static final float BETA = 8f/3;

	/**
	 * Integrates the Lorenz system with a classic Runge-Kutta scheme and
	 * renders the resulting trajectory as a chain of blue line segments.
	 */
	@Override
	public void simpleInitApp() {

		// Derivative of the Lorenz system; the state vector is [x, y, z, t].
		Procedure3<Float32Member,Float32VectorMember, Float32VectorMember> lorenz =
			new Procedure3<Float32Member, Float32VectorMember, Float32VectorMember>()
		{
			@Override
			public void call(Float32Member t, Float32VectorMember y, Float32VectorMember result) {
				if (y.length() != 4)
					throw new IllegalArgumentException("oops");
				result.alloc(4);
				// NOTE(review): xc/yc/zc/v are constructed on every call;
				// hoisting them to fields would avoid per-step allocation.
				Float32Member xc = G.FLT.construct();
				Float32Member yc = G.FLT.construct();
				Float32Member zc = G.FLT.construct();
				y.v(0, xc);
				y.v(1, yc);
				y.v(2, zc);
				Float32Member v = G.FLT.construct();
				float val = SIGMA * (yc.v()-xc.v());
				v.setV(val);
				result.setV(0, v);
				val = xc.v()*(RHO-zc.v()) - yc.v();
				v.setV(val);
				result.setV(1, v);
				val = xc.v()*yc.v() - BETA*zc.v();
				v.setV(val);
				result.setV(2, v);
				// the 4th component carries time, so dt/dt == 1
				v.setV(1);
				result.setV(3, v);
			}
		};
		Float32VectorMember value = G.FLT_VEC.construct();
		Float32Member t0 = G.FLT.construct();
		Float32VectorMember y0 = G.FLT_VEC.construct("[0.5,0.5,0.1,0]");
		int numSteps = 50000;
		Float32Member dt = G.FLT.construct(((Double)(1.0 / 64)).toString());
		IndexedDataSource<Float32VectorMember> results = ArrayDataSource.construct(G.FLT_VEC, numSteps);
		ClassicRungeKutta.compute(G.FLT_VEC, G.FLT, lorenz, t0, y0, numSteps, dt, results);
		// Unpack each solution vector into per-axis arrays.
		float[] xs = new float[numSteps];
		float[] ys = new float[numSteps];
		float[] zs = new float[numSteps];
		// NOTE(review): ts is filled below but never read afterwards.
		float[] ts = new float[numSteps];
		Float32Member component = G.FLT.construct();
		for (int i = 0; i < numSteps; i++) {
			results.get(i, value);
			value.v(0, component);
			xs[i] = component.v();
			value.v(1, component);
			ys[i] = component.v();
			value.v(2, component);
			zs[i] = component.v();
			value.v(3, component);
			ts[i] = component.v();
		}
		Node origin = new Node("origin");
		Material mat = new Material(assetManager,
			"Common/MatDefs/Misc/Unshaded.j3md"); // create a simple material
		mat.setColor("Color", ColorRGBA.Blue); // set color of material to blue
		// One line segment per consecutive pair of trajectory points.
		for (int i = 1; i < numSteps; i++) {
			Vector3f start = new Vector3f(xs[i-1], ys[i-1], zs[i-1]);
			Vector3f end = new Vector3f(xs[i], ys[i], zs[i]);
			Line l = new Line(start, end);
			Geometry geom = new Geometry("Line", l); // create geometry from the line
			geom.setMaterial(mat); // set the line's material
			origin.attachChild(geom);
		}
		rootNode.attachChild(origin); // make the geometry appear in the scene
	}

	public static void main(String[] args){
		Main app = new Main();
		app.start(); // start the viewer
	}
}
| Improve the code style within the jMonkeyEngine example
| example/lorenz/Main.java | Improve the code style within the jMonkeyEngine example |
|
Java | bsd-3-clause | f983255b334d8e5771ad56320bb65111d3b573d6 | 0 | NCIP/cananolab,NCIP/cananolab,NCIP/cananolab | package gov.nih.nci.cananolab.service.admin.impl;
import gov.nih.nci.cananolab.domain.common.Author;
import gov.nih.nci.cananolab.domain.common.Datum;
import gov.nih.nci.cananolab.domain.common.ExperimentConfig;
import gov.nih.nci.cananolab.domain.common.File;
import gov.nih.nci.cananolab.domain.common.Finding;
import gov.nih.nci.cananolab.domain.common.Organization;
import gov.nih.nci.cananolab.domain.common.PointOfContact;
import gov.nih.nci.cananolab.domain.common.Protocol;
import gov.nih.nci.cananolab.domain.common.Publication;
import gov.nih.nci.cananolab.domain.function.Target;
import gov.nih.nci.cananolab.domain.function.TargetingFunction;
import gov.nih.nci.cananolab.domain.particle.Characterization;
import gov.nih.nci.cananolab.domain.particle.ChemicalAssociation;
import gov.nih.nci.cananolab.domain.particle.ComposingElement;
import gov.nih.nci.cananolab.domain.particle.Function;
import gov.nih.nci.cananolab.domain.particle.FunctionalizingEntity;
import gov.nih.nci.cananolab.domain.particle.NanomaterialEntity;
import gov.nih.nci.cananolab.domain.particle.Sample;
import gov.nih.nci.cananolab.domain.particle.SampleComposition;
import gov.nih.nci.cananolab.exception.AdministrationException;
import gov.nih.nci.cananolab.exception.NoAccessException;
import gov.nih.nci.cananolab.exception.NotExistException;
import gov.nih.nci.cananolab.service.protocol.ProtocolService;
import gov.nih.nci.cananolab.service.protocol.helper.ProtocolServiceHelper;
import gov.nih.nci.cananolab.service.protocol.impl.ProtocolServiceLocalImpl;
import gov.nih.nci.cananolab.service.publication.PublicationService;
import gov.nih.nci.cananolab.service.publication.helper.PublicationServiceHelper;
import gov.nih.nci.cananolab.service.publication.impl.PublicationServiceLocalImpl;
import gov.nih.nci.cananolab.service.sample.SampleService;
import gov.nih.nci.cananolab.service.sample.impl.SampleServiceLocalImpl;
import gov.nih.nci.cananolab.service.security.SecurityService;
import gov.nih.nci.cananolab.system.applicationservice.CustomizedApplicationService;
import gov.nih.nci.system.client.ApplicationServiceProvider;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import org.apache.log4j.Logger;
import org.hibernate.FetchMode;
import org.hibernate.criterion.CriteriaSpecification;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.Property;
/**
* Service methods for update createdBy field.
*
* @author lethai, pansu
*/
public class UpdateCreatedByServiceImpl {
private static Logger logger = Logger
.getLogger(UpdateCreatedByServiceImpl.class);
/**
 * Rewrites the createdBy field from currentCreatedBy to newCreatedBy on
 * every sample in sampleIds, walking the full object graph (points of
 * contact, composition entities, functions, targets, characterizations,
 * findings, data and files) and persisting each node individually.
 *
 * @return the number of samples that failed to update (failures are
 *         logged per sample; processing continues with the next id)
 */
private int update(SampleService sampleService, List<String> sampleIds,
    String currentCreatedBy, String newCreatedBy)
    throws AdministrationException, NoAccessException {
  // failure counter: incremented once per sample whose update throws
  int i = 0;
  try {
    CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
        .getApplicationService();
    for (String sampleId : sampleIds) {
      try {
        // eagerly load the whole sample graph so every child is reachable
        Sample domain = this.findFullyLoadedSampleById(sampleId);
        domain.setCreatedBy(newCreatedBy(domain.getCreatedBy(), currentCreatedBy, newCreatedBy));
        appService.saveOrUpdate(domain);
        SampleComposition sampleComposition = domain
            .getSampleComposition();
        Collection<ChemicalAssociation> chemicalAssociation = new ArrayList<ChemicalAssociation>();
        Collection<FunctionalizingEntity> functionalizingEntity = new ArrayList<FunctionalizingEntity>();
        Collection<NanomaterialEntity> nanomaterialEntity = new ArrayList<NanomaterialEntity>();
        Collection<Characterization> characterization = new ArrayList<Characterization>();
        // point of contact
        PointOfContact poc = domain.getPrimaryPointOfContact();
        if (poc != null) {
          poc.setCreatedBy(newCreatedBy(poc.getCreatedBy(), currentCreatedBy, newCreatedBy));
          appService.saveOrUpdate(poc);
          // organization
          Organization organization = poc.getOrganization();
          if(organization != null){
            organization.setCreatedBy(newCreatedBy(organization.getCreatedBy(), currentCreatedBy, newCreatedBy));
            appService.saveOrUpdate(organization);
          }
        }
        // secondary points of contact and their organizations
        if (domain.getOtherPointOfContactCollection() != null) {
          for (PointOfContact otherpoc : domain
              .getOtherPointOfContactCollection()) {
            otherpoc.setCreatedBy(newCreatedBy(otherpoc.getCreatedBy(),
                currentCreatedBy, newCreatedBy));
            appService.saveOrUpdate(otherpoc);
            Organization organization1 = otherpoc.getOrganization();
            if(organization1 != null){
              organization1.setCreatedBy(newCreatedBy(organization1.getCreatedBy(), currentCreatedBy, newCreatedBy));
              appService.saveOrUpdate(organization1);
            }
          }
        }
        // updating Sample Composition
        if (sampleComposition != null) {
          // composition-level files
          if (sampleComposition.getFileCollection() != null) {
            for (File file : sampleComposition
                .getFileCollection()) {
              file
                  .setCreatedBy(newCreatedBy(file
                      .getCreatedBy(), currentCreatedBy,
                      newCreatedBy));
              appService.saveOrUpdate(file);
            }
          }
          chemicalAssociation = sampleComposition
              .getChemicalAssociationCollection();
          functionalizingEntity = sampleComposition
              .getFunctionalizingEntityCollection();
          nanomaterialEntity = sampleComposition
              .getNanomaterialEntityCollection();
          characterization = domain
              .getCharacterizationCollection();
          // chemical associations and their files
          for (ChemicalAssociation ca : chemicalAssociation) {
            ca.setCreatedBy(newCreatedBy(ca.getCreatedBy(), currentCreatedBy, newCreatedBy));
            Collection<File> fileCollection = ca.getFileCollection();
            if(fileCollection != null){
              for(File file:fileCollection){
                if(file != null){
                  file.setCreatedBy(newCreatedBy(file.getCreatedBy(), currentCreatedBy, newCreatedBy));
                  appService.saveOrUpdate(file);
                }
              }
            }
            appService.saveOrUpdate(ca);
          }
          // functionalizing entities: files and functions
          for (FunctionalizingEntity fe : functionalizingEntity) {
            fe.setCreatedBy(newCreatedBy(fe.getCreatedBy(), currentCreatedBy, newCreatedBy));
            Collection<File> fileCollection = fe.getFileCollection();
            if(fileCollection != null){
              for(File file:fileCollection){
                if(file != null){
                  file.setCreatedBy(newCreatedBy(file.getCreatedBy(), currentCreatedBy, newCreatedBy));
                  appService.saveOrUpdate(file);
                }
              }
            }
            Collection<Function> functionCollection = fe.getFunctionCollection();
            if(functionCollection != null){
              for(Function f:functionCollection){
                if(f != null){
                  f.setCreatedBy(newCreatedBy(f.getCreatedBy(), currentCreatedBy, newCreatedBy));
                  appService.saveOrUpdate(f);
                }
              }
            }
            appService.saveOrUpdate(fe);
          }
          // nanomaterial entities: files, composing elements, functions
          // and (for targeting functions) their targets
          for (NanomaterialEntity ne : nanomaterialEntity) {
            ne.setCreatedBy(newCreatedBy(ne.getCreatedBy(), currentCreatedBy, newCreatedBy));
            Collection<File> fileCollection = ne.getFileCollection();
            if(fileCollection != null){
              for(File file:fileCollection){
                if(file != null){
                  file.setCreatedBy(newCreatedBy(file.getCreatedBy(), currentCreatedBy, newCreatedBy));
                  appService.saveOrUpdate(file);
                }
              }
            }
            if (ne.getComposingElementCollection() != null) {
              for (ComposingElement ce : ne
                  .getComposingElementCollection()) {
                ce.setCreatedBy(newCreatedBy(ce
                    .getCreatedBy(), currentCreatedBy,
                    newCreatedBy));
                if (ce.getInherentFunctionCollection() != null) {
                  for (Function function : ce
                      .getInherentFunctionCollection()) {
                    function.setCreatedBy(newCreatedBy(
                        function.getCreatedBy(),
                        currentCreatedBy, newCreatedBy));
                    if (function instanceof TargetingFunction) {
                      TargetingFunction tFunc = (TargetingFunction) function;
                      if (tFunc.getTargetCollection() != null) {
                        for (Target target : tFunc
                            .getTargetCollection()) {
                          target
                              .setCreatedBy(newCreatedBy(
                                  target
                                      .getCreatedBy(),
                                  currentCreatedBy,
                                  newCreatedBy));
                          appService.saveOrUpdate(target);
                        }
                      }
                    }
                    appService.saveOrUpdate(function);
                  }
                }
              }
            }
            appService.saveOrUpdate(ne);
          }
          // characterizations: experiment configs, findings, data, files
          for (Characterization c : characterization) {
            c.setCreatedBy(newCreatedBy(c.getCreatedBy(), currentCreatedBy, newCreatedBy));
            appService.saveOrUpdate(c);
            Collection<ExperimentConfig> experimentConfigCollection = c.getExperimentConfigCollection();
            if(experimentConfigCollection != null){
              for(ExperimentConfig expConfig : experimentConfigCollection){
                if(expConfig != null){
                  expConfig.setCreatedBy(newCreatedBy(expConfig.getCreatedBy(), currentCreatedBy, newCreatedBy));
                  appService.saveOrUpdate(expConfig);
                }
              }
            }
            Collection<Finding> findingCollection = c.getFindingCollection();
            if(findingCollection != null){
              for(Finding f : findingCollection){
                if(f != null){
                  f.setCreatedBy(newCreatedBy(f.getCreatedBy(), currentCreatedBy, newCreatedBy));
                  appService.saveOrUpdate(f);
                  Collection<Datum> datumCollection = f.getDatumCollection();
                  if(datumCollection != null){
                    for(Datum d : datumCollection){
                      if(d != null){
                        d.setCreatedBy(newCreatedBy(d.getCreatedBy(), currentCreatedBy, newCreatedBy));
                        appService.saveOrUpdate(d);
                      }
                    }
                  }
                  Collection<File> fileCollection = f.getFileCollection();
                  if(fileCollection != null){
                    for(File file:fileCollection){
                      if(file != null){
                        file.setCreatedBy(newCreatedBy(file.getCreatedBy(), currentCreatedBy, newCreatedBy));
                        appService.saveOrUpdate(file);
                      }
                    }
                  }
                }
              }
            }
          }
        }
      } catch (Exception e) {
        // per-sample failure: count it and keep going
        i++;
        String error = "Error updating createdBy field for sample: "
            + sampleId;
        logger.error(error, e);
      }
    }
  } catch (Exception e) {
    String error = "Error updating createdBy field for samples";
    logger.error(error, e);
    throw new AdministrationException(error, e);
  }
  return i;
}
/**
 * Rewrites the createdBy field on the given publications and their authors.
 *
 * @return the number of publications that failed to update (logged and
 *         skipped; processing continues with the next id)
 */
private int update(PublicationService publicationService,
    List<String> publicationIds, String currentCreatedBy,
    String newCreatedBy) throws AdministrationException,
    NoAccessException {
  SecurityService securityService = ((PublicationServiceLocalImpl) publicationService)
      .getSecurityService();
  int failureCount = 0;
  try {
    CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
        .getApplicationService();
    PublicationServiceHelper helper = new PublicationServiceHelper(
        securityService);
    for (String publicationId : publicationIds) {
      try {
        Publication publication = helper
            .findPublicationById(publicationId);
        publication.setCreatedBy(newCreatedBy(publication.getCreatedBy(),
            currentCreatedBy, newCreatedBy));
        appService.saveOrUpdate(publication);
        // re-assign ownership of each author as well
        for (Author author : publication.getAuthorCollection()) {
          if (author == null) {
            continue;
          }
          author.setCreatedBy(newCreatedBy(author.getCreatedBy(),
              currentCreatedBy, newCreatedBy));
          appService.saveOrUpdate(author);
        }
      } catch (Exception e) {
        // per-publication failure: count, log, and continue
        failureCount++;
        logger.error("Error updating createdBy field for publication: "
            + publicationId, e);
      }
    }
  } catch (Exception e) {
    String error = "Error updating createdBy field for publications";
    logger.error(error, e);
    throw new AdministrationException(error, e);
  }
  return failureCount;
}
/**
 * Rewrites the createdBy field on the given protocols and their attached
 * protocol files.
 *
 * @return the number of protocols that failed to update (logged and
 *         skipped; processing continues with the next id)
 */
private int update(ProtocolService protocolService,
    List<String> protocolIds, String currentCreatedBy,
    String newCreatedBy) throws AdministrationException,
    NoAccessException {
  SecurityService securityService = ((ProtocolServiceLocalImpl) protocolService)
      .getSecurityService();
  int failureCount = 0;
  try {
    CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
        .getApplicationService();
    ProtocolServiceHelper helper = new ProtocolServiceHelper(
        securityService);
    for (String protocolId : protocolIds) {
      try {
        Protocol protocol = helper.findProtocolById(protocolId);
        protocol.setCreatedBy(newCreatedBy(protocol.getCreatedBy(),
            currentCreatedBy, newCreatedBy));
        appService.saveOrUpdate(protocol);
        // the protocol document, if present, changes ownership too
        File file = protocol.getFile();
        if (file != null) {
          file.setCreatedBy(newCreatedBy(file.getCreatedBy(),
              currentCreatedBy, newCreatedBy));
          appService.saveOrUpdate(file);
        }
      } catch (Exception e) {
        // per-protocol failure: count, log, and continue
        failureCount++;
        logger.error("Error updating createdBy field for protocol: "
            + protocolId, e);
      }
    }
  } catch (Exception e) {
    String error = "Error updating createdBy field for protocols";
    logger.error(error, e);
    throw new AdministrationException(error, e);
  }
  return failureCount;
}
/**
 * Entry point: transfers ownership (createdBy) of all samples, protocols
 * and publications owned by currentCreatedBy to newCreatedBy.
 *
 * @return the total number of records that failed to update
 * @throws NoAccessException if the caller is not one of the two
 *         hard-coded administrator accounts
 */
public int update(SecurityService securityService, String currentCreatedBy,
    String newCreatedBy) throws AdministrationException,
    NoAccessException {
  // Hard-coded administrator whitelist: only these two accounts may run
  // a global ownership transfer.
  String userLoginName = securityService.getUserBean().getLoginName();
  boolean isAdmin = "lethai".equals(userLoginName)
      || "pansu".equals(userLoginName);
  if (!isAdmin) {
    throw new NoAccessException();
  }
  int numFailures = 0;
  try {
    // samples
    SampleService sampleService = new SampleServiceLocalImpl(
        securityService);
    numFailures = this.update(sampleService,
        sampleService.findSampleIdsByOwner(currentCreatedBy),
        currentCreatedBy, newCreatedBy);
    // protocols
    ProtocolService protocolService = new ProtocolServiceLocalImpl(
        securityService);
    numFailures += this.update(protocolService,
        protocolService.findProtocolIdsByOwner(currentCreatedBy),
        currentCreatedBy, newCreatedBy);
    // publications
    PublicationService publicationService = new PublicationServiceLocalImpl(
        securityService);
    numFailures += this.update(publicationService,
        publicationService.findPublicationIdsByOwner(currentCreatedBy),
        currentCreatedBy, newCreatedBy);
  } catch (Exception e) {
    String error = "Error in updating createBy field " + e;
    logger.error(error, e);
    throw new AdministrationException(error, e);
  }
  return numFailures;
}
/**
 * Computes the replacement createdBy value for a single record.
 *
 * @param existingOwner the record's current createdBy value
 * @param currentOwner  the owner being replaced
 * @param newOwner      the owner taking over
 * @return newOwner + ":" + existingOwner when the record is a "COPY"-prefixed
 *         record; newOwner when the record belongs to currentOwner;
 *         otherwise the unchanged existingOwner
 */
private String newCreatedBy(String existingOwner, String currentOwner, String newOwner){
  // Records copied between samples carry a "COPY" prefix; keep the whole
  // marker and qualify it with the new owner.
  if (existingOwner.startsWith("COPY")) {
    return newOwner + ":" + existingOwner;
  }
  // Transfer ownership only when the record actually belongs to the owner
  // being replaced.  startsWith also avoids the
  // StringIndexOutOfBoundsException the previous substring-based prefix
  // check threw whenever existingOwner was shorter than currentOwner.
  if (existingOwner.startsWith(currentOwner)) {
    return newOwner;
  }
  return existingOwner;
}
/**
 * Loads a sample with its contacts, keywords and publications eagerly
 * fetched, then attaches its fully-loaded composition and
 * characterizations via separate queries.
 *
 * @throws NotExistException if no sample with the given id exists
 */
private Sample findFullyLoadedSampleById(String sampleId) throws Exception {
  CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
      .getApplicationService();
  // load composition and characterization separate because of Hibernate
  // join limitation
  DetachedCriteria crit = DetachedCriteria.forClass(Sample.class).add(
      Property.forName("id").eq(new Long(sampleId)));
  Sample sample = null;
  // load composition and characterization separate because of
  // Hibernate join limitation
  crit.setFetchMode("primaryPointOfContact", FetchMode.JOIN);
  crit.setFetchMode("primaryPointOfContact.organization", FetchMode.JOIN);
  crit.setFetchMode("otherPointOfContactCollection", FetchMode.JOIN);
  crit.setFetchMode("otherPointOfContactCollection.organization",
      FetchMode.JOIN);
  crit.setFetchMode("keywordCollection", FetchMode.JOIN);
  crit.setFetchMode("publicationCollection", FetchMode.JOIN);
  crit.setFetchMode("publicationCollection.authorCollection",
      FetchMode.JOIN);
  crit.setFetchMode("publicationCollection.keywordCollection",
      FetchMode.JOIN);
  // the joins above can duplicate the root row; collapse duplicates
  crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
  List result = appService.query(crit);
  if (!result.isEmpty()) {
    sample = (Sample) result.get(0);
  }
  if (sample == null) {
    throw new NotExistException("Sample doesn't exist in the database");
  }
  // fully load composition
  SampleComposition comp = this
      .loadComposition(sample.getId().toString());
  sample.setSampleComposition(comp);
  // fully load characterizations
  List<Characterization> chars = this.loadCharacterizations(sample
      .getId().toString());
  if (chars != null && !chars.isEmpty()) {
    sample.setCharacterizationCollection(new HashSet<Characterization>(
        chars));
  } else {
    sample.setCharacterizationCollection(null);
  }
  return sample;
}
/**
 * Loads the composition of a sample with its entire entity graph
 * (nanomaterial entities, functionalizing entities, chemical associations,
 * their functions/targets and files) eagerly join-fetched in one query.
 *
 * @return the composition, or null if the sample has none
 */
private SampleComposition loadComposition(String sampleId) throws Exception {
  SampleComposition composition = null;
  CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
      .getApplicationService();
  DetachedCriteria crit = DetachedCriteria
      .forClass(SampleComposition.class);
  crit.createAlias("sample", "sample");
  crit.add(Property.forName("sample.id").eq(new Long(sampleId)));
  // nanomaterial entities down to inherent functions and targets
  crit.setFetchMode("nanomaterialEntityCollection", FetchMode.JOIN);
  crit.setFetchMode("nanomaterialEntityCollection.fileCollection",
      FetchMode.JOIN);
  crit
      .setFetchMode(
          "nanomaterialEntityCollection.fileCollection.keywordCollection",
          FetchMode.JOIN);
  crit.setFetchMode(
      "nanomaterialEntityCollection.composingElementCollection",
      FetchMode.JOIN);
  crit
      .setFetchMode(
          "nanomaterialEntityCollection.composingElementCollection.inherentFunctionCollection",
          FetchMode.JOIN);
  crit
      .setFetchMode(
          "nanomaterialEntityCollection.composingElementCollection.inherentFunctionCollection.targetCollection",
          FetchMode.JOIN);
  // functionalizing entities down to functions and targets
  crit.setFetchMode("functionalizingEntityCollection", FetchMode.JOIN);
  crit.setFetchMode("functionalizingEntityCollection.fileCollection",
      FetchMode.JOIN);
  crit
      .setFetchMode(
          "functionalizingEntityCollection.fileCollection.keywordCollection",
          FetchMode.JOIN);
  crit.setFetchMode("functionalizingEntityCollection.functionCollection",
      FetchMode.JOIN);
  crit
      .setFetchMode(
          "functionalizingEntityCollection.functionCollection.targetCollection",
          FetchMode.JOIN);
  crit.setFetchMode("functionalizingEntityCollection.activationMethod",
      FetchMode.JOIN);
  // chemical associations and the elements they connect
  crit.setFetchMode("chemicalAssociationCollection", FetchMode.JOIN);
  crit.setFetchMode("chemicalAssociationCollection.fileCollection",
      FetchMode.JOIN);
  crit
      .setFetchMode(
          "chemicalAssociationCollection.fileCollection.keywordCollection",
          FetchMode.JOIN);
  crit.setFetchMode("chemicalAssociationCollection.associatedElementA",
      FetchMode.JOIN);
  crit.setFetchMode("chemicalAssociationCollection.associatedElementB",
      FetchMode.JOIN);
  // composition-level files
  crit.setFetchMode("fileCollection", FetchMode.JOIN);
  crit.setFetchMode("fileCollection.keywordCollection", FetchMode.JOIN);
  // the joins above can duplicate the root row; collapse duplicates
  crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
  List result = appService.query(crit);
  if (!result.isEmpty()) {
    composition = (SampleComposition) result.get(0);
  }
  return composition;
}
/**
 * Fully loads all characterizations belonging to the given sample, eagerly
 * fetching (join fetch) the point of contact, protocol file and keywords,
 * experiment configurations, and findings with their data, conditions and
 * files.
 *
 * @param sampleId id of the owning sample (parsed as a Long)
 * @return the sample's characterizations; empty list when there are none
 * @throws Exception if the query through the application service fails
 */
private List<Characterization> loadCharacterizations(String sampleId)
        throws Exception {
    List<Characterization> chars = new ArrayList<Characterization>();
    CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
            .getApplicationService();
    DetachedCriteria crit = DetachedCriteria
            .forClass(Characterization.class);
    crit.createAlias("sample", "sample");
    crit.add(Property.forName("sample.id").eq(new Long(sampleId)));
    // fully load characterization
    crit.setFetchMode("pointOfContact", FetchMode.JOIN);
    crit.setFetchMode("pointOfContact.organization", FetchMode.JOIN);
    crit.setFetchMode("protocol", FetchMode.JOIN);
    crit.setFetchMode("protocol.file", FetchMode.JOIN);
    crit.setFetchMode("protocol.file.keywordCollection", FetchMode.JOIN);
    crit.setFetchMode("experimentConfigCollection", FetchMode.JOIN);
    crit.setFetchMode("experimentConfigCollection.technique",
            FetchMode.JOIN);
    crit.setFetchMode("experimentConfigCollection.instrumentCollection",
            FetchMode.JOIN);
    crit.setFetchMode("findingCollection", FetchMode.JOIN);
    crit.setFetchMode("findingCollection.datumCollection", FetchMode.JOIN);
    crit.setFetchMode(
            "findingCollection.datumCollection.conditionCollection",
            FetchMode.JOIN);
    crit.setFetchMode("findingCollection.fileCollection", FetchMode.JOIN);
    crit.setFetchMode("findingCollection.fileCollection.keywordCollection",
            FetchMode.JOIN);
    // eager joins can produce duplicate root rows; collapse them
    crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
    List results = appService.query(crit);
    for (Object obj : results) {
        Characterization achar = (Characterization) obj;
        chars.add(achar);
    }
    return chars;
}
}
| software/cananolab-webapp/src/gov/nih/nci/cananolab/service/admin/impl/UpdateCreatedByServiceImpl.java | package gov.nih.nci.cananolab.service.admin.impl;
import gov.nih.nci.cananolab.domain.common.Author;
import gov.nih.nci.cananolab.domain.common.Datum;
import gov.nih.nci.cananolab.domain.common.ExperimentConfig;
import gov.nih.nci.cananolab.domain.common.File;
import gov.nih.nci.cananolab.domain.common.Finding;
import gov.nih.nci.cananolab.domain.common.Organization;
import gov.nih.nci.cananolab.domain.common.PointOfContact;
import gov.nih.nci.cananolab.domain.common.Protocol;
import gov.nih.nci.cananolab.domain.common.Publication;
import gov.nih.nci.cananolab.domain.function.Target;
import gov.nih.nci.cananolab.domain.function.TargetingFunction;
import gov.nih.nci.cananolab.domain.particle.Characterization;
import gov.nih.nci.cananolab.domain.particle.ChemicalAssociation;
import gov.nih.nci.cananolab.domain.particle.ComposingElement;
import gov.nih.nci.cananolab.domain.particle.Function;
import gov.nih.nci.cananolab.domain.particle.FunctionalizingEntity;
import gov.nih.nci.cananolab.domain.particle.NanomaterialEntity;
import gov.nih.nci.cananolab.domain.particle.Sample;
import gov.nih.nci.cananolab.domain.particle.SampleComposition;
import gov.nih.nci.cananolab.exception.AdministrationException;
import gov.nih.nci.cananolab.exception.NoAccessException;
import gov.nih.nci.cananolab.exception.NotExistException;
import gov.nih.nci.cananolab.service.protocol.ProtocolService;
import gov.nih.nci.cananolab.service.protocol.helper.ProtocolServiceHelper;
import gov.nih.nci.cananolab.service.protocol.impl.ProtocolServiceLocalImpl;
import gov.nih.nci.cananolab.service.publication.PublicationService;
import gov.nih.nci.cananolab.service.publication.helper.PublicationServiceHelper;
import gov.nih.nci.cananolab.service.publication.impl.PublicationServiceLocalImpl;
import gov.nih.nci.cananolab.service.sample.SampleService;
import gov.nih.nci.cananolab.service.sample.impl.SampleServiceLocalImpl;
import gov.nih.nci.cananolab.service.security.SecurityService;
import gov.nih.nci.cananolab.system.applicationservice.CustomizedApplicationService;
import gov.nih.nci.system.client.ApplicationServiceProvider;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import org.apache.log4j.Logger;
import org.hibernate.FetchMode;
import org.hibernate.criterion.CriteriaSpecification;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.Property;
/**
 * Service methods for updating the createdBy field.
*
* @author lethai, pansu
*/
public class UpdateCreatedByServiceImpl {
private static Logger logger = Logger
.getLogger(UpdateCreatedByServiceImpl.class);
/**
 * Reassigns sample ownership: for every sample id given, rewrites the
 * createdBy field (replacing currentCreatedBy with newCreatedBy via
 * {@link #newCreatedBy(String, String, String)}) on the sample itself and on
 * every reachable child record: points of contact and their organizations,
 * composition files, chemical associations, functionalizing entities (with
 * their files and functions), nanomaterial entities (with files, composing
 * elements, inherent functions and targeting-function targets) and
 * characterizations (with experiment configs, findings, data and files).
 * Every touched record is saved individually through the application service.
 *
 * Failures are counted per sample and logged; they do not abort the loop.
 *
 * @param sampleService not used directly in this method; samples are loaded
 *                      via findFullyLoadedSampleById
 * @param sampleIds ids of the samples whose ownership is being transferred
 * @param currentCreatedBy owner string being replaced
 * @param newCreatedBy replacement owner string
 * @return number of samples that could not be updated (0 = all succeeded)
 * @throws AdministrationException if setup outside the per-sample loop fails
 * @throws NoAccessException declared but not thrown directly here
 */
private int update(SampleService sampleService, List<String> sampleIds,
        String currentCreatedBy, String newCreatedBy)
        throws AdministrationException, NoAccessException {
    int i = 0; // count of samples that failed to update
    try {
        CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
                .getApplicationService();
        for (String sampleId : sampleIds) {
            try {
                Sample domain = this.findFullyLoadedSampleById(sampleId);
                domain.setCreatedBy(newCreatedBy(domain.getCreatedBy(), currentCreatedBy, newCreatedBy));
                SampleComposition sampleComposition = domain
                        .getSampleComposition();
                Collection<ChemicalAssociation> chemicalAssociation = new ArrayList<ChemicalAssociation>();
                Collection<FunctionalizingEntity> functionalizingEntity = new ArrayList<FunctionalizingEntity>();
                Collection<NanomaterialEntity> nanomaterialEntity = new ArrayList<NanomaterialEntity>();
                Collection<Characterization> characterization = new ArrayList<Characterization>();
                // point of contact
                PointOfContact poc = domain.getPrimaryPointOfContact();
                if (poc != null) {
                    poc.setCreatedBy(newCreatedBy(poc.getCreatedBy(), currentCreatedBy, newCreatedBy));
                    // organization
                    Organization organization = poc.getOrganization();
                    if (organization != null) {
                        organization.setCreatedBy(newCreatedBy(organization.getCreatedBy(), currentCreatedBy, newCreatedBy));
                        appService.saveOrUpdate(organization);
                    }
                    appService.saveOrUpdate(poc);
                }
                if (domain.getOtherPointOfContactCollection() != null) {
                    for (PointOfContact otherpoc : domain
                            .getOtherPointOfContactCollection()) {
                        otherpoc.setCreatedBy(newCreatedBy(otherpoc.getCreatedBy(),
                                currentCreatedBy, newCreatedBy));
                        appService.saveOrUpdate(otherpoc);
                    }
                }
                // updating Sample Composition
                if (sampleComposition != null) {
                    if (sampleComposition.getFileCollection() != null) {
                        for (File file : sampleComposition
                                .getFileCollection()) {
                            file.setCreatedBy(newCreatedBy(file
                                    .getCreatedBy(), currentCreatedBy,
                                    newCreatedBy));
                            appService.saveOrUpdate(file);
                        }
                    }
                    chemicalAssociation = sampleComposition
                            .getChemicalAssociationCollection();
                    functionalizingEntity = sampleComposition
                            .getFunctionalizingEntityCollection();
                    nanomaterialEntity = sampleComposition
                            .getNanomaterialEntityCollection();
                    characterization = domain
                            .getCharacterizationCollection();
                    for (ChemicalAssociation ca : chemicalAssociation) {
                        ca.setCreatedBy(newCreatedBy(ca.getCreatedBy(), currentCreatedBy, newCreatedBy));
                        Collection<File> fileCollection = ca.getFileCollection();
                        if (fileCollection != null) {
                            for (File file : fileCollection) {
                                if (file != null) {
                                    file.setCreatedBy(newCreatedBy(file.getCreatedBy(), currentCreatedBy, newCreatedBy));
                                    appService.saveOrUpdate(file);
                                }
                            }
                        }
                        appService.saveOrUpdate(ca);
                    }
                    for (FunctionalizingEntity fe : functionalizingEntity) {
                        fe.setCreatedBy(newCreatedBy(fe.getCreatedBy(), currentCreatedBy, newCreatedBy));
                        Collection<File> fileCollection = fe.getFileCollection();
                        if (fileCollection != null) {
                            for (File file : fileCollection) {
                                if (file != null) {
                                    file.setCreatedBy(newCreatedBy(file.getCreatedBy(), currentCreatedBy, newCreatedBy));
                                    appService.saveOrUpdate(file);
                                }
                            }
                        }
                        Collection<Function> functionCollection = fe.getFunctionCollection();
                        if (functionCollection != null) {
                            for (Function f : functionCollection) {
                                if (f != null) {
                                    f.setCreatedBy(newCreatedBy(f.getCreatedBy(), currentCreatedBy, newCreatedBy));
                                    appService.saveOrUpdate(f);
                                }
                            }
                        }
                        appService.saveOrUpdate(fe);
                    }
                    for (NanomaterialEntity ne : nanomaterialEntity) {
                        ne.setCreatedBy(newCreatedBy(ne.getCreatedBy(), currentCreatedBy, newCreatedBy));
                        Collection<File> fileCollection = ne.getFileCollection();
                        if (fileCollection != null) {
                            for (File file : fileCollection) {
                                if (file != null) {
                                    file.setCreatedBy(newCreatedBy(file.getCreatedBy(), currentCreatedBy, newCreatedBy));
                                    appService.saveOrUpdate(file);
                                }
                            }
                        }
                        if (ne.getComposingElementCollection() != null) {
                            for (ComposingElement ce : ne
                                    .getComposingElementCollection()) {
                                ce.setCreatedBy(newCreatedBy(ce
                                        .getCreatedBy(), currentCreatedBy,
                                        newCreatedBy));
                                if (ce.getInherentFunctionCollection() != null) {
                                    for (Function function : ce
                                            .getInherentFunctionCollection()) {
                                        function.setCreatedBy(newCreatedBy(
                                                function.getCreatedBy(),
                                                currentCreatedBy, newCreatedBy));
                                        // only targeting functions carry targets
                                        if (function instanceof TargetingFunction) {
                                            TargetingFunction tFunc = (TargetingFunction) function;
                                            if (tFunc.getTargetCollection() != null) {
                                                for (Target target : tFunc
                                                        .getTargetCollection()) {
                                                    target.setCreatedBy(newCreatedBy(
                                                            target.getCreatedBy(),
                                                            currentCreatedBy,
                                                            newCreatedBy));
                                                    appService.saveOrUpdate(target);
                                                }
                                            }
                                        }
                                        appService.saveOrUpdate(function);
                                    }
                                }
                            }
                        }
                        appService.saveOrUpdate(ne);
                    }
                    // NOTE(review): this characterization loop only runs when the
                    // sample has a composition; samples without a composition keep
                    // their characterizations' old createdBy values -- confirm
                    // whether that is intentional.
                    for (Characterization c : characterization) {
                        c.setCreatedBy(newCreatedBy(c.getCreatedBy(), currentCreatedBy, newCreatedBy));
                        appService.saveOrUpdate(c);
                        Collection<ExperimentConfig> experimentConfigCollection = c.getExperimentConfigCollection();
                        if (experimentConfigCollection != null) {
                            for (ExperimentConfig expConfig : experimentConfigCollection) {
                                if (expConfig != null) {
                                    expConfig.setCreatedBy(newCreatedBy(expConfig.getCreatedBy(), currentCreatedBy, newCreatedBy));
                                    appService.saveOrUpdate(expConfig);
                                }
                            }
                        }
                        Collection<Finding> findingCollection = c.getFindingCollection();
                        if (findingCollection != null) {
                            for (Finding f : findingCollection) {
                                if (f != null) {
                                    f.setCreatedBy(newCreatedBy(f.getCreatedBy(), currentCreatedBy, newCreatedBy));
                                    appService.saveOrUpdate(f);
                                    Collection<Datum> datumCollection = f.getDatumCollection();
                                    if (datumCollection != null) {
                                        for (Datum d : datumCollection) {
                                            if (d != null) {
                                                d.setCreatedBy(newCreatedBy(d.getCreatedBy(), currentCreatedBy, newCreatedBy));
                                                appService.saveOrUpdate(d);
                                            }
                                        }
                                    }
                                    Collection<File> fileCollection = f.getFileCollection();
                                    if (fileCollection != null) {
                                        for (File file : fileCollection) {
                                            if (file != null) {
                                                file.setCreatedBy(newCreatedBy(file.getCreatedBy(), currentCreatedBy, newCreatedBy));
                                                appService.saveOrUpdate(file);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                appService.saveOrUpdate(domain);
            } catch (Exception e) {
                // per-sample failure: count it, log it, continue with the rest
                i++;
                String error = "Error updating createdBy field for sample: "
                        + sampleId;
                logger.error(error, e);
            }
        }
    } catch (Exception e) {
        String error = "Error updating createdBy field for samples";
        logger.error(error, e);
        throw new AdministrationException(error, e);
    }
    return i;
}
/**
 * Rewrites the createdBy field on each listed publication and on all of its
 * authors, replacing currentCreatedBy with newCreatedBy. Individual failures
 * are logged and counted without stopping the remaining updates.
 *
 * @param publicationService local publication service (source of the security service)
 * @param publicationIds ids of the publications to update
 * @param currentCreatedBy owner string being replaced
 * @param newCreatedBy replacement owner string
 * @return number of publications that could not be updated
 * @throws AdministrationException if setup outside the per-publication loop fails
 * @throws NoAccessException declared but not thrown directly here
 */
private int update(PublicationService publicationService,
        List<String> publicationIds, String currentCreatedBy,
        String newCreatedBy) throws AdministrationException,
        NoAccessException {
    SecurityService securityService =
            ((PublicationServiceLocalImpl) publicationService).getSecurityService();
    int failureCount = 0;
    try {
        CustomizedApplicationService appService =
                (CustomizedApplicationService) ApplicationServiceProvider.getApplicationService();
        PublicationServiceHelper publicationHelper =
                new PublicationServiceHelper(securityService);
        for (String publicationId : publicationIds) {
            try {
                Publication publication =
                        publicationHelper.findPublicationById(publicationId);
                publication.setCreatedBy(newCreatedBy(
                        publication.getCreatedBy(), currentCreatedBy, newCreatedBy));
                appService.saveOrUpdate(publication);
                // cascade the ownership change to every author
                for (Author author : publication.getAuthorCollection()) {
                    if (author != null) {
                        author.setCreatedBy(newCreatedBy(
                                author.getCreatedBy(), currentCreatedBy, newCreatedBy));
                        appService.saveOrUpdate(author);
                    }
                }
            } catch (Exception e) {
                failureCount++;
                String error = "Error updating createdBy field for publication: "
                        + publicationId;
                logger.error(error, e);
            }
        }
    } catch (Exception e) {
        String error = "Error updating createdBy field for publications";
        logger.error(error, e);
        throw new AdministrationException(error, e);
    }
    return failureCount;
}
/**
 * Rewrites the createdBy field on each listed protocol and on the protocol's
 * attached file (when present), replacing currentCreatedBy with newCreatedBy.
 * Individual failures are logged and counted without stopping the loop.
 *
 * @param protocolService local protocol service (source of the security service)
 * @param protocolIds ids of the protocols to update
 * @param currentCreatedBy owner string being replaced
 * @param newCreatedBy replacement owner string
 * @return number of protocols that could not be updated
 * @throws AdministrationException if setup outside the per-protocol loop fails
 * @throws NoAccessException declared but not thrown directly here
 */
private int update(ProtocolService protocolService,
        List<String> protocolIds, String currentCreatedBy,
        String newCreatedBy) throws AdministrationException,
        NoAccessException {
    SecurityService securityService =
            ((ProtocolServiceLocalImpl) protocolService).getSecurityService();
    int failureCount = 0;
    try {
        CustomizedApplicationService appService =
                (CustomizedApplicationService) ApplicationServiceProvider.getApplicationService();
        ProtocolServiceHelper protocolHelper =
                new ProtocolServiceHelper(securityService);
        for (String protocolId : protocolIds) {
            try {
                Protocol protocol = protocolHelper.findProtocolById(protocolId);
                protocol.setCreatedBy(newCreatedBy(
                        protocol.getCreatedBy(), currentCreatedBy, newCreatedBy));
                appService.saveOrUpdate(protocol);
                // cascade to the protocol's file, when one is attached
                File protocolFile = protocol.getFile();
                if (protocolFile != null) {
                    protocolFile.setCreatedBy(newCreatedBy(
                            protocolFile.getCreatedBy(), currentCreatedBy, newCreatedBy));
                    appService.saveOrUpdate(protocolFile);
                }
            } catch (Exception e) {
                failureCount++;
                String error = "Error updating createdBy field for protocol: "
                        + protocolId;
                logger.error(error, e);
            }
        }
    } catch (Exception e) {
        String error = "Error updating createdBy field for protocols";
        logger.error(error, e);
        throw new AdministrationException(error, e);
    }
    return failureCount;
}
/**
 * Entry point for transferring record ownership: finds all samples, protocols
 * and publications owned by currentCreatedBy and rewrites their createdBy
 * fields to newCreatedBy via the type-specific update methods.
 *
 * NOTE(review): access is gated by a hard-coded whitelist of login names
 * ("lethai", "pansu"); consider replacing this with a configurable admin role.
 *
 * @param securityService carries the current user and is handed to the local
 *                        service implementations
 * @param currentCreatedBy owner whose records are being transferred
 * @param newCreatedBy owner the records are transferred to
 * @return total number of records that failed to update
 * @throws NoAccessException when the logged-in user is not on the whitelist
 * @throws AdministrationException when any of the lookups or updates fail
 */
public int update(SecurityService securityService, String currentCreatedBy,
        String newCreatedBy) throws AdministrationException,
        NoAccessException {
    String userLoginName = securityService.getUserBean().getLoginName();
    if (!("lethai".equals(userLoginName) || "pansu".equals(userLoginName))) {
        throw new NoAccessException();
    }
    int numFailures = 0;
    try {
        // samples owned by the current owner
        SampleService sampleService = new SampleServiceLocalImpl(
                securityService);
        List<String> sampleIds = sampleService
                .findSampleIdsByOwner(currentCreatedBy);
        numFailures = this.update(sampleService, sampleIds,
                currentCreatedBy, newCreatedBy);
        // protocols owned by the current owner
        ProtocolService protocolService = new ProtocolServiceLocalImpl(
                securityService);
        List<String> protocolIds = protocolService
                .findProtocolIdsByOwner(currentCreatedBy);
        numFailures += this.update(protocolService, protocolIds,
                currentCreatedBy, newCreatedBy);
        // publications owned by the current owner
        PublicationService publicationService = new PublicationServiceLocalImpl(
                securityService);
        List<String> publicationIds = publicationService
                .findPublicationIdsByOwner(currentCreatedBy);
        numFailures += this.update(publicationService, publicationIds,
                currentCreatedBy, newCreatedBy);
    } catch (Exception e) {
        String error = "Error in updating createBy field " + e;
        logger.error(error, e);
        throw new AdministrationException(error, e);
    }
    return numFailures;
}
/**
 * Computes the replacement createdBy value for a single record.
 *
 * Rules (unchanged from the original behavior):
 * - an owner string starting with "COPY" is prefixed with the new owner,
 *   yielding "newOwner:COPY...";
 * - an owner string starting with currentOwner is replaced by newOwner;
 * - any other owner string is left untouched.
 *
 * Fix: the original used existingOwner.substring(0, currentOwner.length())
 * for the prefix comparison, which threw StringIndexOutOfBoundsException
 * whenever the existing owner string was shorter than currentOwner (the
 * exception was silently absorbed as a per-record failure by the callers).
 * String.startsWith performs the same comparison without that failure mode.
 *
 * @param existingOwner the record's current createdBy value
 * @param currentOwner the owner being replaced
 * @param newOwner the replacement owner
 * @return the createdBy value the record should carry after the transfer
 */
private String newCreatedBy(String existingOwner, String currentOwner, String newOwner){
    if (existingOwner.startsWith("COPY")) {
        // keep the COPY marker but tag the record with the new owner
        return newOwner + ":" + existingOwner;
    }
    if (existingOwner.startsWith(currentOwner)) {
        return newOwner;
    }
    // owned by someone else entirely -- leave as-is
    return existingOwner;
}
/**
 * Loads a sample and all of its associations by id. The sample itself (with
 * points of contact, keywords and publications) is fetched first; the
 * composition and the characterizations are then loaded through separate
 * queries because of Hibernate's limit on the number of eager joins.
 *
 * @param sampleId sample id, parsed as a Long
 * @return the fully populated sample
 * @throws NotExistException when no sample with the given id exists
 * @throws Exception if any of the underlying queries fail
 */
private Sample findFullyLoadedSampleById(String sampleId) throws Exception {
    CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
            .getApplicationService();
    // load composition and characterization separate because of Hibernate
    // join limitation
    DetachedCriteria crit = DetachedCriteria.forClass(Sample.class).add(
            Property.forName("id").eq(new Long(sampleId)));
    Sample sample = null;
    // load composition and characterization separate because of
    // Hibernate join limitation
    crit.setFetchMode("primaryPointOfContact", FetchMode.JOIN);
    crit.setFetchMode("primaryPointOfContact.organization", FetchMode.JOIN);
    crit.setFetchMode("otherPointOfContactCollection", FetchMode.JOIN);
    crit.setFetchMode("otherPointOfContactCollection.organization",
            FetchMode.JOIN);
    crit.setFetchMode("keywordCollection", FetchMode.JOIN);
    crit.setFetchMode("publicationCollection", FetchMode.JOIN);
    crit.setFetchMode("publicationCollection.authorCollection",
            FetchMode.JOIN);
    crit.setFetchMode("publicationCollection.keywordCollection",
            FetchMode.JOIN);
    // eager joins can duplicate the root row; collapse duplicates
    crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
    List result = appService.query(crit);
    if (!result.isEmpty()) {
        sample = (Sample) result.get(0);
    }
    if (sample == null) {
        throw new NotExistException("Sample doesn't exist in the database");
    }
    // fully load composition
    SampleComposition comp = this
            .loadComposition(sample.getId().toString());
    sample.setSampleComposition(comp);
    // fully load characterizations
    List<Characterization> chars = this.loadCharacterizations(sample
            .getId().toString());
    if (chars != null && !chars.isEmpty()) {
        sample.setCharacterizationCollection(new HashSet<Characterization>(
                chars));
    } else {
        sample.setCharacterizationCollection(null);
    }
    return sample;
}
/**
 * Fully loads the composition of the given sample, eagerly joining every
 * nested association: nanomaterial entities (files/keywords, composing
 * elements with inherent functions and targets), functionalizing entities
 * (files/keywords, functions with targets, activation method), chemical
 * associations (files/keywords, associated elements A/B) and the
 * composition's own files.
 *
 * @param sampleId id of the owning sample (parsed as a Long)
 * @return the sample's composition, or null when the sample has none
 * @throws Exception if the query through the application service fails
 */
private SampleComposition loadComposition(String sampleId) throws Exception {
    SampleComposition composition = null;
    CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
            .getApplicationService();
    DetachedCriteria crit = DetachedCriteria
            .forClass(SampleComposition.class);
    crit.createAlias("sample", "sample");
    crit.add(Property.forName("sample.id").eq(new Long(sampleId)));
    // nanomaterial entity branch
    crit.setFetchMode("nanomaterialEntityCollection", FetchMode.JOIN);
    crit.setFetchMode("nanomaterialEntityCollection.fileCollection",
            FetchMode.JOIN);
    crit.setFetchMode(
            "nanomaterialEntityCollection.fileCollection.keywordCollection",
            FetchMode.JOIN);
    crit.setFetchMode(
            "nanomaterialEntityCollection.composingElementCollection",
            FetchMode.JOIN);
    crit.setFetchMode(
            "nanomaterialEntityCollection.composingElementCollection.inherentFunctionCollection",
            FetchMode.JOIN);
    crit.setFetchMode(
            "nanomaterialEntityCollection.composingElementCollection.inherentFunctionCollection.targetCollection",
            FetchMode.JOIN);
    // functionalizing entity branch
    crit.setFetchMode("functionalizingEntityCollection", FetchMode.JOIN);
    crit.setFetchMode("functionalizingEntityCollection.fileCollection",
            FetchMode.JOIN);
    crit.setFetchMode(
            "functionalizingEntityCollection.fileCollection.keywordCollection",
            FetchMode.JOIN);
    crit.setFetchMode("functionalizingEntityCollection.functionCollection",
            FetchMode.JOIN);
    crit.setFetchMode(
            "functionalizingEntityCollection.functionCollection.targetCollection",
            FetchMode.JOIN);
    crit.setFetchMode("functionalizingEntityCollection.activationMethod",
            FetchMode.JOIN);
    // chemical association branch
    crit.setFetchMode("chemicalAssociationCollection", FetchMode.JOIN);
    crit.setFetchMode("chemicalAssociationCollection.fileCollection",
            FetchMode.JOIN);
    crit.setFetchMode(
            "chemicalAssociationCollection.fileCollection.keywordCollection",
            FetchMode.JOIN);
    crit.setFetchMode("chemicalAssociationCollection.associatedElementA",
            FetchMode.JOIN);
    crit.setFetchMode("chemicalAssociationCollection.associatedElementB",
            FetchMode.JOIN);
    // the composition's own files
    crit.setFetchMode("fileCollection", FetchMode.JOIN);
    crit.setFetchMode("fileCollection.keywordCollection", FetchMode.JOIN);
    // eager joins can duplicate the root row; collapse duplicates
    crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
    List result = appService.query(crit);
    if (!result.isEmpty()) {
        composition = (SampleComposition) result.get(0);
    }
    return composition;
}
/**
 * Fully loads all characterizations belonging to the given sample, eagerly
 * fetching (join fetch) the point of contact, protocol file and keywords,
 * experiment configurations, and findings with their data, conditions and
 * files.
 *
 * @param sampleId id of the owning sample (parsed as a Long)
 * @return the sample's characterizations; empty list when there are none
 * @throws Exception if the query through the application service fails
 */
private List<Characterization> loadCharacterizations(String sampleId)
        throws Exception {
    List<Characterization> chars = new ArrayList<Characterization>();
    CustomizedApplicationService appService = (CustomizedApplicationService) ApplicationServiceProvider
            .getApplicationService();
    DetachedCriteria crit = DetachedCriteria
            .forClass(Characterization.class);
    crit.createAlias("sample", "sample");
    crit.add(Property.forName("sample.id").eq(new Long(sampleId)));
    // fully load characterization
    crit.setFetchMode("pointOfContact", FetchMode.JOIN);
    crit.setFetchMode("pointOfContact.organization", FetchMode.JOIN);
    crit.setFetchMode("protocol", FetchMode.JOIN);
    crit.setFetchMode("protocol.file", FetchMode.JOIN);
    crit.setFetchMode("protocol.file.keywordCollection", FetchMode.JOIN);
    crit.setFetchMode("experimentConfigCollection", FetchMode.JOIN);
    crit.setFetchMode("experimentConfigCollection.technique",
            FetchMode.JOIN);
    crit.setFetchMode("experimentConfigCollection.instrumentCollection",
            FetchMode.JOIN);
    crit.setFetchMode("findingCollection", FetchMode.JOIN);
    crit.setFetchMode("findingCollection.datumCollection", FetchMode.JOIN);
    crit.setFetchMode(
            "findingCollection.datumCollection.conditionCollection",
            FetchMode.JOIN);
    crit.setFetchMode("findingCollection.fileCollection", FetchMode.JOIN);
    crit.setFetchMode("findingCollection.fileCollection.keywordCollection",
            FetchMode.JOIN);
    // eager joins can produce duplicate root rows; collapse them
    crit.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
    List results = appService.query(crit);
    for (Object obj : results) {
        Characterization achar = (Characterization) obj;
        chars.add(achar);
    }
    return chars;
}
}
| update associations
SVN-Revision: 19591
| software/cananolab-webapp/src/gov/nih/nci/cananolab/service/admin/impl/UpdateCreatedByServiceImpl.java | update associations |
|
Java | bsd-3-clause | ba0d3cb01ad06a2c1f6f768f6147ed957f6d5bff | 0 | NCIP/cananolab,NCIP/cananolab,NCIP/cananolab | package gov.nih.nci.calab.ui.submit;
/**
* This class associates a assay result file with a characterization.
*
* @author pansu
*/
/* CVS $Id: LoadDerivedBioAssayDataAction.java,v 1.16 2007-07-06 17:46:56 pansu Exp $ */
import gov.nih.nci.calab.dto.characterization.DerivedBioAssayDataBean;
import gov.nih.nci.calab.service.submit.SubmitNanoparticleService;
import gov.nih.nci.calab.ui.core.AbstractDispatchAction;
import gov.nih.nci.calab.ui.core.InitSessionSetup;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.apache.struts.action.ActionMessages;
import org.apache.struts.upload.FormFile;
import org.apache.struts.validator.DynaValidatorForm;
public class LoadDerivedBioAssayDataAction extends AbstractDispatchAction {
    /**
     * Saves the uploaded (or selected existing) assay result file, associates
     * it with the characterization being edited, stores the saved bean in the
     * session under a slot-numbered key, and forwards to the page named by the
     * form's "forwardPage" property.
     *
     * @throws Exception when no new file was uploaded and no existing file was
     *                   selected from the drop-down
     */
    public ActionForward submit(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response)
            throws Exception {
        DynaValidatorForm theForm = (DynaValidatorForm) form;
        String particleName = (String) theForm.get("particleName");
        String fileSource = (String) theForm.get("fileSource");
        DerivedBioAssayDataBean fileBean = (DerivedBioAssayDataBean) theForm
                .get("file");
        String fileNumber = (String) theForm.get("fileNumber");
        String characterizationName = (String) theForm
                .get("characterizationName");
        SubmitNanoparticleService service = new SubmitNanoparticleService();
        fileBean.setCharacterizationName(characterizationName);
        fileBean.setParticleName(particleName);
        DerivedBioAssayDataBean savedFileBean = null;
        if (fileSource.equals("new")) {
            // brand-new upload: persist the submitted file directly
            FormFile uploadedFile = (FormFile) theForm.get("uploadedFile");
            fileBean.setUploadedFile(uploadedFile);
            savedFileBean = service.saveCharacterizationFile(fileBean);
        } else {
            // updating existingFileBean with form data
            if (fileBean.getId() != null) {
                DerivedBioAssayDataBean existingFileBean = (DerivedBioAssayDataBean) service
                        .getFile(fileBean.getId());
                existingFileBean.setTitle(fileBean.getTitle());
                existingFileBean.setDescription(fileBean.getDescription());
                existingFileBean.setVisibilityGroups(fileBean
                        .getVisibilityGroups());
                existingFileBean.setKeywords(fileBean.getKeywords());
                savedFileBean = service
                        .saveCharacterizationFile(existingFileBean);
            } else {
                throw new Exception(
                        "Please upload a new file if existing file drop-down list is empty or select a file from the drop-down list.");
            }
        }
        // expose the saved file to the view under a slot-numbered session key
        request.getSession().setAttribute("characterizationFile" + fileNumber,
                savedFileBean);
        String forwardPage = (String) theForm.get("forwardPage");
        // forwardPage holds a literal path, so build the forward directly
        // instead of resolving it through the action mapping
        ActionForward forward = new ActionForward();
        forward.setPath(forwardPage);
        return forward;
    }

    /**
     * Prepares the load-file form: clears unrelated session state, caches the
     * particle's run files in the session, and seeds the form with values from
     * the request.
     */
    public ActionForward setup(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response)
            throws Exception {
        HttpSession session = request.getSession();
        InitSessionSetup.getInstance().clearWorkflowSession(session);
        InitSessionSetup.getInstance().clearSearchSession(session);
        InitSessionSetup.getInstance().clearInventorySession(session);
        String particleName = request.getParameter("particleName");
        InitSessionSetup.getInstance().setAllRunFiles(session, particleName);
        String fileNumber = request.getParameter("fileNumber");
        DynaValidatorForm theForm = (DynaValidatorForm) form;
        theForm.set("particleName", particleName);
        theForm.set("fileNumber", fileNumber);
        theForm.set("forwardPage", (String) request
                .getAttribute("loadFileForward"));
        theForm.set("characterizationName", (String) request
                .getAttribute("characterizationName"));
        return mapping.getInputForward();
    }

    /**
     * Prepares the update form: clears unrelated session state, loads the
     * derived bio-assay data bean identified by the "fileId" request parameter
     * into the form, and copies action/form names into request attributes for
     * the view.
     */
    public ActionForward setupUpdate(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response)
            throws Exception {
        HttpSession session = request.getSession();
        InitSessionSetup.getInstance().clearWorkflowSession(session);
        InitSessionSetup.getInstance().clearSearchSession(session);
        InitSessionSetup.getInstance().clearInventorySession(session);
        String fileId = request.getParameter("fileId");
        SubmitNanoparticleService service = new SubmitNanoparticleService();
        DerivedBioAssayDataBean fileBean = service
                .getDerivedBioAssayData(fileId);
        DynaValidatorForm theForm = (DynaValidatorForm) form;
        theForm.set("file", fileBean);
        String actionName = request.getParameter("actionName");
        String formName = request.getParameter("formName");
        request.setAttribute("actionName", actionName);
        request.setAttribute("formName", formName);
        return mapping.getInputForward();
    }

    /** View mode uses the same preparation as update mode. */
    public ActionForward setupView(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response)
            throws Exception {
        return setupUpdate(mapping, form, request, response);
    }

    /**
     * Persists edited metadata of an existing derived bio-assay data file and
     * queues a confirmation message for the view.
     */
    public ActionForward update(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response)
            throws Exception {
        DynaValidatorForm theForm = (DynaValidatorForm) form;
        DerivedBioAssayDataBean fileBean = (DerivedBioAssayDataBean) theForm
                .get("file");
        SubmitNanoparticleService service = new SubmitNanoparticleService();
        service.updateDerivedBioAssayDataMetaData(fileBean);
        ActionMessages msgs = new ActionMessages();
        ActionMessage msg = new ActionMessage(
                "message.updateDerivedBioAssayData", fileBean.getPath());
        msgs.add("message", msg);
        saveMessages(request, msgs);
        return mapping.getInputForward();
    }

    /** All dispatch methods of this action require an authenticated user. */
    public boolean loginRequired() {
        return true;
    }
}
| src/gov/nih/nci/calab/ui/submit/LoadDerivedBioAssayDataAction.java | package gov.nih.nci.calab.ui.submit;
/**
* This class associates a assay result file with a characterization.
*
* @author pansu
*/
/* CVS $Id: LoadDerivedBioAssayDataAction.java,v 1.15 2007-07-03 17:35:55 pansu Exp $ */
import gov.nih.nci.calab.dto.characterization.DerivedBioAssayDataBean;
import gov.nih.nci.calab.service.submit.SubmitNanoparticleService;
import gov.nih.nci.calab.ui.core.AbstractDispatchAction;
import gov.nih.nci.calab.ui.core.InitSessionSetup;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.apache.struts.action.ActionMessages;
import org.apache.struts.upload.FormFile;
import org.apache.struts.validator.DynaValidatorForm;
public class LoadDerivedBioAssayDataAction extends AbstractDispatchAction {
    /**
     * Saves the uploaded (or selected existing) assay result file, associates
     * it with the characterization being edited, stores the saved bean in the
     * session under a slot-numbered key, and forwards to the mapping-defined
     * forward named by the form's "forwardPage" property.
     *
     * @throws Exception when no new file was uploaded and no existing file was
     *                   selected from the drop-down
     */
    public ActionForward submit(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response)
            throws Exception {
        ActionForward forward = null;
        DynaValidatorForm theForm = (DynaValidatorForm) form;
        String particleName = (String) theForm.get("particleName");
        String fileSource = (String) theForm.get("fileSource");
        DerivedBioAssayDataBean fileBean = (DerivedBioAssayDataBean) theForm
                .get("file");
        String fileNumber = (String) theForm.get("fileNumber");
        String characterizationName = (String) theForm
                .get("characterizationName");
        SubmitNanoparticleService service = new SubmitNanoparticleService();
        fileBean.setCharacterizationName(characterizationName);
        fileBean.setParticleName(particleName);
        DerivedBioAssayDataBean savedFileBean = null;
        if (fileSource.equals("new")) {
            // brand-new upload: persist the submitted file directly
            FormFile uploadedFile = (FormFile) theForm.get("uploadedFile");
            fileBean.setUploadedFile(uploadedFile);
            savedFileBean = service.saveCharacterizationFile(fileBean);
        } else {
            // updating existingFileBean with form data
            if (fileBean.getId() != null) {
                DerivedBioAssayDataBean existingFileBean = (DerivedBioAssayDataBean) service
                        .getFile(fileBean.getId());
                existingFileBean.setTitle(fileBean.getTitle());
                existingFileBean.setDescription(fileBean.getDescription());
                existingFileBean.setVisibilityGroups(fileBean
                        .getVisibilityGroups());
                existingFileBean.setKeywords(fileBean.getKeywords());
                savedFileBean = service
                        .saveCharacterizationFile(existingFileBean);
            } else {
                throw new Exception(
                        "Please upload a new file if existing file drop-down list is empty or select a file from the drop-down list.");
            }
        }
        // expose the saved file to the view under a slot-numbered session key
        request.getSession().setAttribute("characterizationFile" + fileNumber,
                savedFileBean);
        String forwardPage = (String) theForm.get("forwardPage");
        // resolve forwardPage as a named forward of the action mapping
        forward = mapping.findForward(forwardPage);
        return forward;
    }

    /**
     * Prepares the load-file form: clears unrelated session state, caches the
     * particle's run files in the session, and seeds the form with values from
     * the request.
     */
    public ActionForward setup(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response)
            throws Exception {
        HttpSession session = request.getSession();
        InitSessionSetup.getInstance().clearWorkflowSession(session);
        InitSessionSetup.getInstance().clearSearchSession(session);
        InitSessionSetup.getInstance().clearInventorySession(session);
        String particleName = request.getParameter("particleName");
        InitSessionSetup.getInstance().setAllRunFiles(session, particleName);
        String fileNumber = request.getParameter("fileNumber");
        DynaValidatorForm theForm = (DynaValidatorForm) form;
        theForm.set("particleName", particleName);
        theForm.set("fileNumber", fileNumber);
        theForm.set("forwardPage", (String) request
                .getAttribute("loadFileForward"));
        theForm.set("characterizationName", (String) request
                .getAttribute("characterizationName"));
        return mapping.getInputForward();
    }

    /**
     * Prepares the update form: clears unrelated session state, loads the
     * derived bio-assay data bean identified by the "fileId" request parameter
     * into the form, and copies action/form names into request attributes for
     * the view.
     */
    public ActionForward setupUpdate(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response)
            throws Exception {
        HttpSession session = request.getSession();
        InitSessionSetup.getInstance().clearWorkflowSession(session);
        InitSessionSetup.getInstance().clearSearchSession(session);
        InitSessionSetup.getInstance().clearInventorySession(session);
        String fileId = request.getParameter("fileId");
        SubmitNanoparticleService service = new SubmitNanoparticleService();
        DerivedBioAssayDataBean fileBean = service
                .getDerivedBioAssayData(fileId);
        DynaValidatorForm theForm = (DynaValidatorForm) form;
        theForm.set("file", fileBean);
        String actionName = request.getParameter("actionName");
        String formName = request.getParameter("formName");
        request.setAttribute("actionName", actionName);
        request.setAttribute("formName", formName);
        return mapping.getInputForward();
    }

    /** View mode uses the same preparation as update mode. */
    public ActionForward setupView(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response)
            throws Exception {
        return setupUpdate(mapping, form, request, response);
    }

    /**
     * Persists edited metadata of an existing derived bio-assay data file and
     * queues a confirmation message for the view.
     */
    public ActionForward update(ActionMapping mapping, ActionForm form,
            HttpServletRequest request, HttpServletResponse response)
            throws Exception {
        DynaValidatorForm theForm = (DynaValidatorForm) form;
        DerivedBioAssayDataBean fileBean = (DerivedBioAssayDataBean) theForm
                .get("file");
        SubmitNanoparticleService service = new SubmitNanoparticleService();
        service.updateDerivedBioAssayDataMetaData(fileBean);
        ActionMessages msgs = new ActionMessages();
        ActionMessage msg = new ActionMessage(
                "message.updateDerivedBioAssayData", fileBean.getPath());
        msgs.add("message", msg);
        saveMessages(request, msgs);
        return mapping.getInputForward();
    }

    /** All dispatch methods of this action require an authenticated user. */
    public boolean loginRequired() {
        return true;
    }
}
| updated forward for submit
SVN-Revision: 10241
| src/gov/nih/nci/calab/ui/submit/LoadDerivedBioAssayDataAction.java | updated forward for submit |
|
Java | mit | 673d9c2ca7b2de98c7e958c8bf90c6b05bcd9be9 | 0 | socrata/datasync,socrata/datasync | package com.socrata.datasync.config.controlfile;
import com.socrata.datasync.PublishMethod;
import com.socrata.datasync.Utils;
import com.socrata.datasync.job.IntegrationJob;
import org.codehaus.jackson.annotate.JsonIgnore;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.annotate.JsonPropertyOrder;
import org.codehaus.jackson.map.annotate.JsonSerialize;
import java.util.HashMap;
@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown=true)
@JsonPropertyOrder(alphabetic=true)
public class ControlFile {
public String action;
public String opaque;
public FileTypeControl csv;
public FileTypeControl tsv;
public Boolean replacePreviousQueued;
// NB: when using a mapper to read this class, you must enable
// DeserializationConfig.Feature.ACCEPT_SINGLE_VALUE_AS_ARRAY, if either of the timestamp formats
// in csvControl or tsvControl are strings, rather than arrays of strings.
public ControlFile() {}
public ControlFile(String action, String opaque, FileTypeControl csvControl, FileTypeControl tsvControl, Boolean replacePreviousQueued) {
this.action = action;
this.opaque = opaque;
this.csv = csvControl;
this.tsv = tsvControl;
}
// Hack city. Apparently the ControlFile must have both a csv and a tsv control file, even though only one may be
// active at any given time. The control file editor will be manipulating the active one, regardless of type.
// Thus, I need to return the active FTC here that can later be manipulated throughout the model.
@JsonIgnore
public FileTypeControl getFileTypeControl(){
if (csv != null)
return csv;
else
return tsv;
}
@JsonIgnore
public FileTypeControl getCsvFtc(){
return csv;
}
@JsonIgnore
public FileTypeControl getTsvFtc(){
return tsv;
}
/**
* Generates the default ControlFile object based on given job parameters
*
* @param fileToPublish filename of file to publish (.tsv or .csv file)
* @param publishMethod to use to publish (upsert, append, replace, or delete)
* NOTE: this option will be overriden if this control file is for an ftp job
* @param columns the column headers correcsponding to the filetoPublish, needed if it lacks headers
* @param useSocrataGeocoding if true use Socrata's geocoding to geocode Location columns
* @return content of control.json based on given job parameters
*/
public static ControlFile generateControlFile(final String fileToPublish,
final PublishMethod publishMethod,
final String[] columns,
final boolean useSocrataGeocoding,
final boolean hasHeaderRow) {
String fileToPublishExtension = Utils.getFileExtension(fileToPublish);
boolean isCsv = fileToPublishExtension.equalsIgnoreCase("csv");
String quote = isCsv ? "\"" : "\u0000";
FileTypeControl ftc = new FileTypeControl()
.columns(columns)
.encoding("utf-8")
.hasHeaderRow(hasHeaderRow)
.quote(quote);
if (!PublishMethod.delete.equals(publishMethod)) {
int skip = 0;
String separator = isCsv ? "," : "\t";
//Adding our standard export formats so that a customer can easily round-trip data into the system.
String[] timeFormats = new String[]{"ISO8601", "MM/dd/yy", "MM/dd/yyyy", "dd-MMM-yyyy","MM/dd/yyyy HH:mm:ss a Z","MM/dd/yyyy HH:mm:ss a"};
ftc.emptyTextIsNull(true)
.filePath(fileToPublish)
.ignoreColumns(new String[]{})
.fixedTimestampFormat(timeFormats)
.floatingTimestampFormat(timeFormats)
.separator(separator)
.skip(skip)
.timezone("UTC")
.useSocrataGeocoding(useSocrataGeocoding)
.trimWhitespace(true)
.trimServerWhitespace(true)
.overrides(new HashMap<String, ColumnOverride>());
// for replace jobs, calculate column statistics
if (PublishMethod.replace.equals(publishMethod))
ftc.columnStatistics(true);
}
if (isCsv) {
return new ControlFile(Utils.capitalizeFirstLetter(publishMethod.name()), null, ftc, null, null);
} else {
return new ControlFile(Utils.capitalizeFirstLetter(publishMethod.name()), null, null, ftc, null);
}
}
public String generateAndAddOpaqueUUID() {
String uuid = Utils.generateRequestId();
this.opaque = uuid;
return uuid;
}
}
| src/main/java/com/socrata/datasync/config/controlfile/ControlFile.java | package com.socrata.datasync.config.controlfile;
import com.socrata.datasync.PublishMethod;
import com.socrata.datasync.Utils;
import com.socrata.datasync.job.IntegrationJob;
import org.codehaus.jackson.annotate.JsonIgnore;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.annotate.JsonPropertyOrder;
import org.codehaus.jackson.map.annotate.JsonSerialize;
import java.util.HashMap;
@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown=true)
@JsonPropertyOrder(alphabetic=true)
public class ControlFile {
public String action;
public String opaque;
public FileTypeControl csv;
public FileTypeControl tsv;
public Boolean replacePreviousQueued;
// NB: when using a mapper to read this class, you must enable
// DeserializationConfig.Feature.ACCEPT_SINGLE_VALUE_AS_ARRAY, if either of the timestamp formats
// in csvControl or tsvControl are strings, rather than arrays of strings.
public ControlFile() {}
public ControlFile(String action, String opaque, FileTypeControl csvControl, FileTypeControl tsvControl, Boolean replacePreviousQueued) {
this.action = action;
this.opaque = opaque;
this.csv = csvControl;
this.tsv = tsvControl;
}
// Hack city. Apparently the ControlFile must have both a csv and a tsv control file, even though only one may be
// active at any given time. The control file editor will be manipulating the active one, regardless of type.
// Thus, I need to return the active FTC here that can later be manipulated throughout the model.
@JsonIgnore
public FileTypeControl getFileTypeControl(){
if (csv != null)
return csv;
else
return tsv;
}
@JsonIgnore
public FileTypeControl getCsvFtc(){
return csv;
}
@JsonIgnore
public FileTypeControl getTsvFtc(){
return tsv;
}
/**
* Generates the default ControlFile object based on given job parameters
*
* @param fileToPublish filename of file to publish (.tsv or .csv file)
* @param publishMethod to use to publish (upsert, append, replace, or delete)
* NOTE: this option will be overriden if this control file is for an ftp job
* @param columns the column headers correcsponding to the filetoPublish, needed if it lacks headers
* @param useSocrataGeocoding if true use Socrata's geocoding to geocode Location columns
* @return content of control.json based on given job parameters
*/
public static ControlFile generateControlFile(final String fileToPublish,
final PublishMethod publishMethod,
final String[] columns,
final boolean useSocrataGeocoding,
final boolean hasHeaderRow) {
String fileToPublishExtension = Utils.getFileExtension(fileToPublish);
boolean isCsv = fileToPublishExtension.equalsIgnoreCase("csv");
String quote = isCsv ? "\"" : "\u0000";
FileTypeControl ftc = new FileTypeControl()
.columns(columns)
.encoding("utf-8")
.hasHeaderRow(hasHeaderRow)
.quote(quote);
if (!PublishMethod.delete.equals(publishMethod)) {
int skip = 0;
String separator = isCsv ? "," : "\t";
//Adding our standard export formats so that a customer can easily round-trip data into the system.
String[] timeFormats = new String[]{"ISO8601", "MM/dd/yy", "MM/dd/yyyy", "dd-MMM-yyyy","MM/dd/yyyy HH:mm:ss a Z","MM/dd/yyyy HH:mm:ss a"};
ftc.emptyTextIsNull(true)
.filePath(fileToPublish)
.ignoreColumns(new String[]{})
.fixedTimestampFormat(timeFormats)
.floatingTimestampFormat(timeFormats)
.separator(separator)
.skip(skip)
.timezone("UTC")
.useSocrataGeocoding(useSocrataGeocoding)
.trimWhitespace(true)
.trimServerWhitespace(true)
.overrides(new HashMap<String, ColumnOverride>())
.columnStatistics(true);
}
if (isCsv) {
return new ControlFile(Utils.capitalizeFirstLetter(publishMethod.name()), null, ftc, null, null);
} else {
return new ControlFile(Utils.capitalizeFirstLetter(publishMethod.name()), null, null, ftc, null);
}
}
public String generateAndAddOpaqueUUID() {
String uuid = Utils.generateRequestId();
this.opaque = uuid;
return uuid;
}
}
| added check to set columnStatistics only on replace jobs
| src/main/java/com/socrata/datasync/config/controlfile/ControlFile.java | added check to set columnStatistics only on replace jobs |
|
Java | mit | e31154d7b89649e0e2f40fad6b8c4f9e92baf9f5 | 0 | hector-client/hector,koa/hector,Ursula/hector,normanmaurer/hector,1and1/hector,hector-client/hector,rantav/hector,Ursula/hector,apigee/hector | package me.prettyprint.cassandra;
import java.io.IOException;
import me.prettyprint.cassandra.testutils.EmbeddedServerHelper;
import org.apache.thrift.transport.TTransportException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
/**
* Base class for test cases that need access to EmbeddedServerHelper
*
* @author Nate McCall ([email protected])
*
*/
public abstract class BaseEmbededServerSetupTest {
private static EmbeddedServerHelper embedded;
/**
* Set embedded cassandra up and spawn it in a new thread.
*
* @throws TTransportException
* @throws IOException
* @throws InterruptedException
*/
@BeforeClass
public static void setup() throws TTransportException, IOException, InterruptedException {
embedded = new EmbeddedServerHelper();
embedded.setup();
}
@AfterClass
public static void teardown() throws IOException {
embedded.teardown();
embedded = null;
}
}
| src/test/java/me/prettyprint/cassandra/BaseEmbededServerSetupTest.java | package me.prettyprint.cassandra;
import java.io.IOException;
import me.prettyprint.cassandra.testutils.EmbeddedServerHelper;
import org.apache.thrift.transport.TTransportException;
import org.junit.AfterClass;
import org.junit.BeforeClass;
/**
* Base class for test cases that need access to EmbeddedServerHelper
*
* @author Nate McCall ([email protected])
*
*/
public abstract class BaseEmbededServerSetupTest {
private static EmbeddedServerHelper embedded;
/**
* Set embedded cassandra up and spawn it in a new thread.
*
* @throws TTransportException
* @throws IOException
* @throws InterruptedException
*/
@BeforeClass
public static void setup() throws TTransportException, IOException, InterruptedException {
embedded = new EmbeddedServerHelper();
embedded.setup();
}
@AfterClass
public static void teardown() throws IOException {
embedded.teardown();
}
}
| Add some cleanup after teardown of BaseEmbededServerSetupTest
| src/test/java/me/prettyprint/cassandra/BaseEmbededServerSetupTest.java | Add some cleanup after teardown of BaseEmbededServerSetupTest |
|
Java | mit | ea260071b7a52cab40ac450968173fa1eeee2392 | 0 | acknak/NakloidGUI | package nak.nakloidGUI.actions.options;
import org.eclipse.jface.dialogs.MessageDialog;
import nak.nakloidGUI.actions.AbstractAction;
import nak.nakloidGUI.coredata.CoreData;
import nak.nakloidGUI.gui.MainWindow;
public class AboutNakloidAction extends AbstractAction {
public AboutNakloidAction(MainWindow mainWindow, CoreData coreData) {
super(mainWindow, coreData);
setText("&Nakloidについて(&A)");
}
@Override
public void run() {
MessageDialog.openInformation(mainWindow.getShell(), "Nakloidについて", "Nakloid (GUI) ver.160710\n"
+ "https://github.com/acknak/Nakloid/\n\n"
+ "this software includes the work that is distributed in the Apache License 2.0\n\n"
+ "Library List (Nakloid)\n"
+ "・Boost C++ Libraries\n"
+ " http://www.boost.org/\n"
+ "・FFTSS\n"
+ " http://www.ssisc.org/fftss/\n"
+ "・RapidJSON\n"
+ " http://rapidjson.org/\n\n"
+ "Library List (WORLD for Nakloid)\n"
+ "・WORLD\n"
+ " http://ml.cs.yamanashi.ac.jp/world/\n"
+ "・Boost C++ Libraries\n"
+ " http://www.boost.org/\n"
+ "・RapidJSON\n"
+ " http://rapidjson.org/\n\n"
+ "Library List (NakloidGUI)\n"
+ "・SWT/JFace\n"
+ " https://www.eclipse.org/swt/\n"
+ "・Jackson\n"
+ " https://github.com/FasterXML/jackson\n\n"
+ "バグ報告・感想等は [email protected] か @acknak39 まで");
}
}
| nak/nakloidGUI/actions/options/AboutNakloidAction.java | package nak.nakloidGUI.actions.options;
import org.eclipse.jface.dialogs.MessageDialog;
import nak.nakloidGUI.actions.AbstractAction;
import nak.nakloidGUI.coredata.CoreData;
import nak.nakloidGUI.gui.MainWindow;
public class AboutNakloidAction extends AbstractAction {
public AboutNakloidAction(MainWindow mainWindow, CoreData coreData) {
super(mainWindow, coreData);
setText("&Nakloidについて(&A)");
}
@Override
public void run() {
MessageDialog.openInformation(mainWindow.getShell(), "Nakloidについて", "Nakloid (GUI) ver.160429\n"
+ "https://github.com/acknak/Nakloid/\n\n"
+ "this software includes the work that is distributed in the Apache License 2.0\n\n"
+ "Library List (Nakloid)\n"
+ "・Boost C++ Libraries\n"
+ "・FFTSS\n"
+ "・RapidJSON\n\n"
+ "Library List (NakloidGUI)\n"
+ "・SWT/JFace\n"
+ "・Jackson\n\n"
+ "バグ報告・感想等は [email protected] か @acknak39 まで");
}
}
| version up
| nak/nakloidGUI/actions/options/AboutNakloidAction.java | version up |
|
Java | mit | 5cf84289e5cfe04a0888cfec0e2af5004009fa66 | 0 | bx5974/sikuli,bx5974/sikuli,bx5974/sikuli,bx5974/sikuli,bx5974/sikuli,sikuli/sikuli,sikuli/sikuli,sikuli/sikuli,bx5974/sikuli,sikuli/sikuli,sikuli/sikuli,sikuli/sikuli,sikuli/sikuli,bx5974/sikuli | package org.sikuli.script;
import java.io.*;
import java.awt.*;
import java.awt.datatransfer.*;
import java.awt.event.*;
import java.awt.MouseInfo;
import java.lang.reflect.Constructor;
public class Env {
public static Location getMouseLocation() throws HeadlessException{
Point loc = MouseInfo.getPointerInfo().getLocation();
return new Location(loc.x, loc.y);
}
public static String getOSVersion(){
return System.getProperty("os.version");
}
public static OS getOS(){
String os = System.getProperty("os.name").toLowerCase();
if( os.startsWith("mac os x") )
return OS.MAC;
else if( os.startsWith("windows"))
return OS.WINDOWS;
else if( os.startsWith("linux"))
return OS.LINUX;
return OS.NOT_SUPPORTED;
}
public static boolean isWindows(){
return getOS() == OS.WINDOWS;
}
public static boolean isLinux(){
return getOS() == OS.LINUX;
}
public static boolean isMac(){
return getOS() == OS.MAC;
}
public static String getSeparator(){
if(isWindows())
return ";";
return ":";
}
public static String getClipboard(){
Transferable content = Clipboard.getSystemClipboard().getContents(null);
try{
if(content.isDataFlavorSupported(DataFlavor.stringFlavor))
return content.getTransferData(DataFlavor.stringFlavor).toString();
}
catch(UnsupportedFlavorException e){
System.out.println("UnsupportedFlavorException: " + content);
}
catch(IOException e){
e.printStackTrace();
}
return "";
}
static String getOSUtilClass(){
String pkg = "org.sikuli.script.";
switch(getOS()){
case MAC: return pkg+"MacUtil";
case WINDOWS: return pkg+"Win32Util";
case LINUX: return pkg+"LinuxUtil";
default:
System.err.println("Warning: Sikuli doesn't fully support your OS.");
return pkg+"DummyUtil";
}
}
static OSUtil _osUtil = null;
public static OSUtil getOSUtil(){
if(_osUtil == null){
try{
Class c = Class.forName(getOSUtilClass());
Constructor constr = c.getConstructor();
_osUtil = (OSUtil)constr.newInstance();
}
catch(Exception e){
Debug.error("Can't create OS Util: " + e.getMessage());
}
}
return _osUtil;
}
static boolean isLockOn(char key){
Toolkit tk = Toolkit.getDefaultToolkit();
switch(key){
case '\ue025': return tk.getLockingKeyState(KeyEvent.VK_SCROLL_LOCK);
case '\ue027': return tk.getLockingKeyState(KeyEvent.VK_CAPS_LOCK);
case '\ue03B': return tk.getLockingKeyState(KeyEvent.VK_NUM_LOCK);
default:
return false;
}
}
static int getHotkeyModifier(){
if(getOS() == OS.MAC)
return KeyEvent.VK_META;
else
return KeyEvent.VK_CONTROL;
}
static String getSikuliDataPath(){
String home, sikuliPath;
if(isWindows()){
home = System.getenv("APPDATA");
sikuliPath = "Sikuli";
}
else if(isMac()){
home = System.getProperty("user.home") +
"/Library/Application Support";
sikuliPath = "Sikuli";
}
else{
home = System.getProperty("user.home");
sikuliPath = ".sikuli";
}
File fHome = new File(home, sikuliPath);
return fHome.getAbsolutePath();
}
}
| sikuli-script/src/main/java/org/sikuli/script/Env.java | package org.sikuli.script;
import java.io.*;
import java.awt.*;
import java.awt.datatransfer.*;
import java.awt.event.*;
import java.awt.MouseInfo;
import java.lang.reflect.Constructor;
public class Env {
public static Location getMouseLocation() throws HeadlessException{
Point loc = MouseInfo.getPointerInfo().getLocation();
return new Location(loc.x, loc.y);
}
public static String getOSVersion(){
return System.getProperty("os.version");
}
public static OS getOS(){
String os = System.getProperty("os.name").toLowerCase();
if( os.startsWith("mac os x") )
return OS.MAC;
else if( os.startsWith("windows"))
return OS.WINDOWS;
else if( os.startsWith("linux"))
return OS.LINUX;
return OS.NOT_SUPPORTED;
}
public static boolean isWindows(){
return getOS() == OS.WINDOWS;
}
public static boolean isLinux(){
return getOS() == OS.LINUX;
}
public static boolean isMac(){
return getOS() == OS.MAC;
}
public static String getSeparator(){
if(isWindows())
return ";";
return ":";
}
public static String getClipboard(){
Transferable content = Clipboard.getSystemClipboard().getContents(null);
try{
if(content.isDataFlavorSupported(DataFlavor.stringFlavor))
return content.getTransferData(DataFlavor.stringFlavor).toString();
}
catch(UnsupportedFlavorException e){
System.out.println("UnsupportedFlavorException: " + content);
}
catch(IOException e){
e.printStackTrace();
}
return "";
}
static String getOSUtilClass(){
String pkg = "org.sikuli.script.";
switch(getOS()){
case MAC: return pkg+"MacUtil";
case WINDOWS: return pkg+"Win32Util";
case LINUX: return pkg+"LinuxUtil";
default:
System.err.println("Warning: Sikuli doesn't fully support your OS.");
return pkg+"DummyUtil";
}
}
static OSUtil _osUtil = null;
public static OSUtil getOSUtil(){
if(_osUtil == null){
try{
Class c = Class.forName(getOSUtilClass());
Constructor constr = c.getConstructor();
_osUtil = (OSUtil)constr.newInstance();
}
catch(Exception e){
Debug.error("Can't create OS Util: " + e.getMessage());
}
}
return _osUtil;
}
static int getHotkeyModifier(){
if(getOS() == OS.MAC)
return KeyEvent.VK_META;
else
return KeyEvent.VK_CONTROL;
}
static String getSikuliDataPath(){
String home, sikuliPath;
if(isWindows()){
home = System.getenv("APPDATA");
sikuliPath = "Sikuli";
}
else if(isMac()){
home = System.getProperty("user.home") +
"/Library/Application Support";
sikuliPath = "Sikuli";
}
else{
home = System.getProperty("user.home");
sikuliPath = ".sikuli";
}
File fHome = new File(home, sikuliPath);
return fHome.getAbsolutePath();
}
}
| NEW API:
boolean Env.isLockOn(Key.CAPS_LOCK|Key.NUM_LOCK|Key.SCROLL_LOCK)
returns the state of the given lock key.
| sikuli-script/src/main/java/org/sikuli/script/Env.java | NEW API: boolean Env.isLockOn(Key.CAPS_LOCK|Key.NUM_LOCK|Key.SCROLL_LOCK) returns the state of the given lock key. |
|
Java | mit | 547e515968723334bcd8edff05c88870b5b9924f | 0 | 412750168/swipemenulistview | /*
* The MIT License (MIT)
*
* Copyright (c) 2015 baoyongzhang <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.baoyz.swipemenulistview.demo;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
/**
* SwipeMenuListView
* Created by baoyz on 15/6/29.
*/
public class MainActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
//add to test git
//add this data int master/remote
//add new data
//add to test git
//in dev branch: add data
}
public void onClick(View v){
switch (v.getId()) {
case R.id.button1:
startActivity(new Intent(this, SimpleActivity.class));
break;
case R.id.button2:
startActivity(new Intent(this, DifferentMenuActivity.class));
break;
}
}
}
| demo/src/main/java/com/baoyz/swipemenulistview/demo/MainActivity.java | /*
* The MIT License (MIT)
*
* Copyright (c) 2015 baoyongzhang <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.baoyz.swipemenulistview.demo;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
/**
* SwipeMenuListView
* Created by baoyz on 15/6/29.
*/
public class MainActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
//add to test git
//add this data int master/remote
//add new data
}
public void onClick(View v){
switch (v.getId()) {
case R.id.button1:
startActivity(new Intent(this, SimpleActivity.class));
break;
case R.id.button2:
startActivity(new Intent(this, DifferentMenuActivity.class));
break;
}
}
}
| 1.in dev branch :add data
| demo/src/main/java/com/baoyz/swipemenulistview/demo/MainActivity.java | 1.in dev branch :add data |
|
Java | mit | 9faef1451155214322629c72b88e2587319f328a | 0 | Innovimax-SARL/mix-them | package innovimax.mixthem.arguments;
import innovimax.mixthem.io.InputResource;
import java.io.File;
import java.io.InputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.util.Enumeration;
import java.util.EnumMap;
import java.util.Iterator;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
* <p>Mix-them command line arguments management.</p>
* @author Innovimax
* @version 1.0
*/
public class Arguments {
private Rule rule = null;
private Map<RuleParam, ParamValue> ruleParams = null;
private InputResource input1 = null;
private InputResource input2 = null;
private void setRule(Rule rule) {
this.rule = rule;
}
public Rule getRule() {
return this.rule;
}
void setRuleParameters(Map<RuleParam, ParamValue> ruleParams) {
this.ruleParams = ruleParams;
}
public Map<RuleParam, ParamValue> getRuleParameters() {
return this.ruleParams;
}
void setFirstInput(InputResource input) {
this.input1 = input;
}
public InputResource getFirstInput() {
return this.input1;
}
void setSecondInput(InputResource input) {
this.input2 = input;
}
public InputResource getSecondInput() {
return this.input2;
}
public static Arguments checkArguments(String[] args) throws ArgumentException {
Arguments mixArgs = new Arguments();
int index = 0;
Rule rule = findRuleArgument(args, index, "rule");
Map<RuleParam, ParamValue> ruleParams = null;
if (rule != null) {
index++;
ruleParams = findRuleParameters(args, index, rule);
index += ruleParams.size();
} else {
rule = Rule.ADD;
}
mixArgs.setRule(rule);
mixArgs.setRuleParameters(ruleParams);
String zipOption = findZipOptionArgument(args, index);
if (zipOption == null) {
File file1 = findFileArgument(args, index, "file1");
File file2 = findFileArgument(args, ++index, "file2");
mixArgs.setFirstInput(InputResource.createFile(file1));
mixArgs.setSecondInput(InputResource.createFile(file2));
} else {
ZipFile zipFile = new ZipFile(findFileArgument(args, ++index, zipOption));
InputStream input1 = extractFileEntry(zipFile, 1, "file1");
InputStream input2 = extractFileEntry(zipFile, 2, "file2");
mixArgs.setFirstInput(InputResource.createInputStream(input1));
mixArgs.setSecondInput(InputResource.createInputStream(input2));
}
return mixArgs;
}
private static Rule findRuleArgument(String[] args, int index, String name) throws ArgumentException {
Rule rule = null;
if (args.length > index) {
final String ruleString = args[index];
if (ruleString.startsWith("-")) {
rule = Rule.findByName(ruleString.substring(1));
if (rule == null) {
throw new ArgumentException(name + " argument is incorrect: " + ruleString);
}
}
}
return rule;
}
private static Map<RuleParam, ParamValue> findRuleParameters(String[] args, int index, Rule rule) throws ArgumentException {
Map<RuleParam, ParamValue> map = new EnumMap<RuleParam, ParamValue>(RuleParam.class);
Iterator<RuleParam> iterator = rule.getParams().iterator();
if (iterator.hasNext()) {
RuleParam param = iterator.next();
if (args.length > index) {
String arg = args[index];
if (arg.startsWith("#")) {
final String paramString = arg.substring(1);
try {
ParamValue value = param.createValue(paramString);
map.put(param, value);
index++;
} catch (NumberFormatException e) {
throw new ArgumentException("[" + param.getName() + "] parameter is incorrect: " + paramString);
}
}
}
}
return map;
}
private static File findFileArgument(String[] args, int index, String name) throws ArgumentException {
File file = null;
if (args.length > index) {
String filepath = args[index];
file = new File(filepath);
final Path path = file.toPath();
if (Files.exists(path, LinkOption.NOFOLLOW_LINKS)) {
if (!Files.isReadable(path)) {
throw new ArgumentException(name + " cannot be read: " + filepath);
}
} else {
throw new ArgumentException(name + " not found: " + filepath);
}
} else {
throw new ArgumentException(name + " argument missing.");
}
return file;
}
private static String findZipOptionArgument(String[] args, int index) {
String zipOption = null;
if (args.length > index && (args[index].equals("--zip") || args[index].equals("--jar"))) {
zipOption = args[index].substring(2);
}
return zipOption;
}
private static InputStream extractFileEntry(ZipFile zipFile, int index, String name) throws ArgumentException, IOException {
InputStream input = null;
if (zipFile.size() >= index) {
Enumeration entries = zipFile.entries();
if (index > 1) {
entries.nextElement();
}
input = zipFile.getInputStream(entries.nextElement());
} else {
throw new ArgumentException(name + " entry missing.");
}
return input;
}
public static void printUsage() {
System.out.println(" ");
System.out.println("Usage:");
System.out.println(" ");
System.out.println(" mix-them file1 file2");
System.out.println(" (will generate any file based on file1 and file2)");
System.out.println(" ");
System.out.println(" mix-them -[rule] file1 file2");
System.out.println(" (will generate a file based on the rule)");
System.out.println(" ");
System.out.println(" Here are the list of rules");
for(Rule rule : Rule.values()) {
System.out.print(" - " + rule.getName());
for(RuleParam param : rule.getParams()) {
System.out.print(" [#" + param.getName() + "]");
}
System.out.println(": " + rule.getDescription());
}
System.out.println(" ");
}
}
| src/main/java/innovimax/mixthem/arguments/Arguments.java | package innovimax.mixthem.arguments;
import innovimax.mixthem.io.InputResource;
import java.io.File;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.util.Enumeration;
import java.util.EnumMap;
import java.util.Iterator;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
* <p>Mix-them command line arguments management.</p>
* @author Innovimax
* @version 1.0
*/
public class Arguments {
private Rule rule = null;
private Map<RuleParam, ParamValue> ruleParams = null;
private InputResource input1 = null;
private InputResource input2 = null;
private void setRule(Rule rule) {
this.rule = rule;
}
public Rule getRule() {
return this.rule;
}
void setRuleParameters(Map<RuleParam, ParamValue> ruleParams) {
this.ruleParams = ruleParams;
}
public Map<RuleParam, ParamValue> getRuleParameters() {
return this.ruleParams;
}
void setFirstInput(InputResource input) {
this.input1 = input;
}
public InputResource getFirstInput() {
return this.input1;
}
void setSecondInput(InputResource input) {
this.input2 = input;
}
public InputResource getSecondInput() {
return this.input2;
}
public static Arguments checkArguments(String[] args) throws ArgumentException {
Arguments mixArgs = new Arguments();
int index = 0;
Rule rule = findRuleArgument(args, index, "rule");
Map<RuleParam, ParamValue> ruleParams = null;
if (rule != null) {
index++;
ruleParams = findRuleParameters(args, index, rule);
index += ruleParams.size();
} else {
rule = Rule.ADD;
}
mixArgs.setRule(rule);
mixArgs.setRuleParameters(ruleParams);
String zipOption = findZipOptionArgument(args, index);
if (zipOption == null) {
File file1 = findFileArgument(args, index, "file1");
File file2 = findFileArgument(args, ++index, "file2");
mixArgs.setFirstInput(InputResource.createFile(file1));
mixArgs.setSecondInput(InputResource.createFile(file2));
} else {
ZipFile zipFile = new ZipFile(findFileArgument(args, ++index, zipOption));
InputStream input1 = extractFileEntry(zipFile, 1, "file1");
InputStream input2 = extractFileEntry(zipFile, 2, "file2");
mixArgs.setFirstInput(InputResource.createInputStream(input1));
mixArgs.setSecondInput(InputResource.createInputStream(input2));
}
return mixArgs;
}
private static Rule findRuleArgument(String[] args, int index, String name) throws ArgumentException {
Rule rule = null;
if (args.length > index) {
final String ruleString = args[index];
if (ruleString.startsWith("-")) {
rule = Rule.findByName(ruleString.substring(1));
if (rule == null) {
throw new ArgumentException(name + " argument is incorrect: " + ruleString);
}
}
}
return rule;
}
private static Map<RuleParam, ParamValue> findRuleParameters(String[] args, int index, Rule rule) throws ArgumentException {
Map<RuleParam, ParamValue> map = new EnumMap<RuleParam, ParamValue>(RuleParam.class);
Iterator<RuleParam> iterator = rule.getParams().iterator();
if (iterator.hasNext()) {
RuleParam param = iterator.next();
if (args.length > index) {
String arg = args[index];
if (arg.startsWith("#")) {
final String paramString = arg.substring(1);
try {
ParamValue value = param.createValue(paramString);
map.put(param, value);
index++;
} catch (NumberFormatException e) {
throw new ArgumentException("[" + param.getName() + "] parameter is incorrect: " + paramString);
}
}
}
}
return map;
}
private static File findFileArgument(String[] args, int index, String name) throws ArgumentException {
File file = null;
if (args.length > index) {
String filepath = args[index];
file = new File(filepath);
final Path path = file.toPath();
if (Files.exists(path, LinkOption.NOFOLLOW_LINKS)) {
if (!Files.isReadable(path)) {
throw new ArgumentException(name + " cannot be read: " + filepath);
}
} else {
throw new ArgumentException(name + " not found: " + filepath);
}
} else {
throw new ArgumentException(name + " argument missing.");
}
return file;
}
// Returns the archive option name ("zip" or "jar", i.e. the argument without
// its leading "--") when the argument at 'index' is exactly "--zip" or
// "--jar"; otherwise returns null.
private static String findZipOptionArgument(String[] args, int index) {
    if (args.length > index) {
        final String candidate = args[index];
        if ("--zip".equals(candidate) || "--jar".equals(candidate)) {
            return candidate.substring(2);
        }
    }
    return null;
}
// Opens a stream over the index-th entry (1-based) of the given archive.
// Throws ArgumentException (using the human-readable entry name) when the
// archive holds fewer entries than requested.
// Generalized: the original only ever skipped a single leading entry, which
// happened to be correct for indices 1 and 2; this version skips index-1
// entries and therefore works for any 1-based index.
private static InputStream extractFileEntry(ZipFile zipFile, int index, String name) throws ArgumentException, IOException {
    if (zipFile.size() < index) {
        throw new ArgumentException(name + " entry missing.");
    }
    // Wildcard matches java.util.zip.ZipFile.entries()'s declared type.
    Enumeration<? extends ZipEntry> entries = zipFile.entries();
    for (int skipped = 1; skipped < index; skipped++) {
        entries.nextElement();
    }
    return zipFile.getInputStream(entries.nextElement());
}
// Prints the command-line usage summary. The rule list (one line per rule,
// "- <name> [#param]...: <description>") is built dynamically from the Rule
// enum so new rules are documented automatically.
// NOTE(review): literal spacing inside the strings is user-visible help
// formatting and must be preserved as-is.
public static void printUsage() {
System.out.println(" ");
System.out.println("Usage:");
System.out.println(" ");
System.out.println(" mix-them file1 file2");
System.out.println(" (will generate any file based on file1 and file2)");
System.out.println(" ");
System.out.println(" mix-them -[rule] file1 file2");
System.out.println(" (will generate a file based on the rule)");
System.out.println(" ");
System.out.println(" Here are the list of rules");
for(Rule rule : Rule.values()) {
System.out.print(" - " + rule.getName());
for(RuleParam param : rule.getParams()) {
System.out.print(" [#" + param.getName() + "]");
}
System.out.println(": " + rule.getDescription());
}
System.out.println(" ");
}
}
| Update Arguments.java | src/main/java/innovimax/mixthem/arguments/Arguments.java | Update Arguments.java |
|
Java | mit | eb26ff3e1870c436dffef4bfe49411e90a03b994 | 0 | obraliar/jabref,tschechlovdev/jabref,oscargus/jabref,Braunch/jabref,oscargus/jabref,obraliar/jabref,shitikanth/jabref,bartsch-dev/jabref,sauliusg/jabref,zellerdev/jabref,Mr-DLib/jabref,grimes2/jabref,mredaelli/jabref,Braunch/jabref,mairdl/jabref,Siedlerchr/jabref,ayanai1/jabref,jhshinn/jabref,shitikanth/jabref,tobiasdiez/jabref,ayanai1/jabref,grimes2/jabref,mredaelli/jabref,tschechlovdev/jabref,mairdl/jabref,mairdl/jabref,ayanai1/jabref,tschechlovdev/jabref,motokito/jabref,mredaelli/jabref,obraliar/jabref,tschechlovdev/jabref,shitikanth/jabref,oscargus/jabref,Mr-DLib/jabref,bartsch-dev/jabref,Siedlerchr/jabref,JabRef/jabref,zellerdev/jabref,obraliar/jabref,grimes2/jabref,Braunch/jabref,ayanai1/jabref,jhshinn/jabref,zellerdev/jabref,bartsch-dev/jabref,obraliar/jabref,sauliusg/jabref,JabRef/jabref,sauliusg/jabref,oscargus/jabref,ayanai1/jabref,bartsch-dev/jabref,Siedlerchr/jabref,bartsch-dev/jabref,mairdl/jabref,mairdl/jabref,tobiasdiez/jabref,shitikanth/jabref,oscargus/jabref,JabRef/jabref,sauliusg/jabref,motokito/jabref,Braunch/jabref,Siedlerchr/jabref,jhshinn/jabref,JabRef/jabref,Braunch/jabref,mredaelli/jabref,motokito/jabref,jhshinn/jabref,tobiasdiez/jabref,zellerdev/jabref,shitikanth/jabref,Mr-DLib/jabref,tschechlovdev/jabref,grimes2/jabref,Mr-DLib/jabref,Mr-DLib/jabref,grimes2/jabref,motokito/jabref,jhshinn/jabref,tobiasdiez/jabref,motokito/jabref,zellerdev/jabref,mredaelli/jabref | /* Copyright (C) 2003-2016 JabRef contributors.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package net.sf.jabref.logic.xmp;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.*;
import javax.xml.transform.TransformerException;
import net.sf.jabref.*;
import net.sf.jabref.exporter.LatexFieldFormatter;
import net.sf.jabref.importer.fileformat.BibtexParser;
import net.sf.jabref.importer.ParserResult;
import net.sf.jabref.model.database.BibDatabaseMode;
import net.sf.jabref.model.entry.*;
import net.sf.jabref.bibtex.BibEntryWriter;
import net.sf.jabref.model.database.BibDatabase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.jempbox.impl.DateConverter;
import org.apache.jempbox.impl.XMLUtil;
import org.apache.jempbox.xmp.XMPMetadata;
import org.apache.jempbox.xmp.XMPSchema;
import org.apache.jempbox.xmp.XMPSchemaDublinCore;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.exceptions.COSVisitorException;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDDocumentCatalog;
import org.apache.pdfbox.pdmodel.PDDocumentInformation;
import org.apache.pdfbox.pdmodel.common.PDMetadata;
import org.w3c.dom.Document;
/**
* XMPUtils provide support for reading and writing BibTex data as XMP-Metadata
* in PDF-documents.
*/
public class XMPUtil {
private static final Log LOGGER = LogFactory.getLog(XMPUtil.class);
/**
* Convenience method for readXMP(File).
*
* @param filename
* The filename from which to open the file.
* @return BibtexEntryies found in the PDF or an empty list
* @throws IOException
*/
public static List<BibEntry> readXMP(String filename) throws IOException {
    // Thin convenience wrapper: delegate to the File-based overload.
    File pdfFile = new File(filename);
    return XMPUtil.readXMP(pdfFile);
}
/**
* Try to write the given BibTexEntry in the XMP-stream of the given
* PDF-file.
*
* Throws an IOException if the file cannot be read or written, so the user
* can remove a lock or cancel the operation.
*
* The method will overwrite existing BibTeX-XMP-data, but keep other
* existing metadata.
*
* This is a convenience method for writeXMP(File, BibEntry).
*
* @param filename
* The filename from which to open the file.
* @param entry
* The entry to write.
* @param database
* maybenull An optional database which the given bibtex entries
* belong to, which will be used to resolve strings. If the
* database is null the strings will not be resolved.
* @throws TransformerException
* If the entry was malformed or unsupported.
* @throws IOException
* If the file could not be written to or could not be found.
*/
public static void writeXMP(String filename, BibEntry entry,
        BibDatabase database) throws IOException, TransformerException {
    // Thin convenience wrapper: delegate to the File-based overload.
    File pdfFile = new File(filename);
    XMPUtil.writeXMP(pdfFile, entry, database);
}
/**
* Try to read the BibTexEntries from the XMP-stream of the given PDF-file.
*
* @param file
* The file to read from.
*
* @throws IOException
* Throws an IOException if the file cannot be read, so the user
* than remove a lock or cancel the operation.
*/
public static List<BibEntry> readXMP(File file) throws IOException {
    // try-with-resources guarantees the stream is closed even when the
    // stream-based overload throws.
    try (InputStream inputStream = new FileInputStream(file)) {
        return XMPUtil.readXMP(inputStream);
    }
}
/**
* Try to read the given BibTexEntry from the XMP-stream of the given
* inputstream containing a PDF-file.
*
* @param inputStream
* The inputstream to read from.
*
* @throws IOException
* Throws an IOException if the file cannot be read, so the user
* than remove a lock or cancel the operation.
*
* @return list of BibEntries retrieved from the stream. May be empty, but never null
*/
public static List<BibEntry> readXMP(InputStream inputStream)
        throws IOException {
    // ArrayList instead of LinkedList: the list is only appended to and
    // iterated, so there is no benefit to a linked structure.
    List<BibEntry> result = new ArrayList<>();

    try (PDDocument document = PDDocument.load(inputStream)) {
        if (document.isEncrypted()) {
            throw new EncryptionNotSupportedException("Error: Cannot read metadata from encrypted document.");
        }

        Optional<XMPMetadata> meta = XMPUtil.getXMPMetadata(document);

        if (meta.isPresent()) {
            // Bibtex schemas take priority over everything else.
            List<XMPSchema> schemas = meta.get().getSchemasByNamespaceURI(XMPSchemaBibtex.NAMESPACE);
            for (XMPSchema schema : schemas) {
                XMPSchemaBibtex bib = (XMPSchemaBibtex) schema;
                BibEntry entry = bib.getBibtexEntry();
                if (entry.getType() == null) {
                    entry.setType("misc");
                }
                result.add(entry);
            }

            // No Bibtex schema found: fall back to Dublin Core schemas.
            if (result.isEmpty()) {
                schemas = meta.get().getSchemasByNamespaceURI(XMPSchemaDublinCore.NAMESPACE);
                for (XMPSchema schema : schemas) {
                    XMPSchemaDublinCore dc = (XMPSchemaDublinCore) schema;
                    Optional<BibEntry> entry = XMPUtil.getBibtexEntryFromDublinCore(dc);
                    if (entry.isPresent()) {
                        if (entry.get().getType() == null) {
                            entry.get().setType("misc");
                        }
                        result.add(entry.get());
                    }
                }
            }
        }

        // Still nothing: fall back to the plain PDF document information.
        if (result.isEmpty()) {
            PDDocumentInformation documentInformation = document.getDocumentInformation();
            XMPUtil.getBibtexEntryFromDocumentInformation(documentInformation).ifPresent(result::add);
        }
    }

    // Normalize the empty case to the shared immutable empty list,
    // matching the behavior documented for the other read methods.
    if (result.isEmpty()) {
        return Collections.emptyList();
    }
    return result;
}
/**
* Helper function for retrieving a BibEntry from the
* PDDocumentInformation in a PDF file.
*
* To understand how to get hold of a PDDocumentInformation have a look in
* the test cases for XMPUtil.
*
* The BibEntry is build by mapping individual fields in the document
* information (like author, title, keywords) to fields in a bibtex entry.
*
* @param di
* The document information from which to build a BibEntry.
*
* @return The bibtex entry found in the document information.
*/
public static Optional<BibEntry> getBibtexEntryFromDocumentInformation(
        PDDocumentInformation di) {
    BibEntry entry = new BibEntry();
    entry.setType("misc");

    // Standard PDF information fields map onto the corresponding
    // BibTeX fields (subject is treated as the abstract).
    String author = di.getAuthor();
    if (author != null) {
        entry.setField("author", author);
    }
    String title = di.getTitle();
    if (title != null) {
        entry.setField("title", title);
    }
    String keywords = di.getKeywords();
    if (keywords != null) {
        entry.setField("keywords", keywords);
    }
    String subject = di.getSubject();
    if (subject != null) {
        entry.setField("abstract", subject);
    }

    // Custom metadata keys of the form "bibtex/<field>" carry arbitrary
    // BibTeX fields; the special key "bibtex/entrytype" sets the entry type.
    COSDictionary dict = di.getDictionary();
    for (Map.Entry<COSName, COSBase> dictEntry : dict.entrySet()) {
        String key = dictEntry.getKey().getName();
        if (!key.startsWith("bibtex/")) {
            continue;
        }
        String value = dict.getString(key);
        String fieldName = key.substring("bibtex/".length());
        if ("entrytype".equals(fieldName)) {
            entry.setType(value);
        } else {
            entry.setField(fieldName, value);
        }
    }

    // An entry without any field means no usable metadata was present.
    if (entry.getFieldNames().isEmpty()) {
        return Optional.empty();
    }
    return Optional.of(entry);
}
/**
* Helper function for retrieving a BibEntry from the DublinCore metadata
* in a PDF file.
*
* To understand how to get hold of a XMPSchemaDublinCore have a look in the
* test cases for XMPUtil.
*
* The BibEntry is build by mapping individual fields in the dublin core
* (like creator, title, subject) to fields in a bibtex entry.
*
* @param dcSchema
* The document information from which to build a BibEntry.
*
* @return The bibtex entry found in the document information.
*/
public static Optional<BibEntry> getBibtexEntryFromDublinCore(XMPSchemaDublinCore dcSchema) {
    BibEntry entry = new BibEntry();

    // Contributor -> editor
    List<String> contributors = dcSchema.getContributors();
    if ((contributors != null) && !contributors.isEmpty()) {
        entry.setField("editor", String.join(" and ", contributors));
    }

    // Creator -> author
    List<String> creators = dcSchema.getCreators();
    if ((creators != null) && !creators.isEmpty()) {
        entry.setField("author", String.join(" and ", creators));
    }

    // Date -> year (+ month when the date string carries more than a year)
    List<String> dates = dcSchema.getSequenceList("dc:date");
    if ((dates != null) && !dates.isEmpty()) {
        String date = dates.get(0).trim();
        Calendar c = null;
        try {
            c = DateConverter.toCalendar(date);
        } catch (IOException ignored) {
            // Unparsable date: leave year/month unset.
        }
        if (c != null) {
            entry.setField("year", String.valueOf(c.get(Calendar.YEAR)));
            if (date.length() > 4) {
                entry.setField("month", MonthUtil.getMonthByIndex(c.get(Calendar.MONTH)).bibtexFormat);
            }
        }
    }

    // Description -> abstract
    String s = dcSchema.getDescription();
    if (s != null) {
        entry.setField("abstract", s);
    }

    // Identifier -> doi
    s = dcSchema.getIdentifier();
    if (s != null) {
        entry.setField("doi", s);
    }

    // Publisher -> publisher.
    // FIX: the previous code stored this under the nonstandard field name
    // "publishers", while the writing side (writeToDCSchema) reads the
    // standard BibTeX field "publisher" -- use "publisher" so that a
    // write/read round trip preserves the field.
    List<String> publishers = dcSchema.getPublishers();
    if ((publishers != null) && !publishers.isEmpty()) {
        entry.setField("publisher", String.join(" and ", publishers));
    }

    // Relation -> arbitrary BibTeX fields. The relation attribute is abused
    // by the writer to store all fields that have no Dublin Core mapping,
    // encoded as "bibtex/<field>/<value>".
    List<String> relationships = dcSchema.getRelationships();
    if (relationships != null) {
        for (String r : relationships) {
            if (r.startsWith("bibtex/")) {
                r = r.substring("bibtex/".length());
                int i = r.indexOf('/');
                if (i != -1) {
                    entry.setField(r.substring(0, i), r.substring(i + 1));
                }
            }
        }
    }

    // Rights -> rights
    s = dcSchema.getRights();
    if (s != null) {
        entry.setField("rights", s);
    }

    // Source -> source
    s = dcSchema.getSource();
    if (s != null) {
        entry.setField("source", s);
    }

    // Subject -> keywords
    List<String> subjects = dcSchema.getSubjects();
    if (subjects != null) {
        entry.addKeywords(subjects);
    }

    // Title -> title
    s = dcSchema.getTitle();
    if (s != null) {
        entry.setField("title", s);
    }

    // Type -> entry type (only the first listed type is used)
    List<String> types = dcSchema.getTypes();
    if ((types != null) && !types.isEmpty()) {
        s = types.get(0);
        if (s != null) {
            entry.setType(s);
        }
    }

    // An entry without any field means no usable metadata was present.
    return entry.getFieldNames().isEmpty() ? Optional.empty() : Optional.of(entry);
}
/**
* Try to write the given BibTexEntry in the XMP-stream of the given
* PDF-file.
*
* Throws an IOException if the file cannot be read or written, so the user
* can remove a lock or cancel the operation.
*
* The method will overwrite existing BibTeX-XMP-data, but keep other
* existing metadata.
*
* This is a convenience method for writeXMP(File, Collection).
*
* @param file
* The file to write to.
* @param entry
* The entry to write.
* @param database
* maybenull An optional database which the given bibtex entries
* belong to, which will be used to resolve strings. If the
* database is null the strings will not be resolved.
* @throws TransformerException
* If the entry was malformed or unsupported.
* @throws IOException
* If the file could not be written to or could not be found.
*/
public static void writeXMP(File file, BibEntry entry,
        BibDatabase database) throws IOException, TransformerException {
    // Delegate to the collection variant with an immutable single-element
    // list instead of building a mutable LinkedList by hand.
    XMPUtil.writeXMP(file, Collections.singletonList(entry), database, true);
}
/**
* Write the given BibtexEntries as XMP-metadata text to the given stream.
*
* The text that is written to the stream contains a complete XMP-document.
*
* @param bibtexEntries
* The BibtexEntries to write XMP-metadata for.
* @param database
* maybenull An optional database which the given bibtex entries
* belong to, which will be used to resolve strings. If the
* database is null the strings will not be resolved.
* @throws TransformerException
* Thrown if the bibtexEntries could not transformed to XMP.
* @throws IOException
* Thrown if an IOException occured while writing to the stream.
*
* @see #toXMP(java.util.Collection, BibDatabase) if you don't need strings to be
* resolved.
*/
private static void toXMP(Collection<BibEntry> bibtexEntries,
        BibDatabase database, OutputStream outputStream)
        throws IOException, TransformerException {
    // Resolve BibTeX strings when a database is available.
    Collection<BibEntry> resolvedEntries = (database == null)
            ? bibtexEntries
            : database.resolveForStrings(bibtexEntries, true);

    // Build one Bibtex schema per entry and serialize the whole
    // XMP document to the stream.
    XMPMetadata meta = new XMPMetadata();
    for (BibEntry resolved : resolvedEntries) {
        XMPSchemaBibtex schema = new XMPSchemaBibtex(meta);
        meta.addSchema(schema);
        schema.setBibtexEntry(resolved);
    }
    meta.save(outputStream);
}
/**
* Convenience method for toXMP(Collection<BibEntry>, BibDatabase,
* OutputStream) returning a String containing the XMP-metadata of the given
* collection of BibtexEntries.
*
* The resulting metadata string is wrapped as a complete XMP-document.
*
* @param bibtexEntries
* The BibtexEntries to return XMP-metadata for.
* @param database
* maybenull An optional database which the given bibtex entries
* belong to, which will be used to resolve strings. If the
* database is null the strings will not be resolved.
* @return The XMP representation of the given bibtexEntries.
* @throws TransformerException
* Thrown if the bibtexEntries could not transformed to XMP.
*/
public static String toXMP(Collection<BibEntry> bibtexEntries,
        BibDatabase database) throws TransformerException {
    try {
        ByteArrayOutputStream bs = new ByteArrayOutputStream();
        XMPUtil.toXMP(bibtexEntries, database, bs);
        // Decode explicitly as UTF-8: the XMP/XML bytes are presumably
        // written as UTF-8, and the no-arg toString() would use the
        // platform default charset instead, corrupting non-ASCII content
        // on some platforms. (UnsupportedEncodingException is an
        // IOException and is wrapped below like any other I/O failure.)
        return bs.toString(StandardCharsets.UTF_8.name());
    } catch (IOException e) {
        throw new TransformerException(e);
    }
}
/**
* Will read the XMPMetadata from the given pdf file, closing the file
* afterwards.
*
* @param inputStream
* The inputStream representing a PDF-file to read the
* XMPMetadata from.
* @return The XMPMetadata object found in the file
*/
private static Optional<XMPMetadata> readRawXMP(InputStream inputStream) throws IOException {
    // Load, check for encryption, extract the metadata, and always close
    // the document again.
    PDDocument document = PDDocument.load(inputStream);
    try {
        if (document.isEncrypted()) {
            throw new EncryptionNotSupportedException("Error: Cannot read metadata from encrypted document.");
        }
        return XMPUtil.getXMPMetadata(document);
    } finally {
        document.close();
    }
}
/**
* @return empty Optional if no metadata has been found
*/
// Extracts the XMP metadata of an already-open document.
// Returns an empty Optional when the document carries no XMP stream.
private static Optional<XMPMetadata> getXMPMetadata(PDDocument document) throws IOException {
    PDMetadata metaRaw = document.getDocumentCatalog().getMetadata();
    if (metaRaw == null) {
        return Optional.empty();
    }

    // Parse the raw stream into a DOM document, closing the stream afterwards.
    Document xmpDocument;
    try (InputStream is = metaRaw.createInputStream()) {
        xmpDocument = XMLUtil.parse(is);
    }

    XMPMetadata meta = new XMPMetadata(xmpDocument);
    meta.addXMLNSMapping(XMPSchemaBibtex.NAMESPACE, XMPSchemaBibtex.class);
    return Optional.of(meta);
}
/**
* Will read the XMPMetadata from the given pdf file, closing the file
* afterwards.
*
* @param file
* The file to read the XMPMetadata from.
* @return The XMPMetadata object found in the file
*/
public static Optional<XMPMetadata> readRawXMP(File file) throws IOException {
    // Open the file, delegate to the stream-based variant, and let
    // try-with-resources close the stream in every case.
    try (InputStream inputStream = new FileInputStream(file)) {
        return XMPUtil.readRawXMP(inputStream);
    }
}
// Maps one BibTeX entry onto a Dublin Core schema.
// FIX: the previous version mixed reads from 'entry' (unresolved) and
// 'resolvedEntry' (strings resolved): year, publisher, keywords, title and
// the relation fallback bypassed string resolution while editor, author,
// abstract and doi used it. All reads now go through resolvedEntry.
private static void writeToDCSchema(XMPSchemaDublinCore dcSchema,
        BibEntry entry, BibDatabase database) {
    BibEntry resolvedEntry;
    if (database == null) {
        resolvedEntry = entry;
    } else {
        resolvedEntry = database.resolveForStrings(entry, false);
    }

    // Query privacy filter settings.
    JabRefPreferences prefs = JabRefPreferences.getInstance();
    boolean useXmpPrivacyFilter =
            prefs.getBoolean(JabRefPreferences.USE_XMP_PRIVACY_FILTER);
    // Fields for which no XMP data may be written.
    Set<String> filters = new TreeSet<>(prefs.getStringList(JabRefPreferences.XMP_PRIVACY_FILTERS));

    for (String field : resolvedEntry.getFieldNames()) {

        if (useXmpPrivacyFilter && filters.contains(field)) {
            continue;
        }

        if ("editor".equals(field)) {
            /*
             * Editor -> Contributor (dc:contributor, bag ProperName):
             * contributors to the resource other than the authors.
             */
            String editors = resolvedEntry.getField(field);
            AuthorList list = AuthorList.getAuthorList(editors);
            for (AuthorList.Author author : list.getAuthorList()) {
                dcSchema.addContributor(author.getFirstLast(false));
            }
            continue;
        }

        if ("author".equals(field)) {
            /*
             * Author -> Creator (dc:creator, seq ProperName): the authors of
             * the resource, listed in order of precedence.
             */
            String authors = resolvedEntry.getField(field);
            AuthorList list = AuthorList.getAuthorList(authors);
            int n = list.size();
            for (int i = 0; i < n; i++) {
                dcSchema.addCreator(list.getAuthor(i).getFirstLast(false));
            }
            continue;
        }

        if ("month".equals(field)) {
            // Dealt with together with "year" below.
            continue;
        }

        if ("year".equals(field)) {
            /*
             * Year + Month -> Date (dc:date, seq Date).
             */
            String publicationDate = resolvedEntry.getPublicationDate();
            if (publicationDate != null) {
                dcSchema.addSequenceValue("dc:date", publicationDate);
            }
            continue;
        }

        if ("abstract".equals(field)) {
            /*
             * Abstract -> Description (dc:description, Lang Alt): a textual
             * description of the content of the resource.
             */
            dcSchema.setDescription(resolvedEntry.getField(field));
            continue;
        }

        if ("doi".equals(field)) {
            /*
             * DOI -> Identifier (dc:identifier, Text): unique identifier of
             * the resource.
             */
            dcSchema.setIdentifier(resolvedEntry.getField(field));
            continue;
        }

        if ("publisher".equals(field)) {
            /*
             * Publisher -> Publisher (dc:publisher, bag ProperName).
             */
            dcSchema.addPublisher(resolvedEntry.getField(field));
            continue;
        }

        if ("keywords".equals(field)) {
            /*
             * Keywords -> Subject (dc:subject, bag Text): descriptive
             * phrases or keywords; the comma-separated BibTeX value is
             * split into individual, trimmed subjects.
             */
            String[] keywords = resolvedEntry.getField(field).split(",");
            for (String keyword : keywords) {
                dcSchema.addSubject(keyword.trim());
            }
            continue;
        }

        if ("title".equals(field)) {
            /*
             * Title -> Title (dc:title, Lang Alt): the name by which the
             * resource is formally known.
             */
            dcSchema.setTitle(resolvedEntry.getField(field));
            continue;
        }

        /*
         * All other fields (including the bibtex key) are packaged into the
         * dc:relation attribute as "bibtex/<field>/<value>".
         */
        dcSchema.addRelation("bibtex/" + field + '/' + resolvedEntry.getField(field));
    }

    /*
     * Format (dc:format, MIMEType): the save format of the data.
     */
    dcSchema.setFormat("application/pdf");

    /*
     * Type -> Type (dc:type, bag open Choice): a document type, e.g. novel,
     * poem, or working paper.
     */
    TypedBibEntry typedEntry = new TypedBibEntry(resolvedEntry, Optional.empty(), BibDatabaseMode.BIBTEX);
    String typeForDisplay = typedEntry.getTypeForDisplay();
    if (!typeForDisplay.isEmpty()) {
        dcSchema.addType(typeForDisplay);
    }
}
/**
* Try to write the given BibTexEntry as a DublinCore XMP Schema
*
* Existing DublinCore schemas in the document are not modified.
*
* @param document
* The pdf document to write to.
* @param entry
* The BibTeX entry that is written as a schema.
* @param database
* maybenull An optional database which the given BibTeX entries
* belong to, which will be used to resolve strings. If the
* database is null the strings will not be resolved.
* @throws IOException
* @throws TransformerException
*/
public static void writeDublinCore(PDDocument document, BibEntry entry,
        BibDatabase database) throws IOException, TransformerException {
    // Delegate with an immutable single-element list instead of building a
    // mutable ArrayList by hand.
    XMPUtil.writeDublinCore(document, Collections.singletonList(entry), database);
}
/**
* Try to write the given BibTexEntries as DublinCore XMP Schemas
*
* Existing DublinCore schemas in the document are removed
*
* @param document
* The pdf document to write to.
* @param entries
* The BibTeX entries that are written as schemas
* @param database
* maybenull An optional database which the given BibTeX entries
* belong to, which will be used to resolve strings. If the
* database is null the strings will not be resolved.
* @throws IOException
* @throws TransformerException
*/
private static void writeDublinCore(PDDocument document,
        Collection<BibEntry> entries, BibDatabase database)
        throws IOException, TransformerException {
    // Resolve BibTeX strings when a database is available.
    Collection<BibEntry> resolvedEntries;
    if (database == null) {
        resolvedEntries = entries;
    } else {
        resolvedEntries = database.resolveForStrings(entries, false);
    }

    PDDocumentCatalog catalog = document.getDocumentCatalog();
    PDMetadata metaRaw = catalog.getMetadata();

    XMPMetadata meta;
    if (metaRaw == null) {
        meta = new XMPMetadata();
    } else {
        // FIX: close the raw metadata stream after parsing; the previous
        // version leaked it (compare getXMPMetadata, which closes it).
        try (InputStream is = metaRaw.createInputStream()) {
            meta = new XMPMetadata(XMLUtil.parse(is));
        }
    }

    // Remove all current Dublin-Core schemas before writing the new ones.
    List<XMPSchema> schemas = meta
            .getSchemasByNamespaceURI(XMPSchemaDublinCore.NAMESPACE);
    for (XMPSchema schema : schemas) {
        schema.getElement().getParentNode().removeChild(schema.getElement());
    }

    // Entries are already resolved above, so no database is passed on.
    for (BibEntry entry : resolvedEntries) {
        XMPSchemaDublinCore dcSchema = new XMPSchemaDublinCore(meta);
        XMPUtil.writeToDCSchema(dcSchema, entry, null);
        meta.addSchema(dcSchema);
    }

    // Save to a buffer and feed that buffer back into the PDF.
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    meta.save(os);
    ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray());
    PDMetadata metadataStream = new PDMetadata(document, is, false);
    catalog.setMetadata(metadataStream);
}
/**
* Try to write the given BibTexEntry in the Document Information (the
* properties of the pdf).
*
* Existing fields values are overriden if the bibtex entry has the
* corresponding value set.
*
* @param document
* The pdf document to write to.
* @param entry
* The Bibtex entry that is written into the PDF properties. *
* @param database
* maybenull An optional database which the given bibtex entries
* belong to, which will be used to resolve strings. If the
* database is null the strings will not be resolved.
*/
private static void writeDocumentInformation(PDDocument document,
        BibEntry entry, BibDatabase database) {
    PDDocumentInformation di = document.getDocumentInformation();

    // Resolve BibTeX strings when a database is available.
    BibEntry resolvedEntry = (database == null) ? entry : database.resolveForStrings(entry, false);

    // Query privacy filter settings.
    JabRefPreferences prefs = JabRefPreferences.getInstance();
    boolean useXmpPrivacyFilter =
            prefs.getBoolean(JabRefPreferences.USE_XMP_PRIVACY_FILTER);
    // Fields that must not be written to the document properties.
    Set<String> filters = new TreeSet<>(prefs.getStringList(JabRefPreferences.XMP_PRIVACY_FILTERS));

    for (String field : resolvedEntry.getFieldNames()) {
        // A filtered field is actively erased (value null) instead of merely
        // skipped, so stale values from earlier writes do not survive; the
        // set/erase target is identical either way, hence one switch.
        String value = (useXmpPrivacyFilter && filters.contains(field))
                ? null
                : resolvedEntry.getField(field);
        switch (field) {
        case "author":
            di.setAuthor(value);
            break;
        case "title":
            di.setTitle(value);
            break;
        case "keywords":
            di.setKeywords(value);
            break;
        case "abstract":
            di.setSubject(value);
            break;
        default:
            di.setCustomMetadataValue("bibtex/" + field, value);
            break;
        }
    }

    di.setCustomMetadataValue("bibtex/entrytype", EntryUtil.capitalizeFirst(resolvedEntry.getType()));
}
/**
* Try to write the given BibTexEntry in the XMP-stream of the given
* PDF-file.
*
* Throws an IOException if the file cannot be read or written, so the user
* can remove a lock or cancel the operation.
*
* The method will overwrite existing BibTeX-XMP-data, but keep other
* existing metadata.
*
* @param file
* The file to write the entries to.
* @param bibtexEntries
* The entries to write to the file. *
* @param database
* maybenull An optional database which the given bibtex entries
* belong to, which will be used to resolve strings. If the
* database is null the strings will not be resolved.
* @param writePDFInfo
* Write information also in PDF document properties
* @throws TransformerException
* If the entry was malformed or unsupported.
* @throws IOException
* If the file could not be written to or could not be found.
*/
public static void writeXMP(File file,
        Collection<BibEntry> bibtexEntries, BibDatabase database,
        boolean writePDFInfo) throws IOException, TransformerException {
    // Resolve BibTeX strings when a database is available.
    Collection<BibEntry> resolvedEntries;
    if (database == null) {
        resolvedEntries = bibtexEntries;
    } else {
        resolvedEntries = database.resolveForStrings(bibtexEntries, false);
    }

    try (PDDocument document = PDDocument.load(file.getAbsoluteFile())) {
        if (document.isEncrypted()) {
            throw new EncryptionNotSupportedException(
                    "Error: Cannot add metadata to encrypted document.");
        }

        // Document information and Dublin Core are only written for a
        // single entry; entries are already resolved, so pass null.
        if (writePDFInfo && (resolvedEntries.size() == 1)) {
            XMPUtil.writeDocumentInformation(document, resolvedEntries
                    .iterator().next(), null);
            XMPUtil.writeDublinCore(document, resolvedEntries, null);
        }

        PDDocumentCatalog catalog = document.getDocumentCatalog();
        PDMetadata metaRaw = catalog.getMetadata();

        XMPMetadata meta;
        if (metaRaw == null) {
            meta = new XMPMetadata();
        } else {
            // FIX: close the raw metadata stream after parsing; the previous
            // version leaked it (compare getXMPMetadata, which closes it).
            try (InputStream is = metaRaw.createInputStream()) {
                meta = new XMPMetadata(XMLUtil.parse(is));
            }
        }
        meta.addXMLNSMapping(XMPSchemaBibtex.NAMESPACE,
                XMPSchemaBibtex.class);

        // Remove all current Bibtex-schemas before writing the new ones.
        List<XMPSchema> schemas = meta
                .getSchemasByNamespaceURI(XMPSchemaBibtex.NAMESPACE);
        for (XMPSchema schema : schemas) {
            XMPSchemaBibtex bib = (XMPSchemaBibtex) schema;
            bib.getElement().getParentNode().removeChild(bib.getElement());
        }

        for (BibEntry e : resolvedEntries) {
            XMPSchemaBibtex bibtex = new XMPSchemaBibtex(meta);
            meta.addSchema(bibtex);
            bibtex.setBibtexEntry(e, null);
        }

        // Save to a buffer and feed that buffer back into the PDF.
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        meta.save(os);
        ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray());
        PDMetadata metadataStream = new PDMetadata(document, is, false);
        catalog.setMetadata(metadataStream);

        // Persist the modified document.
        try {
            document.save(file.getAbsolutePath());
        } catch (COSVisitorException e) {
            throw new TransformerException("Could not write XMP-metadata: "
                    + e.getLocalizedMessage());
        }
    }
}
/**
* Print usage information for the command line tool xmpUtil.
*
* @see XMPUtil#main(String[])
*/
// Prints the command-line help for the xmpUtil tool to stdout.
// NOTE(review): the literal spacing in the example lines is user-visible
// help formatting and must be preserved exactly.
private static void usage() {
System.out.println("Read or write XMP-metadata from or to pdf file.");
System.out.println("");
System.out.println("Usage:");
System.out.println("Read from PDF and print as bibtex:");
System.out.println(" xmpUtil <pdf>");
System.out.println("Read from PDF and print raw XMP:");
System.out.println(" xmpUtil -x <pdf>");
System.out
.println("Write the entry in <bib> given by <key> to the PDF:");
System.out.println(" xmpUtil <key> <bib> <pdf>");
System.out.println("Write all entries in <bib> to the PDF:");
System.out.println(" xmpUtil <bib> <pdf>");
System.out.println("");
System.out
.println("To report bugs visit http://jabref.sourceforge.net");
}
/**
 * Command-line tool for working with XMP-data.
 *
 * Read or write XMP-metadata from or to pdf file.
 *
 * Usage:
 * <dl>
 * <dd>Read from PDF and print as bibtex:</dd>
 * <dt>xmpUtil PDF</dt>
 * <dd>Read from PDF and print raw XMP:</dd>
 * <dt>xmpUtil -x PDF</dt>
 * <dd>Write the entry in BIB given by KEY to the PDF:</dd>
 * <dt>xmpUtil KEY BIB PDF</dt>
 * <dd>Write all entries in BIB to the PDF:</dd>
 * <dt>xmpUtil BIB PDF</dt>
 * </dl>
 *
 * @param args
 *            Command line strings passed to utility.
 * @throws IOException
 *             If any of the given files could not be read or written.
 * @throws TransformerException
 *             If the given BibEntry is malformed.
 */
public static void main(String[] args) throws IOException,
        TransformerException {
    // Don't forget to initialize the preferences
    if (Globals.prefs == null) {
        Globals.prefs = JabRefPreferences.getInstance();
    }
    switch (args.length) {
    case 0:
        XMPUtil.usage();
        break;
    case 1:
        if (args[0].endsWith(".pdf")) {
            // Read from pdf and write as BibTex
            List<BibEntry> l = XMPUtil.readXMP(new File(args[0]));
            BibEntryWriter bibtexEntryWriter = new BibEntryWriter(new LatexFieldFormatter(), false);
            for (BibEntry entry : l) {
                StringWriter sw = new StringWriter();
                bibtexEntryWriter.write(entry, sw, BibDatabaseMode.BIBTEX);
                System.out.println(sw.getBuffer());
            }
        } else if (args[0].endsWith(".bib")) {
            // Read from bib and write as XMP
            try (FileReader fr = new FileReader(args[0])) {
                ParserResult result = BibtexParser.parse(fr);
                Collection<BibEntry> entries = result.getDatabase().getEntries();
                if (entries.isEmpty()) {
                    System.err.println("Could not find BibEntry in " + args[0]);
                } else {
                    System.out.println(XMPUtil.toXMP(entries, result.getDatabase()));
                }
            }
        } else {
            XMPUtil.usage();
        }
        break;
    case 2:
        if ("-x".equals(args[0]) && args[1].endsWith(".pdf")) {
            // Read from pdf and print the raw XMP document
            Optional<XMPMetadata> meta = XMPUtil.readRawXMP(new File(args[1]));
            if (meta.isPresent()) {
                XMLUtil.save(meta.get().getXMPDocument(), System.out, StandardCharsets.UTF_8.name());
            } else {
                System.err
                        .println("The given pdf does not contain any XMP-metadata.");
            }
            break;
        }
        if (args[0].endsWith(".bib") && args[1].endsWith(".pdf")) {
            // BUGFIX: the FileReader was previously leaked; close it via
            // try-with-resources once parsing is done.
            ParserResult result;
            try (FileReader fr = new FileReader(args[0])) {
                result = BibtexParser.parse(fr);
            }
            Collection<BibEntry> entries = result.getDatabase().getEntries();
            if (entries.isEmpty()) {
                System.err.println("Could not find BibEntry in " + args[0]);
            } else {
                XMPUtil.writeXMP(new File(args[1]), entries, result.getDatabase(), false);
                System.out.println("XMP written.");
            }
            break;
        }
        XMPUtil.usage();
        break;
    case 3:
        // BUGFIX: was '&&', which only rejected the arguments when BOTH
        // extensions were wrong ("xmpUtil KEY foo.txt bar.pdf" slipped
        // through). Either wrong extension must print usage.
        if (!args[1].endsWith(".bib") || !args[2].endsWith(".pdf")) {
            XMPUtil.usage();
            break;
        }
        // BUGFIX: the FileReader was previously leaked; close it via
        // try-with-resources once parsing is done.
        ParserResult result;
        try (FileReader fr = new FileReader(args[1])) {
            result = BibtexParser.parse(fr);
        }
        BibEntry bibEntry = result.getDatabase().getEntryByKey(args[0]);
        if (bibEntry == null) {
            // BUGFIX: the message printed the key twice; report the bib file
            // that was searched instead.
            System.err.println("Could not find BibEntry " + args[0]
                    + " in " + args[1]);
        } else {
            XMPUtil.writeXMP(new File(args[2]), bibEntry, result.getDatabase());
            System.out.println("XMP written.");
        }
        break;
    default:
        XMPUtil.usage();
    }
}
/**
 * see XMPUtil.hasMetadata(InputStream)
 */
public static boolean hasMetadata(Path path) {
    // Open the file read-only and delegate; any I/O problem is logged and
    // reported as "no metadata".
    boolean containsMetadata = false;
    try (InputStream stream = Files.newInputStream(path, StandardOpenOption.READ)) {
        containsMetadata = hasMetadata(stream);
    } catch (IOException e) {
        LOGGER.error("XMP reading failed", e);
    }
    return containsMetadata;
}
/**
 * Will try to read XMP metadata from the given file, returning whether
 * metadata was found.
 *
 * Caution: This method is as expensive as it is reading the actual metadata
 * itself from the PDF.
 *
 * @param inputStream
 *            The inputStream to read the PDF from.
 * @return whether a BibEntry was found in the given PDF.
 */
public static boolean hasMetadata(InputStream inputStream) {
    try {
        // Metadata exists iff the full read yields at least one entry.
        return !XMPUtil.readXMP(inputStream).isEmpty();
    } catch (EncryptionNotSupportedException ex) {
        LOGGER.info("Encryption not supported by XMPUtil");
        return false;
    } catch (IOException e) {
        LOGGER.error("XMP reading failed", e);
        return false;
    }
}
}
/* Copyright (C) 2003-2016 JabRef contributors.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package net.sf.jabref.logic.xmp;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.*;
import javax.xml.transform.TransformerException;
import net.sf.jabref.*;
import net.sf.jabref.exporter.LatexFieldFormatter;
import net.sf.jabref.importer.fileformat.BibtexParser;
import net.sf.jabref.importer.ParserResult;
import net.sf.jabref.model.database.BibDatabaseMode;
import net.sf.jabref.model.entry.*;
import net.sf.jabref.bibtex.BibEntryWriter;
import net.sf.jabref.model.database.BibDatabase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.jempbox.impl.DateConverter;
import org.apache.jempbox.impl.XMLUtil;
import org.apache.jempbox.xmp.XMPMetadata;
import org.apache.jempbox.xmp.XMPSchema;
import org.apache.jempbox.xmp.XMPSchemaDublinCore;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.exceptions.COSVisitorException;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDDocumentCatalog;
import org.apache.pdfbox.pdmodel.PDDocumentInformation;
import org.apache.pdfbox.pdmodel.common.PDMetadata;
import org.w3c.dom.Document;
/**
* XMPUtils provide support for reading and writing BibTex data as XMP-Metadata
* in PDF-documents.
*/
public class XMPUtil {
private static final Log LOGGER = LogFactory.getLog(XMPUtil.class);
/**
 * Convenience method for readXMP(File).
 *
 * @param filename
 *            The filename from which to open the file.
 * @return BibtexEntryies found in the PDF or an empty list
 * @throws IOException
 */
public static List<BibEntry> readXMP(String filename) throws IOException {
    File pdfFile = new File(filename);
    return XMPUtil.readXMP(pdfFile);
}
/**
 * Write the given BibTeX entry into the XMP stream of the named PDF file.
 *
 * Existing BibTeX-XMP data is overwritten; all other metadata is kept.
 * An IOException is thrown if the file cannot be read or written, so the
 * user can remove a lock or cancel the operation. Convenience overload of
 * {@link #writeXMP(File, BibEntry, BibDatabase)}.
 *
 * @param filename
 *            Path of the PDF file to modify.
 * @param entry
 *            The entry to write.
 * @param database
 *            maybenull An optional database which the given bibtex entries
 *            belong to, which will be used to resolve strings. If the
 *            database is null the strings will not be resolved.
 * @throws TransformerException
 *             If the entry was malformed or unsupported.
 * @throws IOException
 *             If the file could not be written to or could not be found.
 */
public static void writeXMP(String filename, BibEntry entry,
        BibDatabase database) throws IOException, TransformerException {
    File pdfFile = new File(filename);
    XMPUtil.writeXMP(pdfFile, entry, database);
}
/**
 * Try to read the BibTexEntries from the XMP-stream of the given PDF-file.
 *
 * @param file
 *            The file to read from.
 * @return entries found in the PDF; may be empty, never null.
 *
 * @throws IOException
 *             Throws an IOException if the file cannot be read, so the user
 *             than remove a lock or cancel the operation.
 */
public static List<BibEntry> readXMP(File file) throws IOException {
    // FIX: the old code seeded the result with the raw-typed
    // Collections.EMPTY_LIST (unchecked assignment) and then unconditionally
    // overwrote it. readXMP(InputStream) never returns null, so we can
    // simply return its result; try-with-resources still closes the stream.
    try (FileInputStream inputStream = new FileInputStream(file)) {
        return XMPUtil.readXMP(inputStream);
    }
}
/**
 * Try to read the given BibTexEntry from the XMP-stream of the given
 * inputstream containing a PDF-file.
 *
 * Lookup order: bibtex XMP schemas first, then Dublin Core schemas, and
 * finally the plain PDF document information — the first source that yields
 * at least one entry wins.
 *
 * @param inputStream
 *            The inputstream to read from.
 *
 * @throws IOException
 *             Throws an IOException if the file cannot be read, so the user
 *             than remove a lock or cancel the operation.
 *
 * @return list of BibEntries retrieved from the stream. May be empty, but never null
 */
public static List<BibEntry> readXMP(InputStream inputStream)
        throws IOException {
    List<BibEntry> result = new LinkedList<>();
    // PDDocument is AutoCloseable; the document is closed even on throw.
    try (PDDocument document = PDDocument.load(inputStream)) {
        if (document.isEncrypted()) {
            throw new EncryptionNotSupportedException("Error: Cannot read metadata from encrypted document.");
        }
        Optional<XMPMetadata> meta = XMPUtil.getXMPMetadata(document);
        if (meta.isPresent()) {
            // Primary source: every bibtex-namespace schema becomes one entry.
            List<XMPSchema> schemas = meta.get().getSchemasByNamespaceURI(XMPSchemaBibtex.NAMESPACE);
            for (XMPSchema schema : schemas) {
                XMPSchemaBibtex bib = (XMPSchemaBibtex) schema;
                BibEntry entry = bib.getBibtexEntry();
                // Entries without a type default to "misc".
                if (entry.getType() == null) {
                    entry.setType("misc");
                }
                result.add(entry);
            }
            // If we did not find anything have a look if a Dublin Core exists
            if (result.isEmpty()) {
                schemas = meta.get().getSchemasByNamespaceURI(XMPSchemaDublinCore.NAMESPACE);
                for (XMPSchema schema : schemas) {
                    XMPSchemaDublinCore dc = (XMPSchemaDublinCore) schema;
                    Optional<BibEntry> entry = XMPUtil.getBibtexEntryFromDublinCore(dc);
                    if (entry.isPresent()) {
                        if (entry.get().getType() == null) {
                            entry.get().setType("misc");
                        }
                        result.add(entry.get());
                    }
                }
            }
        }
        if (result.isEmpty()) {
            // If we did not find any XMP metadata, search for non XMP metadata
            PDDocumentInformation documentInformation = document.getDocumentInformation();
            Optional<BibEntry> entry = XMPUtil.getBibtexEntryFromDocumentInformation(documentInformation);
            if (entry.isPresent()) {
                result.add(entry.get());
            }
        }
    }
    // return empty list, if no metadata was found
    if (result.isEmpty()) {
        return Collections.emptyList();
    }
    return result;
}
/**
 * Helper function for retrieving a BibEntry from the
 * PDDocumentInformation in a PDF file.
 *
 * To understand how to get hold of a PDDocumentInformation have a look in
 * the test cases for XMPUtil.
 *
 * The BibEntry is build by mapping individual fields in the document
 * information (like author, title, keywords) to fields in a bibtex entry.
 *
 * @param di
 *            The document information from which to build a BibEntry.
 *
 * @return The bibtex entry found in the document information.
 */
public static Optional<BibEntry> getBibtexEntryFromDocumentInformation(
        PDDocumentInformation di) {
    BibEntry entry = new BibEntry();
    entry.setType("misc");

    // Standard PDF properties map straight onto bibtex fields.
    String author = di.getAuthor();
    if (author != null) {
        entry.setField("author", author);
    }
    String title = di.getTitle();
    if (title != null) {
        entry.setField("title", title);
    }
    String keywords = di.getKeywords();
    if (keywords != null) {
        entry.setField("keywords", keywords);
    }
    String subject = di.getSubject();
    if (subject != null) {
        entry.setField("abstract", subject);
    }

    // Custom "bibtex/<field>" properties carry arbitrary bibtex fields;
    // "bibtex/entrytype" overrides the entry type.
    COSDictionary dict = di.getDictionary();
    for (Map.Entry<COSName, COSBase> dictEntry : dict.entrySet()) {
        String key = dictEntry.getKey().getName();
        if (!key.startsWith("bibtex/")) {
            continue;
        }
        String value = dict.getString(key);
        String fieldName = key.substring("bibtex/".length());
        if ("entrytype".equals(fieldName)) {
            entry.setType(value);
        } else {
            entry.setField(fieldName, value);
        }
    }

    // Return empty Optional if no values were found
    if (entry.getFieldNames().isEmpty()) {
        return Optional.empty();
    }
    return Optional.of(entry);
}
/**
 * Helper function for retrieving a BibEntry from the DublinCore metadata
 * in a PDF file.
 *
 * To understand how to get hold of a XMPSchemaDublinCore have a look in the
 * test cases for XMPUtil.
 *
 * The BibEntry is build by mapping individual fields in the dublin core
 * (like creator, title, subject) to fields in a bibtex entry. This is the
 * inverse of the mapping performed by writeToDCSchema.
 *
 * @param dcSchema
 *            The document information from which to build a BibEntry.
 *
 * @return The bibtex entry found in the document information.
 */
public static Optional<BibEntry> getBibtexEntryFromDublinCore(XMPSchemaDublinCore dcSchema) {
    BibEntry entry = new BibEntry();
    /**
     * Contributor -> Editor
     */
    List<String> contributors = dcSchema.getContributors();
    if ((contributors != null) && !contributors.isEmpty()) {
        entry.setField("editor", String.join(" and ", contributors));
    }
    /**
     * Author -> Creator
     */
    List<String> creators = dcSchema.getCreators();
    if ((creators != null) && !creators.isEmpty()) {
        entry.setField("author", String.join(" and ", creators));
    }
    /**
     * Year + Month -> Date
     */
    List<String> dates = dcSchema.getSequenceList("dc:date");
    if ((dates != null) && !dates.isEmpty()) {
        // Only the first dc:date entry is considered.
        String date = dates.get(0).trim();
        Calendar c = null;
        try {
            c = DateConverter.toCalendar(date);
        } catch (IOException ignored) {
            // Ignored: an unparsable date simply yields no year/month.
        }
        if (c != null) {
            entry.setField("year", String.valueOf(c.get(Calendar.YEAR)));
            // A date longer than "yyyy" is assumed to carry month information.
            if (date.length() > 4) {
                entry.setField("month", MonthUtil.getMonthByIndex(c.get(Calendar.MONTH)).bibtexFormat);
            }
        }
    }
    /**
     * Abstract -> Description
     */
    String s = dcSchema.getDescription();
    if (s != null) {
        entry.setField("abstract", s);
    }
    /**
     * Identifier -> DOI
     */
    s = dcSchema.getIdentifier();
    if (s != null) {
        entry.setField("doi", s);
    }
    /**
     * Publisher -> Publisher
     */
    List<String> publishers = dcSchema.getPublishers();
    if ((publishers != null) && !publishers.isEmpty()) {
        // NOTE(review): field name is "publishers" here, while the writer
        // side maps the bibtex field "publisher" — looks asymmetric; confirm.
        entry.setField("publishers", String.join(" and ", publishers));
    }
    /**
     * Relation -> bibtexkey
     *
     * We abuse the relationship attribute to store all other values in the
     * bibtex document. Format is "bibtex/<field>/<value>".
     */
    List<String> relationships = dcSchema.getRelationships();
    if (relationships != null) {
        for (String r : relationships) {
            if (r.startsWith("bibtex/")) {
                r = r.substring("bibtex/".length());
                int i = r.indexOf('/');
                if (i != -1) {
                    entry.setField(r.substring(0, i), r.substring(i + 1));
                }
            }
        }
    }
    /**
     * Rights -> Rights
     */
    s = dcSchema.getRights();
    if (s != null) {
        entry.setField("rights", s);
    }
    /**
     * Source -> Source
     */
    s = dcSchema.getSource();
    if (s != null) {
        entry.setField("source", s);
    }
    /**
     * Subject -> Keywords
     */
    List<String> subjects = dcSchema.getSubjects();
    if (subjects != null) {
        entry.addKeywords(subjects);
    }
    /**
     * Title -> Title
     */
    s = dcSchema.getTitle();
    if (s != null) {
        entry.setField("title", s);
    }
    /**
     * Type -> Type
     */
    List<String> l = dcSchema.getTypes();
    if ((l != null) && !l.isEmpty()) {
        s = l.get(0);
        if (s != null) {
            entry.setType(s);
        }
    }
    // Empty Optional signals that no Dublin Core field was usable.
    return entry.getFieldNames().isEmpty() ? Optional.empty() : Optional.of(entry);
}
/**
 * Try to write the given BibTexEntry in the XMP-stream of the given
 * PDF-file.
 *
 * Throws an IOException if the file cannot be read or written, so the user
 * can remove a lock or cancel the operation.
 *
 * The method will overwrite existing BibTeX-XMP-data, but keep other
 * existing metadata.
 *
 * This is a convenience method for writeXMP(File, Collection).
 *
 * @param file
 *            The file to write to.
 * @param entry
 *            The entry to write.
 * @param database
 *            maybenull An optional database which the given bibtex entries
 *            belong to, which will be used to resolve strings. If the
 *            database is null the strings will not be resolved.
 * @throws TransformerException
 *             If the entry was malformed or unsupported.
 * @throws IOException
 *             If the file could not be written to or could not be found.
 */
public static void writeXMP(File file, BibEntry entry,
        BibDatabase database) throws IOException, TransformerException {
    // Delegate to the collection variant with a one-element list; the
    // collection is only iterated, never mutated.
    XMPUtil.writeXMP(file, Collections.singletonList(entry), database, true);
}
/**
 * Write the given BibtexEntries as XMP-metadata text to the given stream.
 *
 * The text that is written to the stream contains a complete XMP-document.
 *
 * @param bibtexEntries
 *            The BibtexEntries to write XMP-metadata for.
 * @param database
 *            maybenull An optional database which the given bibtex entries
 *            belong to, which will be used to resolve strings. If the
 *            database is null the strings will not be resolved.
 * @throws TransformerException
 *             Thrown if the bibtexEntries could not transformed to XMP.
 * @throws IOException
 *             Thrown if an IOException occured while writing to the stream.
 *
 * @see #toXMP(java.util.Collection, BibDatabase) if you don't need strings to be
 *      resolved.
 */
private static void toXMP(Collection<BibEntry> bibtexEntries,
        BibDatabase database, OutputStream outputStream)
        throws IOException, TransformerException {
    // Resolve BibTeX strings first (a null database means "leave as-is").
    Collection<BibEntry> resolvedEntries = (database == null)
            ? bibtexEntries
            : database.resolveForStrings(bibtexEntries, true);

    // One bibtex schema per entry, all inside a single XMP document.
    XMPMetadata metadata = new XMPMetadata();
    for (BibEntry entry : resolvedEntries) {
        XMPSchemaBibtex schema = new XMPSchemaBibtex(metadata);
        metadata.addSchema(schema);
        schema.setBibtexEntry(entry);
    }
    metadata.save(outputStream);
}
/**
 * Convenience method for toXMP(Collection<BibEntry>, BibDatabase,
 * OutputStream) returning a String containing the XMP-metadata of the given
 * collection of BibtexEntries.
 *
 * The resulting metadata string is wrapped as a complete XMP-document.
 *
 * @param bibtexEntries
 *            The BibtexEntries to return XMP-metadata for.
 * @param database
 *            maybenull An optional database which the given bibtex entries
 *            belong to, which will be used to resolve strings. If the
 *            database is null the strings will not be resolved.
 * @return The XMP representation of the given bibtexEntries.
 * @throws TransformerException
 *             Thrown if the bibtexEntries could not transformed to XMP.
 */
public static String toXMP(Collection<BibEntry> bibtexEntries,
        BibDatabase database) throws TransformerException {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    try {
        XMPUtil.toXMP(bibtexEntries, database, buffer);
    } catch (IOException e) {
        // A ByteArrayOutputStream never throws; this only surfaces errors
        // from the XMP serialization itself.
        throw new TransformerException(e);
    }
    // NOTE(review): toString() uses the platform default charset here —
    // presumably the XMP bytes are UTF-8; verify before changing.
    return buffer.toString();
}
/**
 * Will read the XMPMetadata from the given pdf file, closing the file
 * afterwards.
 *
 * @param inputStream
 *            The inputStream representing a PDF-file to read the
 *            XMPMetadata from.
 * @return The XMPMetadata object found in the file
 */
private static Optional<XMPMetadata> readRawXMP(InputStream inputStream) throws IOException {
    try (PDDocument document = PDDocument.load(inputStream)) {
        if (!document.isEncrypted()) {
            return XMPUtil.getXMPMetadata(document);
        }
        // Encrypted PDFs cannot be inspected; let the caller decide.
        throw new EncryptionNotSupportedException("Error: Cannot read metadata from encrypted document.");
    }
}
/**
 * Extract the raw XMP metadata stream from an (already loaded) PDF
 * document, registering the bibtex schema namespace for later lookups.
 *
 * @return empty Optional if no metadata has been found
 */
private static Optional<XMPMetadata> getXMPMetadata(PDDocument document) throws IOException {
    PDMetadata metaRaw = document.getDocumentCatalog().getMetadata();
    if (metaRaw == null) {
        return Optional.empty();
    }

    // Parse the metadata stream; the stream is closed once parsing is done.
    Document xmpDocument;
    try (InputStream metaStream = metaRaw.createInputStream()) {
        xmpDocument = XMLUtil.parse(metaStream);
    }

    XMPMetadata metadata = new XMPMetadata(xmpDocument);
    metadata.addXMLNSMapping(XMPSchemaBibtex.NAMESPACE, XMPSchemaBibtex.class);
    return Optional.of(metadata);
}
/**
 * Will read the XMPMetadata from the given pdf file, closing the file
 * afterwards.
 *
 * @param file
 *            The file to read the XMPMetadata from.
 * @return The XMPMetadata object found in the file
 */
public static Optional<XMPMetadata> readRawXMP(File file) throws IOException {
    try (InputStream stream = new FileInputStream(file)) {
        return XMPUtil.readRawXMP(stream);
    }
}
/**
 * Map the fields of the given BibTeX entry onto the given Dublin Core
 * schema. Fields with a DC counterpart (editor, author, year/month,
 * abstract, doi, publisher, keywords, title) go to their dedicated DC
 * element; every remaining field — including the bibtex key — is packed
 * into dc:relation as "bibtex/&lt;field&gt;/&lt;value&gt;" so it can be
 * round-tripped by getBibtexEntryFromDublinCore.
 *
 * Fields listed in the XMP privacy filter (if enabled in the preferences)
 * are skipped entirely.
 *
 * @param dcSchema
 *            The schema to fill.
 * @param entry
 *            The entry whose fields are written.
 * @param database
 *            maybenull An optional database used to resolve BibTeX strings;
 *            if null the strings will not be resolved.
 */
private static void writeToDCSchema(XMPSchemaDublinCore dcSchema,
        BibEntry entry, BibDatabase database) {
    BibEntry resolvedEntry;
    if (database == null) {
        resolvedEntry = entry;
    } else {
        resolvedEntry = database.resolveForStrings(entry, false);
    }

    // Query privacy filter settings
    JabRefPreferences prefs = JabRefPreferences.getInstance();
    boolean useXmpPrivacyFilter =
            prefs.getBoolean(JabRefPreferences.USE_XMP_PRIVACY_FILTER);
    // Fields for which not to write XMP data later on:
    Set<String> filters = new TreeSet<>(prefs.getStringList(JabRefPreferences.XMP_PRIVACY_FILTERS));

    // Set all the values including key and entryType.
    // BUGFIX: the old code read some fields (year, publisher, keywords,
    // title and the dc:relation fallback) from the UNRESOLVED entry while
    // reading others (editor, author, abstract, doi) from resolvedEntry.
    // All values are now taken from resolvedEntry so string resolution is
    // applied consistently.
    for (String field : resolvedEntry.getFieldNames()) {
        if (useXmpPrivacyFilter && filters.contains(field)) {
            continue;
        }
        switch (field) {
        case "editor": {
            // Editor -> dc:contributor (bag ProperName)
            String editors = resolvedEntry.getField(field);
            AuthorList list = AuthorList.getAuthorList(editors);
            for (AuthorList.Author author : list.getAuthorList()) {
                dcSchema.addContributor(author.getFirstLast(false));
            }
            break;
        }
        case "author": {
            // Author -> dc:creator (seq ProperName, order of precedence)
            String authors = resolvedEntry.getField(field);
            AuthorList list = AuthorList.getAuthorList(authors);
            int n = list.size();
            for (int i = 0; i < n; i++) {
                dcSchema.addCreator(list.getAuthor(i).getFirstLast(false));
            }
            break;
        }
        case "month":
            // Dealt with together with "year" below.
            break;
        case "year": {
            // Year + Month -> dc:date (seq Date)
            String publicationDate = resolvedEntry.getPublicationDate();
            if (publicationDate != null) {
                dcSchema.addSequenceValue("dc:date", publicationDate);
            }
            break;
        }
        case "abstract":
            // Abstract -> dc:description (Lang Alt)
            dcSchema.setDescription(resolvedEntry.getField(field));
            break;
        case "doi":
            // DOI -> dc:identifier (Text)
            dcSchema.setIdentifier(resolvedEntry.getField(field));
            break;
        case "publisher":
            // Publisher -> dc:publisher (bag ProperName)
            dcSchema.addPublisher(resolvedEntry.getField(field));
            break;
        case "keywords": {
            // Keywords -> dc:subject (bag Text), one element per
            // comma-separated keyword.
            String[] keywords = resolvedEntry.getField(field).split(",");
            for (String keyword : keywords) {
                dcSchema.addSubject(keyword.trim());
            }
            break;
        }
        case "title":
            // Title -> dc:title (Lang Alt)
            dcSchema.setTitle(resolvedEntry.getField(field));
            break;
        default:
            // All other fields (including the bibtex key) are packed into
            // the dc:relation attribute for round-tripping.
            dcSchema.addRelation("bibtex/" + field + '/' + resolvedEntry.getField(field));
            break;
        }
    }

    // dc:format: the save format of the resource.
    dcSchema.setFormat("application/pdf");

    // Entry type -> dc:type (bag open Choice), e.g. "Article".
    TypedBibEntry typedEntry = new TypedBibEntry(resolvedEntry, Optional.empty(), BibDatabaseMode.BIBTEX);
    String typeForDisplay = typedEntry.getTypeForDisplay();
    if (!typeForDisplay.isEmpty()) {
        dcSchema.addType(typeForDisplay);
    }
}
/**
 * Try to write the given BibTexEntry as a DublinCore XMP Schema
 *
 * Existing DublinCore schemas in the document are not modified.
 *
 * @param document
 *            The pdf document to write to.
 * @param entry
 *            The BibTeX entry that is written as a schema.
 * @param database
 *            maybenull An optional database which the given BibTeX entries
 *            belong to, which will be used to resolve strings. If the
 *            database is null the strings will not be resolved.
 * @throws IOException
 * @throws TransformerException
 */
public static void writeDublinCore(PDDocument document, BibEntry entry,
        BibDatabase database) throws IOException, TransformerException {
    // Delegate to the collection variant; the list is only iterated there.
    XMPUtil.writeDublinCore(document, Collections.singletonList(entry), database);
}
/**
 * Try to write the given BibTexEntries as DublinCore XMP Schemas
 *
 * Existing DublinCore schemas in the document are removed
 *
 * @param document
 *            The pdf document to write to.
 * @param entries
 *            The BibTeX entries that are written as schemas
 * @param database
 *            maybenull An optional database which the given BibTeX entries
 *            belong to, which will be used to resolve strings. If the
 *            database is null the strings will not be resolved.
 * @throws IOException
 * @throws TransformerException
 */
private static void writeDublinCore(PDDocument document,
        Collection<BibEntry> entries, BibDatabase database)
        throws IOException, TransformerException {
    Collection<BibEntry> resolvedEntries;
    if (database == null) {
        resolvedEntries = entries;
    } else {
        resolvedEntries = database.resolveForStrings(entries, false);
    }

    PDDocumentCatalog catalog = document.getDocumentCatalog();
    PDMetadata metaRaw = catalog.getMetadata();

    XMPMetadata meta;
    if (metaRaw == null) {
        meta = new XMPMetadata();
    } else {
        // BUGFIX: the stream from createInputStream() was never closed
        // (resource leak); the sibling getXMPMetadata() already closes it.
        try (InputStream metaStream = metaRaw.createInputStream()) {
            meta = new XMPMetadata(XMLUtil.parse(metaStream));
        }
    }

    // Remove all current Dublin-Core schemas
    List<XMPSchema> schemas = meta
            .getSchemasByNamespaceURI(XMPSchemaDublinCore.NAMESPACE);
    for (XMPSchema schema : schemas) {
        schema.getElement().getParentNode().removeChild(schema.getElement());
    }

    for (BibEntry entry : resolvedEntries) {
        XMPSchemaDublinCore dcSchema = new XMPSchemaDublinCore(meta);
        // Entries are already resolved above, so no database is passed on.
        XMPUtil.writeToDCSchema(dcSchema, entry, null);
        meta.addSchema(dcSchema);
    }

    // Save to stream and then input that stream to the PDF
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    meta.save(os);
    ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray());
    PDMetadata metadataStream = new PDMetadata(document, is, false);
    catalog.setMetadata(metadataStream);
}
/**
 * Try to write the given BibTexEntry in the Document Information (the
 * properties of the pdf).
 *
 * Existing fields values are overriden if the bibtex entry has the
 * corresponding value set.
 *
 * @param document
 *            The pdf document to write to.
 * @param entry
 *            The Bibtex entry that is written into the PDF properties. *
 * @param database
 *            maybenull An optional database which the given bibtex entries
 *            belong to, which will be used to resolve strings. If the
 *            database is null the strings will not be resolved.
 */
private static void writeDocumentInformation(PDDocument document,
        BibEntry entry, BibDatabase database) {
    PDDocumentInformation di = document.getDocumentInformation();

    BibEntry resolvedEntry = (database == null) ? entry : database.resolveForStrings(entry, false);

    // Query privacy filter settings
    JabRefPreferences prefs = JabRefPreferences.getInstance();
    boolean useXmpPrivacyFilter =
            prefs.getBoolean(JabRefPreferences.USE_XMP_PRIVACY_FILTER);
    // Fields for which not to write XMP data later on:
    Set<String> filters = new TreeSet<>(prefs.getStringList(JabRefPreferences.XMP_PRIVACY_FILTERS));

    // Set all the values including key and entryType. A field caught by the
    // privacy filter is actively erased (null value); any other field
    // overwrites the corresponding document property.
    for (String field : resolvedEntry.getFieldNames()) {
        String value = (useXmpPrivacyFilter && filters.contains(field))
                ? null
                : resolvedEntry.getField(field);
        switch (field) {
        case "author":
            di.setAuthor(value);
            break;
        case "title":
            di.setTitle(value);
            break;
        case "keywords":
            di.setKeywords(value);
            break;
        case "abstract":
            di.setSubject(value);
            break;
        default:
            di.setCustomMetadataValue("bibtex/" + field, value);
            break;
        }
    }
    di.setCustomMetadataValue("bibtex/entrytype", EntryUtil.capitalizeFirst(resolvedEntry.getType()));
}
/**
 * Try to write the given BibTexEntry in the XMP-stream of the given
 * PDF-file.
 *
 * Throws an IOException if the file cannot be read or written, so the user
 * can remove a lock or cancel the operation.
 *
 * The method will overwrite existing BibTeX-XMP-data, but keep other
 * existing metadata.
 *
 * @param file
 *            The file to write the entries to.
 * @param bibtexEntries
 *            The entries to write to the file. *
 * @param database
 *            maybenull An optional database which the given bibtex entries
 *            belong to, which will be used to resolve strings. If the
 *            database is null the strings will not be resolved.
 * @param writePDFInfo
 *            Write information also in PDF document properties
 * @throws TransformerException
 *             If the entry was malformed or unsupported.
 * @throws IOException
 *             If the file could not be written to or could not be found.
 */
public static void writeXMP(File file,
        Collection<BibEntry> bibtexEntries, BibDatabase database,
        boolean writePDFInfo) throws IOException, TransformerException {
    Collection<BibEntry> resolvedEntries;
    if (database == null) {
        resolvedEntries = bibtexEntries;
    } else {
        resolvedEntries = database.resolveForStrings(bibtexEntries, false);
    }

    try (PDDocument document = PDDocument.load(file.getAbsoluteFile())) {
        if (document.isEncrypted()) {
            throw new EncryptionNotSupportedException(
                    "Error: Cannot add metadata to encrypted document.");
        }

        if (writePDFInfo && (resolvedEntries.size() == 1)) {
            // Single entry: also mirror it into the PDF document properties
            // and a Dublin Core schema. Entries are already resolved, so no
            // database is passed on.
            XMPUtil.writeDocumentInformation(document, resolvedEntries
                    .iterator().next(), null);
            XMPUtil.writeDublinCore(document, resolvedEntries, null);
        }

        PDDocumentCatalog catalog = document.getDocumentCatalog();
        PDMetadata metaRaw = catalog.getMetadata();

        XMPMetadata meta;
        if (metaRaw == null) {
            meta = new XMPMetadata();
        } else {
            // BUGFIX: the stream from createInputStream() was never closed
            // (resource leak); close it once parsing is done.
            try (InputStream metaStream = metaRaw.createInputStream()) {
                meta = new XMPMetadata(XMLUtil.parse(metaStream));
            }
        }
        meta.addXMLNSMapping(XMPSchemaBibtex.NAMESPACE,
                XMPSchemaBibtex.class);

        // Remove all current Bibtex-schemas
        List<XMPSchema> schemas = meta
                .getSchemasByNamespaceURI(XMPSchemaBibtex.NAMESPACE);
        for (XMPSchema schema : schemas) {
            XMPSchemaBibtex bib = (XMPSchemaBibtex) schema;
            bib.getElement().getParentNode().removeChild(bib.getElement());
        }

        for (BibEntry e : resolvedEntries) {
            XMPSchemaBibtex bibtex = new XMPSchemaBibtex(meta);
            meta.addSchema(bibtex);
            bibtex.setBibtexEntry(e, null);
        }

        // Save to stream and then input that stream to the PDF
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        meta.save(os);
        ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray());
        PDMetadata metadataStream = new PDMetadata(document, is, false);
        catalog.setMetadata(metadataStream);

        // Save
        try {
            document.save(file.getAbsolutePath());
        } catch (COSVisitorException e) {
            throw new TransformerException("Could not write XMP-metadata: "
                    + e.getLocalizedMessage());
        }
    }
}
/**
 * Print usage information for the command line tool xmpUtil.
 *
 * @see XMPUtil#main(String[])
 */
private static void usage() {
    // Emit the help text line by line; output is identical to printing each
    // line individually.
    String[] helpLines = {
            "Read or write XMP-metadata from or to pdf file.",
            "",
            "Usage:",
            "Read from PDF and print as bibtex:",
            " xmpUtil <pdf>",
            "Read from PDF and print raw XMP:",
            " xmpUtil -x <pdf>",
            "Write the entry in <bib> given by <key> to the PDF:",
            " xmpUtil <key> <bib> <pdf>",
            "Write all entries in <bib> to the PDF:",
            " xmpUtil <bib> <pdf>",
            "",
            "To report bugs visit http://jabref.sourceforge.net"
    };
    for (String line : helpLines) {
        System.out.println(line);
    }
}
/**
 * Command-line tool for working with XMP-data.
 *
 * Read or write XMP-metadata from or to pdf file.
 *
 * Usage:
 * <dl>
 * <dd>Read from PDF and print as bibtex:</dd>
 * <dt>xmpUtil PDF</dt>
 * <dd>Read from PDF and print raw XMP:</dd>
 * <dt>xmpUtil -x PDF</dt>
 * <dd>Write the entry in BIB given by KEY to the PDF:</dd>
 * <dt>xmpUtil KEY BIB PDF</dt>
 * <dd>Write all entries in BIB to the PDF:</dd>
 * <dt>xmpUtil BIB PDF</dt>
 * </dl>
 *
 * The operation performed is chosen purely by the number of arguments and
 * the file extensions of those arguments (".pdf" / ".bib").
 *
 * @param args
 *            Command line strings passed to utility.
 * @throws IOException
 *             If any of the given files could not be read or written.
 * @throws TransformerException
 *             If the given BibEntry is malformed.
 */
public static void main(String[] args) throws IOException,
        TransformerException {
    // Don't forget to initialize the preferences
    if (Globals.prefs == null) {
        Globals.prefs = JabRefPreferences.getInstance();
    }
    switch (args.length) {
    case 0:
        // No arguments at all: just show the help text.
        XMPUtil.usage();
        break;
    case 1:
        if (args[0].endsWith(".pdf")) {
            // Read from pdf and write as BibTex
            List<BibEntry> l = XMPUtil.readXMP(new File(args[0]));
            BibEntryWriter bibtexEntryWriter = new BibEntryWriter(new LatexFieldFormatter(), false);
            for (BibEntry entry : l) {
                StringWriter sw = new StringWriter();
                bibtexEntryWriter.write(entry, sw, BibDatabaseMode.BIBTEX);
                System.out.println(sw.getBuffer());
            }
        } else if (args[0].endsWith(".bib")) {
            // Read from bib and write as XMP
            try (FileReader fr = new FileReader(args[0])) {
                ParserResult result = BibtexParser.parse(fr);
                Collection<BibEntry> entries = result.getDatabase().getEntries();
                if (entries.isEmpty()) {
                    System.err.println("Could not find BibEntry in " + args[0]);
                } else {
                    System.out.println(XMPUtil.toXMP(entries, result.getDatabase()));
                }
            }
        } else {
            // Unknown extension: show the help text.
            XMPUtil.usage();
        }
        break;
    case 2:
        if ("-x".equals(args[0]) && args[1].endsWith(".pdf")) {
            // Read from pdf and write as BibTex
            Optional<XMPMetadata> meta = XMPUtil.readRawXMP(new File(args[1]));
            if (meta.isPresent()) {
                XMLUtil.save(meta.get().getXMPDocument(), System.out, StandardCharsets.UTF_8.name());
            } else {
                System.err
                        .println("The given pdf does not contain any XMP-metadata.");
            }
            break;
        }
        if (args[0].endsWith(".bib") && args[1].endsWith(".pdf")) {
            // Write ALL entries of the .bib file into the PDF's XMP metadata.
            ParserResult result = BibtexParser
                    .parse(new FileReader(args[0]));
            Collection<BibEntry> entries = result.getDatabase()
                    .getEntries();
            if (entries.isEmpty()) {
                System.err.println("Could not find BibEntry in "
                        + args[0]);
            } else {
                XMPUtil.writeXMP(new File(args[1]), entries, result
                        .getDatabase(), false);
                System.out.println("XMP written.");
            }
            break;
        }
        // Neither two-argument form matched: show the help text.
        XMPUtil.usage();
        break;
    case 3:
        // Write the single entry identified by KEY (args[0]) into the PDF.
        if (!args[1].endsWith(".bib") && !args[2].endsWith(".pdf")) {
            XMPUtil.usage();
            break;
        }
        ParserResult result = BibtexParser.parse(new FileReader(args[1]));
        BibEntry bibEntry = result.getDatabase().getEntryByKey(args[0]);
        if (bibEntry == null) {
            System.err.println("Could not find BibEntry " + args[0]
                    + " in " + args[0]);
        } else {
            XMPUtil.writeXMP(new File(args[2]), bibEntry, result.getDatabase());
            System.out.println("XMP written.");
        }
        break;
    default:
        // More than three arguments is not a supported invocation.
        XMPUtil.usage();
    }
}
/**
 * Convenience overload of {@link XMPUtil#hasMetadata(InputStream)} that
 * opens the given path for reading and delegates to the stream variant.
 *
 * see XMPUtil.hasMetadata(InputStream)
 */
public static boolean hasMetadata(Path path) {
    try (InputStream stream = Files.newInputStream(path, StandardOpenOption.READ)) {
        return hasMetadata(stream);
    } catch (IOException e) {
        // Treat an unreadable file as "no metadata" rather than propagating.
        LOGGER.error("XMP reading failed", e);
        return false;
    }
}
/**
 * Will try to read XMP metadata from the given file, returning whether
 * metadata was found.
 *
 * Caution: This method is as expensive as it is reading the actual metadata
 * itself from the PDF.
 *
 * @param inputStream
 *            The inputStream to read the PDF from.
 * @return whether a BibEntry was found in the given PDF.
 */
public static boolean hasMetadata(InputStream inputStream) {
    // Fix: parameter was misspelled "inputsStream" (code and javadoc);
    // renamed to "inputStream" — no caller impact, Java callers are
    // positional and the method signature type is unchanged.
    try {
        List<BibEntry> bibEntries = XMPUtil.readXMP(inputStream);
        return !bibEntries.isEmpty();
    } catch (EncryptionNotSupportedException ex) {
        // Encrypted PDFs cannot be inspected; report "no metadata".
        LOGGER.info("Encryption not supported by XMPUtil");
        return false;
    } catch (IOException e) {
        LOGGER.error("XMP reading failed", e);
        return false;
    }
}
}
| Fix typo and wrong Collections.EMPTY_LIST
Collections.emptyList() is better. See http://stackoverflow.com/a/14870838/873282
| src/main/java/net/sf/jabref/logic/xmp/XMPUtil.java | Fix typo and wrong Collections.EMPTY_LIST |
|
Java | mit | 250f8a5c09ad702f09a9a77f670e74d4647a7fd5 | 0 | alexsomai/run-selector-plugin,alexsomai/run-selector-plugin | /*
* The MIT License
*
* Copyright (c) 2013-2014, CloudBees, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.plugins.copyartifact.testutils;
import hudson.plugins.copyartifact.BuildSelector;
import hudson.plugins.copyartifact.CopyArtifact;
/**
 * Test helper for building fully configured {@link CopyArtifact} build steps
 * without repeating the setter boilerplate in every test.
 *
 * @author <a href="mailto:[email protected]">[email protected]</a>
 */
public class CopyArtifactUtil {

    /** Utility class — not instantiable. */
    private CopyArtifactUtil() {
    }

    public static CopyArtifact createCopyArtifact(String projectName, String parameters, BuildSelector selector, String filter, String target,
            boolean flatten, boolean optional) {
        // Shortest form: no excludes, fingerprinting disabled.
        return createCopyArtifact(projectName, parameters, selector, filter, null, target, flatten, optional, false);
    }

    public static CopyArtifact createCopyArtifact(String projectName, String parameters, BuildSelector selector, String filter, String target,
            boolean flatten, boolean optional, boolean fingerprintArtifacts) {
        // No excludes, caller-controlled fingerprinting.
        return createCopyArtifact(projectName, parameters, selector, filter, null, target, flatten, optional, fingerprintArtifacts);
    }

    public static CopyArtifact createCopyArtifact(String projectName, String parameters, BuildSelector selector, String filter, String excludes, String target,
            boolean flatten, boolean optional, boolean fingerprintArtifacts) {
        // Excludes supported, no result-variable suffix.
        return createCopyArtifact(projectName, parameters, selector, filter, excludes, target, flatten, optional, fingerprintArtifacts, null);
    }

    public static CopyArtifact createCopyArtifact(String projectName, String parameters, BuildSelector selector, String filter, String excludes, String target,
            boolean flatten, boolean optional, boolean fingerprintArtifacts, String resultVariableSuffix) {
        // Full form: every overload above funnels into this one.
        CopyArtifact step = new CopyArtifact(projectName);
        step.setParameters(parameters);
        step.setSelector(selector);
        step.setFilter(filter);
        step.setExcludes(excludes);
        step.setTarget(target);
        step.setFlatten(flatten);
        step.setOptional(optional);
        step.setFingerprintArtifacts(fingerprintArtifacts);
        step.setResultVariableSuffix(resultVariableSuffix);
        return step;
    }
}
| src/test/java/hudson/plugins/copyartifact/testutils/CopyArtifactUtil.java | /*
* The MIT License
*
* Copyright (c) 2013-2014, CloudBees, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.plugins.copyartifact.testutils;
import hudson.plugins.copyartifact.BuildSelector;
import hudson.plugins.copyartifact.CopyArtifact;
/**
 * Test helper for building fully configured {@link CopyArtifact} build steps.
 *
 * @author <a href="mailto:[email protected]">[email protected]</a>
 */
public class CopyArtifactUtil {

    /** Utility class — not instantiable. */
    private CopyArtifactUtil() {
    }

    public static CopyArtifact createCopyArtifact(String projectName, String parameters, BuildSelector selector, String filter, String target,
            boolean flatten, boolean optional) {
        // No excludes, fingerprinting disabled.
        return createCopyArtifact(projectName, parameters, selector, filter, null, target, flatten, optional, false);
    }

    public static CopyArtifact createCopyArtifact(String projectName, String parameters, BuildSelector selector, String filter, String target,
            boolean flatten, boolean optional, boolean fingerprintArtifacts) {
        // No excludes, caller-controlled fingerprinting.
        return createCopyArtifact(projectName, parameters, selector, filter, null, target, flatten, optional, fingerprintArtifacts);
    }

    public static CopyArtifact createCopyArtifact(String projectName, String parameters, BuildSelector selector, String filter, String excludes, String target,
            boolean flatten, boolean optional, boolean fingerprintArtifacts) {
        // Fix: this overload previously discarded its "excludes" argument by
        // passing null to the delegate, silently ignoring the caller's value.
        return createCopyArtifact(projectName, parameters, selector, filter, excludes, target, flatten, optional, fingerprintArtifacts, null);
    }

    public static CopyArtifact createCopyArtifact(String projectName, String parameters, BuildSelector selector, String filter, String excludes, String target,
            boolean flatten, boolean optional, boolean fingerprintArtifacts, String resultVariableSuffix) {
        // Full form: every overload above funnels into this one.
        CopyArtifact copyArtifact = new CopyArtifact(projectName);
        copyArtifact.setParameters(parameters);
        copyArtifact.setSelector(selector);
        copyArtifact.setFilter(filter);
        copyArtifact.setExcludes(excludes);
        copyArtifact.setTarget(target);
        copyArtifact.setFlatten(flatten);
        copyArtifact.setOptional(optional);
        copyArtifact.setFingerprintArtifacts(fingerprintArtifacts);
        copyArtifact.setResultVariableSuffix(resultVariableSuffix);
        return copyArtifact;
    }
}
| [JENKINS-18938] Fixed the test failure caused by a wrong change for test utility in the last two commits.
| src/test/java/hudson/plugins/copyartifact/testutils/CopyArtifactUtil.java | [JENKINS-18938] Fixed the test failure caused by a wrong change for test utility in the last two commits. |
|
Java | mit | 8efe7a77eb565740da2623c7de1aada2bd047038 | 0 | ugent-cros/cros-core,ugent-cros/cros-core,ugent-cros/cros-core | package drones.flightcontrol;
import akka.actor.ActorRef;
import droneapi.api.DroneCommander;
import droneapi.messages.FlyingStateChangedMessage;
import droneapi.messages.LocationChangedMessage;
import droneapi.messages.NavigationStateChangedMessage;
import droneapi.model.properties.FlyingState;
import droneapi.model.properties.Location;
import droneapi.model.properties.NavigationState;
import drones.flightcontrol.messages.*;
import drones.scheduler.messages.to.FlightCanceledMessage;
import drones.scheduler.messages.to.FlightCompletedMessage;
import models.Checkpoint;
import scala.concurrent.Await;
import scala.concurrent.duration.Duration;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
* Basic implementation of a Pilot class. It will fly with the drone to its destinations via the wayPoints
* and will land on the last item in the list. It takes into account the waiting time of a wayPoint but not
* its altitude.
*
* When the SimplePilot is connected with a ControlTower it will send a request message before a take off or
* landing. When, subsequently, the RequestGrantedMessage is received it will execute the landing or take off
* and respond with a CompletedMessage.
*
* When a RequestMessage is received from another pilot it will check if its actual location is not within
* the NoFlyRange of the location of the request. If this is so, it will add the request location to the
* NoFlyPoint list and it will immediately respond with a RequestGrantedMessage. If this is not so, it will
* wait until the drone has left the request location.
*
* !!! WARNING 1: The SimplePilot assumes that there are no obstacles on the route that he will fly.
*
* !!! WARNING 2: When an error occurs, the pilot will go to a blocked state. It is the responsibility of
* the user to land the drone on a safe place.
*
* !!! WARNING 3: There can only be one pilot for each drone at any time.
*
* !!! WARNING 4: The drone should be landed before starting the pilot.
*
* Created by Sander on 18/03/2015.
*/
public class SimplePilot extends Pilot {

    // Last known GPS position of the drone, updated on every LocationChangedMessage.
    private Location actualLocation;

    //wayPoints = route to fly
    private List<Checkpoint> wayPoints;
    // Index into wayPoints of the point currently being flown to; -1 before takeoff.
    private int actualWayPoint = -1;

    //List of points where the drone cannot fly
    private List<Location> noFlyPoints = new ArrayList<>();
    //List of points(wrapped in messages) where the drone currently is but that need to be evacuated for a landing or take off.
    private List<RequestMessage> evacuationPoints = new ArrayList<>();

    //Range around a no fly point where the drone cannot fly.
    private static final int NO_FY_RANGE = 10;
    //Range around a evacuation point where the drone should be evacuated.
    private static final int EVACUATION_RANGE = 15;

    private boolean landed = true;

    //True if the drone has taken off and is waiting to go up until cruising altitude
    private boolean waitForTakeOffFinished = false;

    //True is the drone is going up until cruising altitude and will wait to fly to the first wayPoint
    private boolean waitForGoUpUntilCruisingAltitudeFinished = false;

    //True if pilot is waiting for landing completed
    private boolean waitForLandFinished = false;

    //True if pilot is waiting for landing when een stopMessage has send
    private boolean waitForLandAfterStopFinished = false;

    //Buffer when waiting for takeoff or landed to send the completed message
    private RequestMessage requestMessageBuffer = null;

    //True when entered a no fly range
    private boolean waitForLeavingNoFlyRange = false;

    private boolean done = false;

    /**
     * @param reporterRef actor to report the outgoing messages
     * @param droneId drone to control
     * @param linkedWithControlTower true if connected to a ControlTower
     * @param wayPoints route to fly, the drone will land on the last item
     */
    public SimplePilot(ActorRef reporterRef, long droneId, boolean linkedWithControlTower, List<Checkpoint> wayPoints) {
        super(reporterRef, droneId, linkedWithControlTower);

        if (wayPoints.isEmpty()) {
            throw new IllegalArgumentException("Waypoints must contain at least 1 element");
        }
        this.wayPoints = wayPoints;
    }

    /**
     *
     * @param reporterRef actor to report the outgoing messages
     * @param droneId drone to control
     * @param linkedWithControlTower true if connected to a ControlTower
     * @param wayPoints route to fly, the drone will land on the last item
     * @param cruisingAltitude cruisingAltitude of the drone
     */
    public SimplePilot(ActorRef reporterRef, long droneId, boolean linkedWithControlTower, List<Checkpoint> wayPoints, double cruisingAltitude) {
        this(reporterRef, droneId, linkedWithControlTower, wayPoints);
        this.cruisingAltitude = cruisingAltitude;
    }

    /**
     *
     * @param reporterRef actor to report the outgoing messages
     * @param droneId drone to control
     * @param linkedWithControlTower true if connected to a ControlTower
     * @param wayPoints route to fly, the drone will land on the last item
     * @param cruisingAltitude cruisingAltitude of the drone
     * @param noFlyPoints list of points where the drone cannot fly
     */
    public SimplePilot(ActorRef reporterRef, long droneId, boolean linkedWithControlTower, List<Checkpoint> wayPoints, double cruisingAltitude, List<Location> noFlyPoints) {
        this(reporterRef, droneId, linkedWithControlTower, wayPoints, cruisingAltitude);
        this.cruisingAltitude = cruisingAltitude;
        // Defensive copy so later mutations by the caller cannot affect this pilot.
        this.noFlyPoints = new ArrayList<>(noFlyPoints);
    }

    /**
     * Use only for testing!
     */
    public SimplePilot(ActorRef reporterRef, DroneCommander dc, boolean linkedWithControlTower, List<Checkpoint> wayPoints) {
        super(reporterRef, dc, linkedWithControlTower);

        if (wayPoints.isEmpty()) {
            throw new IllegalArgumentException("Waypoints must contain at least 1 element");
        }
        this.wayPoints = wayPoints;
    }

    /**
     * Entry point: verifies the drone is available, applies the default cruising
     * altitude if none was set, and starts the take off procedure.
     */
    @Override
    public void startFlightControlMessage() {
        //Check if navigationState is "AVAILABLE"
        try {
            NavigationState m = Await.result(dc.getNavigationState(), MAX_DURATION_SHORT);
            if(m != NavigationState.AVAILABLE){
                handleErrorMessage("Can not start because NavigationState is not \"AVAILABLE\".");
                return;
            }
            actualLocation = Await.result(dc.getLocation(), MAX_DURATION_SHORT);
        } catch (Exception e) {
            handleErrorMessage("Error while getting NavigationState after start");
            return;
        }

        // A raw-bits comparison against 0 detects "cruisingAltitude was never set".
        if (Double.doubleToRawLongBits(cruisingAltitude) == 0) {
            cruisingAltitude = DEFAULT_ALTITUDE;
            try {
                Await.ready(dc.setMaxHeight((float) cruisingAltitude), MAX_DURATION_SHORT);
            } catch (TimeoutException | InterruptedException e) {
                handleErrorMessage("Failed to set max height after SetCruisingAltitudeMessage");
                return;
            }
        }
        blocked = false;
        log.info("Pilot for drone " + droneId + " has started.");
        takeOff();
    }

    /**
     * Shuts the pilot down: completes any granted-but-unfinished request, lands
     * the drone if it is still airborne, and finally stops the actor.
     */
    @Override
    protected void stopFlightControlMessage(StopFlightControlMessage m) {
        log.info("Pilot for drone " + droneId + " has received a shut down message.");
        //check if there was a request granted but not yet completed
        if(linkedWithControlTower && requestMessageBuffer != null){
            // FIX: notify the control tower BEFORE clearing the buffer. The
            // original code nulled requestMessageBuffer first, so the
            // CompletedMessage was constructed with null instead of the request.
            reporterRef.tell(new CompletedMessage(requestMessageBuffer), self());
            requestMessageBuffer = null;
        }

        if(!landed){
            try {
                Await.ready(dc.land(), MAX_DURATION_LONG);
                landed = true;
            } catch (TimeoutException | InterruptedException e) {
                handleErrorMessage("Could no land drone after stop message");
                return;
            }
            waitForLandAfterStopFinished = true;
        } else {
            stop();
        }
    }

    /**
     * Final teardown: unsubscribes from the commander, reports cancellation
     * when needed and stops the actor.
     */
    private void stop(){
        blocked = true;
        dc.unsubscribe(self());
        if(!done || linkedWithControlTower){
            reporterRef.tell(new FlightCanceledMessage(droneId, done), self());
        }
        log.info("Pilot for drone " + droneId + " will shut down.");

        //stop
        getContext().stop(self());
    }

    /**
     * Advances to the next way point: flies directly to the first one, or
     * schedules the configured waiting time at the way point just reached.
     */
    protected void goToNextWaypoint() {
        if (!blocked) {
            actualWayPoint++;
            if (actualWayPoint == 0){
                log.info("Pilot for drone " + droneId + " will go to the first way point.");
                models.Location newLocation = wayPoints.get(actualWayPoint).getLocation();
                dc.moveToLocation(newLocation.getLatitude(), newLocation.getLongitude(), cruisingAltitude);
            } else {
                //wait at wayPoint
                getContext().system().scheduler().scheduleOnce(Duration.create(wayPoints.get(actualWayPoint - 1).getWaitingTime(), TimeUnit.SECONDS),
                        new Runnable() {
                            @Override
                            public void run() {
                                self().tell(new WaitAtWayPointCompletedMessage(), self());
                            }
                        }, getContext().system().dispatcher());
            }
        }
    }

    /**
     * Called when the waiting time at a way point has elapsed: either lands
     * (last way point) or continues to the next one.
     */
    @Override
    protected void waitAtWayPointCompletedMessage(WaitAtWayPointCompletedMessage m) {
        if(!blocked){
            reporterRef.tell(new WayPointCompletedMessage(droneId, actualWayPoint -1), self());
            if (actualWayPoint == wayPoints.size()) {
                log.info("Pilot for drone " + droneId + " has arrived at last way point.");
                //arrived at destination => land
                land();
            } else {
                log.info("Pilot for drone " + droneId + " has arrived at way point " + (actualWayPoint - 1) + " and will go to the next one.");
                //fly to next wayPoint
                models.Location newLocation = wayPoints.get(actualWayPoint).getLocation();
                dc.moveToLocation(newLocation.getLatitude(), newLocation.getLongitude(), cruisingAltitude);
            }
        }
    }

    /**
     * Starts the landing procedure, either via a request to the control tower
     * or directly on the drone commander.
     */
    private void land() {
        if(!blocked){
            if(linkedWithControlTower){
                log.info("Pilot for drone " + droneId + " has sent a request for landing.");
                reporterRef.tell(new RequestMessage(self(), actualLocation, AbstractFlightControlMessage.RequestType.LANDING, droneId), self());
            } else {
                log.info("Pilot for drone " + droneId + " has started the landing procedure.");
                try {
                    Await.ready(dc.land(), MAX_DURATION_LONG);
                } catch (TimeoutException | InterruptedException e) {
                    handleErrorMessage("Could no land drone after internal land command");
                    return;
                }
                waitForLandFinished = true;
            }
        }
    }

    /**
     * Starts the take off procedure, either via a request to the control tower
     * or directly on the drone commander.
     */
    private void takeOff() {
        if(!blocked){
            if(linkedWithControlTower){
                log.info("Pilot for drone " + droneId + " has sent a request for take off.");
                reporterRef.tell(new RequestMessage(self(),actualLocation, AbstractFlightControlMessage.RequestType.TAKEOFF, droneId),self());
            } else {
                log.info("Pilot for drone " + droneId + " has started the take off procedure.");
                try {
                    Await.ready(dc.takeOff(), MAX_DURATION_LONG);
                } catch (TimeoutException | InterruptedException e) {
                    handleErrorMessage("Could no take off drone after internal takeoff command");
                    return;
                }
                waitForTakeOffFinished = true;
            }
        }
    }

    /**
     * Handles a RequestMessage of a other drone. A RequestMessage is sent when a drone wants to land or to take off.
     */
    @Override
    protected void requestMessage(RequestMessage m) {
        if(blocked || landed){
            // A blocked or landed drone is no obstacle: grant immediately.
            noFlyPoints.add(m.getLocation());
            reporterRef.tell(new RequestGrantedMessage(droneId,m), self());
            log.info("Pilot for drone " + droneId + " has received a request from " + m.getDroneId() + " and has granted it.");
        } else {
            if (actualLocation.distance(m.getLocation()) <= EVACUATION_RANGE) {
                // Too close: defer the grant until this drone has flown away.
                evacuationPoints.add(m);
                log.info("Pilot for drone " + droneId + " has received a request from " + m.getDroneId() + " and has added it to the evacuation points.");
            } else {
                noFlyPoints.add(m.getLocation());
                reporterRef.tell(new RequestGrantedMessage(droneId,m), self());
                log.info("Pilot for drone " + droneId + " has received a request from " + m.getDroneId() + " and has granted it.");
            }
        }
    }

    /**
     * Handles a RequestGrantedMessage. A RequestGrantedMessage is sent to a class as a reply on a RequestMessage.
     */
    @Override
    protected void requestGrantedMessage(RequestGrantedMessage m) {
        switch (m.getRequestMessage().getType()) {
            case LANDING:
                try {
                    Await.ready(dc.land(), MAX_DURATION_LONG);
                } catch (TimeoutException | InterruptedException e) {
                    handleErrorMessage("Could no land drone after internal land command");
                    return;
                }
                waitForLandFinished = true;
                if(linkedWithControlTower){
                    // Remember the request so it can be completed once landed.
                    requestMessageBuffer = m.getRequestMessage();
                }
                log.info("Pilot for drone " + droneId + " has received a RequestGrantedMessage and has started the landing procedure.");
                break;
            case TAKEOFF:
                try {
                    Await.ready(dc.takeOff(), MAX_DURATION_LONG);
                } catch (TimeoutException | InterruptedException e) {
                    handleErrorMessage("Could no take off drone after internal takeoff command");
                    return;
                }
                waitForTakeOffFinished = true;
                if(linkedWithControlTower){
                    // Remember the request so it can be completed at cruising altitude.
                    requestMessageBuffer = m.getRequestMessage();
                }
                log.info("Pilot for drone " + droneId + " has received a RequestGrantedMessage and has started the take off procedure.");
                break;
            default:
                log.warning("No handler for: [{}]", m.getRequestMessage().getType());
        }
    }

    /**
     * Handles CompletedMessage of a other drone. A CompletedMessage is sent when a other drone has completed his landing of take off that he has requested.
     */
    @Override
    protected void completedMessage(CompletedMessage m) {
        log.info("Pilot for drone " + droneId + " has received a CompletedMessage.");
        noFlyPoints.remove(m.getLocation());
    }

    /**
     * Tracks the drone position: releases evacuation points once out of range,
     * halts when entering a no-fly range and resumes once the range is clear.
     */
    @Override
    protected void locationChanged(LocationChangedMessage m) {
        if (!blocked && !waitForLandFinished && !waitForTakeOffFinished && !waitForGoUpUntilCruisingAltitudeFinished) {
            actualLocation = new Location(m.getLatitude(), m.getLongitude(), m.getGpsHeight());

            //use iterator
            Iterator<RequestMessage> it = evacuationPoints.iterator();
            while(it.hasNext()){
                RequestMessage r = it.next();
                if(actualLocation.distance(r.getLocation()) > EVACUATION_RANGE){
                    log.info("Pilot for drone " + droneId + " has left the evacuation range.");
                    //remove from list
                    it.remove();
                    noFlyPoints.add(r.getLocation());
                    reporterRef.tell(new RequestGrantedMessage(droneId,r),self());
                }
            }
            for (Location l : noFlyPoints) {
                if (actualLocation.distance(l) < NO_FY_RANGE && !landed) {
                    log.info("Pilot for drone " + droneId + " has entered a no fly range.");
                    //stop with flying
                    waitForLeavingNoFlyRange = true;
                    try {
                        Await.ready(dc.cancelMoveToLocation(), MAX_DURATION_SHORT);
                    } catch (TimeoutException | InterruptedException e) {
                        handleErrorMessage("Cannot cancelMoveToLocation, the drones will probably collide!!!");
                    }
                    return;
                }
            }

            //Check if can fly further
            if(waitForLeavingNoFlyRange){
                waitForLeavingNoFlyRange = false;
                //fly to next wayPoint
                models.Location newLocation = wayPoints.get(actualWayPoint).getLocation();
                dc.moveToLocation(newLocation.getLatitude(), newLocation.getLongitude(), cruisingAltitude);
                log.info("Pilot for drone " + droneId + " can no fly further to the next way point: " + actualWayPoint + ".");
            }
        }
    }

    /**
     * Reacts on flying-state transitions: continues the take off (HOVERING),
     * completes the landing (LANDED), or reports an emergency.
     */
    @Override
    protected void flyingStateChanged(FlyingStateChangedMessage m) {
        switch (m.getState()){
            case HOVERING:
                if(!blocked && waitForTakeOffFinished) {
                    waitForTakeOffFinished = false;

                    //go up until cruising altitude
                    try {
                        Await.ready(dc.moveToLocation(actualLocation.getLatitude(), actualLocation.getLongitude(), cruisingAltitude), MAX_DURATION_LONG);
                    } catch (TimeoutException | InterruptedException e) {
                        handleErrorMessage("Could no send takeoff command to cruising altitude");
                        return;
                    }
                    waitForGoUpUntilCruisingAltitudeFinished = true;
                    log.info("Pilot for drone " + droneId + " has completed the first take off procedure and will now go up until cruising altitude.");
                }
                break;
            case EMERGENCY:
                handleErrorMessage("Drone in emergency");
                landed = true;
                break;
            case LANDED:
                if(!blocked && waitForLandFinished){
                    waitForLandFinished = false;
                    landed = true;
                    blocked = true;
                    if(linkedWithControlTower){
                        reporterRef.tell(new CompletedMessage(requestMessageBuffer), self());
                        requestMessageBuffer = null;
                    }
                    done = true;
                    reporterRef.tell(new FlightCompletedMessage(droneId, actualLocation), self());
                    log.info("Pilot for drone " + droneId + " has completed the landing procedure.");
                    return;
                }
                if(!blocked && waitForLandAfterStopFinished){
                    log.info("Pilot for drone " + droneId + " has completed the landing procedure.");
                    stop();
                    return;
                }
                landed = true;
                blocked = true;
                break;
        }
    }

    /**
     * Reacts on navigation-state changes: once navigation becomes AVAILABLE
     * again after a finished move, the pilot proceeds along the route.
     */
    @Override
    protected void navigationStateChanged(NavigationStateChangedMessage m) {
        if(!blocked && m.getState() == NavigationState.AVAILABLE){
            switch (m.getReason()){
                case FINISHED:
                    if(waitForGoUpUntilCruisingAltitudeFinished){
                        waitForGoUpUntilCruisingAltitudeFinished = false;
                        landed = false;
                        if(linkedWithControlTower){
                            reporterRef.tell(new CompletedMessage(requestMessageBuffer), self());
                            requestMessageBuffer = null;
                        }
                        log.info("Pilot for drone " + droneId + " has completed the second take off procedure.");
                        goToNextWaypoint();
                        break;
                    }
                    if(!waitForTakeOffFinished && !waitForLandAfterStopFinished && !waitForLandFinished && !waitForGoUpUntilCruisingAltitudeFinished){
                        goToNextWaypoint();
                    }
                    break;
                case STOPPED:
                    if(!linkedWithControlTower){
                        handleErrorMessage("Navigation has stopped.");
                    }
            }
        }
    }

    /**
     * Adds a no-fly point, refusing points that overlap the drone's current position.
     */
    @Override
    protected void addNoFlyPointMessage(AddNoFlyPointMessage m) {
        if (actualLocation.distance(m.getNoFlyPoint()) < NO_FY_RANGE) {
            handleErrorMessage("You cannot add a drone within the no-fly-range " +
                    "of the location where another drone wants to land or to take off");
        } else {
            noFlyPoints.add(m.getNoFlyPoint());
        }
    }

    /**
     * Blocks the pilot and reports the error to the reporter actor.
     * The drone is left in the air; landing it safely is the user's responsibility.
     */
    private void handleErrorMessage(String s){
        blocked = true;
        reporterRef.tell(new FlightControlExceptionMessage(s,droneId),self());
        log.error("FlightControl error with droneID " + droneId + ": " + s);
    }
}
| app/drones/flightcontrol/SimplePilot.java | package drones.flightcontrol;
import akka.actor.ActorRef;
import droneapi.api.DroneCommander;
import droneapi.messages.FlyingStateChangedMessage;
import droneapi.messages.LocationChangedMessage;
import droneapi.messages.NavigationStateChangedMessage;
import droneapi.model.properties.Location;
import droneapi.model.properties.NavigationState;
import drones.flightcontrol.messages.*;
import drones.scheduler.messages.to.FlightCanceledMessage;
import drones.scheduler.messages.to.FlightCompletedMessage;
import models.Checkpoint;
import scala.concurrent.Await;
import scala.concurrent.duration.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
* Basic implementation of a Pilot class. It will fly with the drone to its destinations via the wayPoints
* and will land on the last item in the list. It takes into account the waiting time of a wayPoint but not
* its altitude.
*
* When the SimplePilot is connected with a ControlTower it will send a request message before a take off or
* landing. When, subsequently, the RequestGrantedMessage is received it will execute the landing or take off
* and respond with a CompletedMessage.
*
* When a RequestMessage is received from another pilot it will check if its actual location is not within
* the NoFlyRange of the location of the request. If this is so, it will add the request location to the
* NoFlyPoint list and it will immediately respond with a RequestGrantedMessage. If this is not so, it will
* wait until the drone has left the request location.
*
* !!! WARNING 1: The SimplePilot assumes that there are no obstacles on the route that he will fly.
*
* !!! WARNING 2: When an error occurs, the pilot will go to a blocked state. It is the responsibility of
* the user to land the drone on a safe place.
*
* !!! WARNING 3: There can only be one pilot for each drone at any time.
*
* !!! WARNING 4: The drone should be landed before starting the pilot.
*
* Created by Sander on 18/03/2015.
*/
public class SimplePilot extends Pilot {

    private Location actualLocation;

    /** Route to fly; the drone will land on the last checkpoint. */
    private List<Checkpoint> wayPoints;

    /** Index of the way point the drone is currently flying to; -1 before take off. */
    private int actualWayPoint = -1;

    /** Points where the drone is not allowed to fly. */
    private List<Location> noFlyPoints = new ArrayList<>();

    /**
     * Requests (wrapped in messages) for locations where the drone currently is,
     * but that need to be evacuated before another drone may land or take off.
     */
    private List<RequestMessage> evacuationPoints = new ArrayList<>();

    /** Range around a no-fly point where the drone cannot fly. */
    private static final int NO_FY_RANGE = 15;
    /** Range around an evacuation point that the drone should evacuate. */
    private static final int EVACUATION_RANGE = 10;

    private boolean landed = true;

    // True if the drone has taken off and is waiting to go up until cruising altitude.
    private boolean waitForTakeOffFinished = false;
    // True if the drone is going up until cruising altitude and will then fly to the first wayPoint.
    private boolean waitForGoUpUntilCruisingAltitudeFinished = false;
    // True if the pilot is waiting for a landing to complete.
    private boolean waitForLandFinished = false;
    // True if the pilot is waiting for the landing triggered by a stop message.
    private boolean waitForLandAfterStopFinished = false;

    // Granted request that still has to be acknowledged with a CompletedMessage
    // once the associated take off or landing has finished.
    private RequestMessage requestMessageBuffer = null;

    private boolean done = false;

    /**
     * @param reporterRef actor to report the outgoing messages
     * @param droneId drone to control
     * @param linkedWithControlTower true if connected to a ControlTower
     * @param wayPoints route to fly, the drone will land on the last item
     */
    public SimplePilot(ActorRef reporterRef, long droneId, boolean linkedWithControlTower, List<Checkpoint> wayPoints) {
        super(reporterRef, droneId, linkedWithControlTower);
        if (wayPoints.isEmpty()) {
            throw new IllegalArgumentException("Waypoints must contain at least 1 element");
        }
        this.wayPoints = wayPoints;
    }

    /**
     * @param reporterRef actor to report the outgoing messages
     * @param droneId drone to control
     * @param linkedWithControlTower true if connected to a ControlTower
     * @param wayPoints route to fly, the drone will land on the last item
     * @param cruisingAltitude cruisingAltitude of the drone
     */
    public SimplePilot(ActorRef reporterRef, long droneId, boolean linkedWithControlTower, List<Checkpoint> wayPoints, double cruisingAltitude) {
        this(reporterRef, droneId, linkedWithControlTower, wayPoints);
        this.cruisingAltitude = cruisingAltitude;
    }

    /**
     * @param reporterRef actor to report the outgoing messages
     * @param droneId drone to control
     * @param linkedWithControlTower true if connected to a ControlTower
     * @param wayPoints route to fly, the drone will land on the last item
     * @param cruisingAltitude cruisingAltitude of the drone
     * @param noFlyPoints list of points where the drone cannot fly
     */
    public SimplePilot(ActorRef reporterRef, long droneId, boolean linkedWithControlTower, List<Checkpoint> wayPoints, double cruisingAltitude, List<Location> noFlyPoints) {
        // The delegated constructor already stores the cruising altitude.
        this(reporterRef, droneId, linkedWithControlTower, wayPoints, cruisingAltitude);
        this.noFlyPoints = new ArrayList<>(noFlyPoints);
    }

    /**
     * Use only for testing!
     */
    public SimplePilot(ActorRef reporterRef, DroneCommander dc, boolean linkedWithControlTower, List<Checkpoint> wayPoints) {
        super(reporterRef, dc, linkedWithControlTower);
        if (wayPoints.isEmpty()) {
            throw new IllegalArgumentException("Waypoints must contain at least 1 element");
        }
        this.wayPoints = wayPoints;
    }

    @Override
    public void startFlightControlMessage() {
        // Flying may only start while the drone's navigation is "AVAILABLE".
        try {
            NavigationState m = Await.result(dc.getNavigationState(), MAX_DURATION_SHORT);
            if (m != NavigationState.AVAILABLE) {
                handleErrorMessage("Can not start because NavigationState is not \"AVAILABLE\".");
                return;
            }
            actualLocation = Await.result(dc.getLocation(), MAX_DURATION_SHORT);
        } catch (Exception e) {
            handleErrorMessage("Error while getting NavigationState after start");
            return;
        }
        // Fall back to the default altitude when no cruising altitude was configured.
        if (Double.doubleToRawLongBits(cruisingAltitude) == 0) {
            cruisingAltitude = DEFAULT_ALTITUDE;
            try {
                Await.ready(dc.setMaxHeight((float) cruisingAltitude), MAX_DURATION_SHORT);
            } catch (TimeoutException | InterruptedException e) {
                handleErrorMessage("Failed to set max height after SetCruisingAltitudeMessage");
                return;
            }
        }
        blocked = false;
        takeOff();
    }

    @Override
    protected void stopFlightControlMessage(StopFlightControlMessage m) {
        // Check if there was a request granted but not yet completed. The
        // CompletedMessage must be sent BEFORE clearing the buffer, otherwise a
        // CompletedMessage carrying a null request would be reported.
        if (linkedWithControlTower && requestMessageBuffer != null) {
            reporterRef.tell(new CompletedMessage(requestMessageBuffer), self());
            requestMessageBuffer = null;
        }
        if (!landed) {
            try {
                Await.ready(dc.land(), MAX_DURATION_LONG);
                landed = true;
            } catch (TimeoutException | InterruptedException e) {
                handleErrorMessage("Could not land drone after stop message");
                return;
            }
            waitForLandAfterStopFinished = true;
        } else {
            stop();
        }
    }

    /**
     * Unsubscribes from drone events, reports the cancellation if required and
     * stops this actor.
     */
    private void stop() {
        blocked = true;
        dc.unsubscribe(self());
        if (!done || linkedWithControlTower) {
            reporterRef.tell(new FlightCanceledMessage(droneId, done), self());
        }
        // stop
        getContext().stop(self());
    }

    /**
     * Flies to the next way point. The first way point is approached
     * immediately; for all later ones the drone first waits the configured
     * waiting time at the way point it just reached.
     */
    protected void goToNextWaypoint() {
        if (!blocked) {
            actualWayPoint++;
            if (actualWayPoint == 0) {
                models.Location newLocation = wayPoints.get(actualWayPoint).getLocation();
                dc.moveToLocation(newLocation.getLatitude(), newLocation.getLongitude(), cruisingAltitude);
            } else {
                // Wait at the way point before continuing the route.
                getContext().system().scheduler().scheduleOnce(
                        Duration.create(wayPoints.get(actualWayPoint - 1).getWaitingTime(), TimeUnit.SECONDS),
                        new Runnable() {
                            @Override
                            public void run() {
                                self().tell(new WaitAtWayPointCompletedMessage(), self());
                            }
                        }, getContext().system().dispatcher());
            }
        }
    }

    @Override
    protected void waitAtWayPointCompletedMessage(WaitAtWayPointCompletedMessage m) {
        if (!blocked) {
            reporterRef.tell(new WayPointCompletedMessage(droneId, actualWayPoint - 1), self());
            if (actualWayPoint == wayPoints.size()) {
                // Arrived at destination => land.
                land();
            } else {
                // Fly to the next way point.
                models.Location newLocation = wayPoints.get(actualWayPoint).getLocation();
                dc.moveToLocation(newLocation.getLatitude(), newLocation.getLongitude(), cruisingAltitude);
            }
        }
    }

    /**
     * Lands the drone. When linked with a control tower the landing must be
     * requested first; otherwise the land command is issued directly.
     */
    private void land() {
        if (!blocked) {
            if (linkedWithControlTower) {
                reporterRef.tell(new RequestMessage(self(), actualLocation, AbstractFlightControlMessage.RequestType.LANDING, droneId), self());
            } else {
                try {
                    Await.ready(dc.land(), MAX_DURATION_LONG);
                } catch (TimeoutException | InterruptedException e) {
                    handleErrorMessage("Could not land drone after internal land command");
                    return;
                }
                waitForLandFinished = true;
            }
        }
    }

    /**
     * Starts the drone. When linked with a control tower the take off must be
     * requested first; otherwise the take off command is issued directly.
     */
    private void takeOff() {
        if (!blocked) {
            if (linkedWithControlTower) {
                reporterRef.tell(new RequestMessage(self(), actualLocation, AbstractFlightControlMessage.RequestType.TAKEOFF, droneId), self());
            } else {
                try {
                    Await.ready(dc.takeOff(), MAX_DURATION_LONG);
                } catch (TimeoutException | InterruptedException e) {
                    handleErrorMessage("Could not take off drone after internal takeoff command");
                    return;
                }
                waitForTakeOffFinished = true;
            }
        }
    }

    /**
     * Handles a RequestMessage of another drone. A RequestMessage is sent when a drone wants to land or to take off.
     */
    @Override
    protected void requestMessage(RequestMessage m) {
        if (blocked) {
            noFlyPoints.add(m.getLocation());
            reporterRef.tell(new RequestGrantedMessage(droneId, m), self());
        } else {
            if (actualLocation.distance(m.getLocation()) <= EVACUATION_RANGE) {
                // Too close: grant only after this drone has left the area.
                evacuationPoints.add(m);
            } else {
                noFlyPoints.add(m.getLocation());
                reporterRef.tell(new RequestGrantedMessage(droneId, m), self());
            }
        }
    }

    /**
     * Handles a RequestGrantedMessage. A RequestGrantedMessage is sent to a class as a reply on a RequestMessage.
     */
    @Override
    protected void requestGrantedMessage(RequestGrantedMessage m) {
        switch (m.getRequestMessage().getType()) {
            case LANDING:
                try {
                    Await.ready(dc.land(), MAX_DURATION_LONG);
                } catch (TimeoutException | InterruptedException e) {
                    handleErrorMessage("Could not land drone after internal land command");
                    return;
                }
                waitForLandFinished = true;
                if (linkedWithControlTower) {
                    requestMessageBuffer = m.getRequestMessage();
                }
                break;
            case TAKEOFF:
                try {
                    Await.ready(dc.takeOff(), MAX_DURATION_LONG);
                } catch (TimeoutException | InterruptedException e) {
                    handleErrorMessage("Could not take off drone after internal takeoff command");
                    return;
                }
                waitForTakeOffFinished = true;
                if (linkedWithControlTower) {
                    requestMessageBuffer = m.getRequestMessage();
                }
                break;
            default:
                log.warning("No handler for: [{}]", m.getRequestMessage().getType());
        }
    }

    /**
     * Handles CompletedMessage of another drone. A CompletedMessage is sent when another drone has completed the landing or take off that it has requested.
     */
    @Override
    protected void completedMessage(CompletedMessage m) {
        noFlyPoints.remove(m.getLocation());
    }

    @Override
    protected void locationChanged(LocationChangedMessage m) {
        if (!blocked && !waitForLandFinished && !waitForTakeOffFinished && !waitForGoUpUntilCruisingAltitudeFinished) {
            actualLocation = new Location(m.getLatitude(), m.getLongitude(), m.getGpsHeight());
            // Grant buffered requests once this drone has left their evacuation
            // range. Iterate by index (backwards) because elements are removed
            // while scanning; an enhanced for loop would throw a
            // ConcurrentModificationException here.
            for (int i = evacuationPoints.size() - 1; i >= 0; i--) {
                RequestMessage r = evacuationPoints.get(i);
                if (actualLocation.distance(r.getLocation()) > EVACUATION_RANGE) {
                    evacuationPoints.remove(i);
                    noFlyPoints.add(r.getLocation());
                    reporterRef.tell(new RequestGrantedMessage(droneId, r), self());
                }
            }
            for (Location l : noFlyPoints) {
                if (actualLocation.distance(l) < NO_FY_RANGE) {
                    // Stop flying: another drone is landing or taking off nearby.
                    try {
                        Await.ready(dc.cancelMoveToLocation(), MAX_DURATION_SHORT);
                    } catch (TimeoutException | InterruptedException e) {
                        handleErrorMessage("Cannot cancelMoveToLocation, the drones will probably collide!!!");
                    }
                }
            }
        }
    }

    @Override
    protected void flyingStateChanged(FlyingStateChangedMessage m) {
        switch (m.getState()) {
            case HOVERING:
                if (!blocked && waitForTakeOffFinished) {
                    waitForTakeOffFinished = false;
                    // Go up until cruising altitude.
                    try {
                        Await.ready(dc.moveToLocation(actualLocation.getLatitude(), actualLocation.getLongitude(), cruisingAltitude), MAX_DURATION_LONG);
                    } catch (TimeoutException | InterruptedException e) {
                        handleErrorMessage("Could not send takeoff command to cruising altitude");
                        return;
                    }
                    waitForGoUpUntilCruisingAltitudeFinished = true;
                }
                break;
            case EMERGENCY:
                handleErrorMessage("Drone in emergency");
                landed = true;
                break;
            case LANDED:
                if (!blocked && waitForLandFinished) {
                    // Regular landing at the final way point: flight completed.
                    waitForLandFinished = false;
                    landed = true;
                    blocked = true;
                    if (linkedWithControlTower) {
                        reporterRef.tell(new CompletedMessage(requestMessageBuffer), self());
                        requestMessageBuffer = null;
                    }
                    done = true;
                    reporterRef.tell(new FlightCompletedMessage(droneId, actualLocation), self());
                    return;
                }
                if (!blocked && waitForLandAfterStopFinished) {
                    // Landing triggered by a stop message: shut down the pilot.
                    stop();
                    return;
                }
                landed = true;
                blocked = true;
                break;
        }
    }

    @Override
    protected void navigationStateChanged(NavigationStateChangedMessage m) {
        if (!blocked && m.getState() == NavigationState.AVAILABLE) {
            switch (m.getReason()) {
                case FINISHED:
                    if (waitForGoUpUntilCruisingAltitudeFinished) {
                        // Reached cruising altitude: take off sequence completed.
                        waitForGoUpUntilCruisingAltitudeFinished = false;
                        landed = false;
                        if (linkedWithControlTower) {
                            reporterRef.tell(new CompletedMessage(requestMessageBuffer), self());
                            requestMessageBuffer = null;
                        }
                        goToNextWaypoint();
                        break;
                    }
                    if (!waitForTakeOffFinished && !waitForLandAfterStopFinished && !waitForLandFinished && !waitForGoUpUntilCruisingAltitudeFinished) {
                        goToNextWaypoint();
                    }
                    break;
                case STOPPED:
                    handleErrorMessage("Navigation has stopped.");
            }
        }
    }

    @Override
    protected void addNoFlyPointMessage(AddNoFlyPointMessage m) {
        if (actualLocation.distance(m.getNoFlyPoint()) < NO_FY_RANGE) {
            handleErrorMessage("You cannot add a drone within the no-fly-range " +
                    "of the location where another drone wants to land or to take off");
        } else {
            noFlyPoints.add(m.getNoFlyPoint());
        }
    }

    /**
     * Blocks the pilot and reports the error to the reporter actor and the log.
     *
     * @param s description of the error
     */
    private void handleErrorMessage(String s) {
        blocked = true;
        reporterRef.tell(new FlightControlExceptionMessage(s, droneId), self());
        log.error("FlightControl error with droneID {}: {}", droneId, s);
    }
}
| Simple Pilot fix iterator + log info
| app/drones/flightcontrol/SimplePilot.java | Simple Pilot fix iterator + log info |
|
Java | mit | 1e7d02fb712f0d6a5f795e2d8a049628a91480b9 | 0 | jenkinsci/analysis-model,jenkinsci/analysis-model,jenkinsci/analysis-model,jenkinsci/analysis-model | package edu.hm.hafner.analysis;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
/**
* Computes old, new, and fixed issues based on the reports of two consecutive static analysis runs for the same
* software artifact.
*
* @author Ullrich Hafner
*/
public class IssueDifference {
    private final Report newIssues;
    private final Report fixedIssues;
    private final Report outstandingIssues;
    /** Reference issues indexed by {@code hashCode()} so equality candidates are found without a full scan. */
    private final HashMap<Integer, List<Issue>> referencesByHash;
    /** Reference issues indexed by fingerprint so fingerprint matches are found without a full scan. */
    private final HashMap<String, List<Issue>> referencesByFingerprint;

    /**
     * Creates a new instance of {@link IssueDifference}.
     *
     * @param currentIssues
     *         the issues of the current report
     * @param referenceId
     *         ID identifying the reference report
     * @param referenceIssues
     *         the issues of a previous report (reference)
     */
    public IssueDifference(final Report currentIssues, final String referenceId, final Report referenceIssues) {
        newIssues = currentIssues.copy();
        fixedIssues = referenceIssues.copy();
        outstandingIssues = new Report();
        referencesByHash = new HashMap<>();
        referencesByFingerprint = new HashMap<>();
        for (Issue issue : fixedIssues) {
            addIssueToMap(referencesByHash, issue.hashCode(), issue);
            addIssueToMap(referencesByFingerprint, issue.getFingerprint(), issue);
        }
        List<UUID> removed = matchIssuesByEquals(currentIssues);
        Report secondPass = currentIssues.copy();
        removed.forEach(secondPass::remove);
        matchIssuesByFingerprint(secondPass);
        newIssues.forEach(issue -> issue.setReference(referenceId));
    }

    /** First pass: matches current and reference issues by equality. */
    private List<UUID> matchIssuesByEquals(final Report currentIssues) {
        List<UUID> removedIds = new ArrayList<>();
        for (Issue current : currentIssues) {
            List<Issue> equalIssues = findReferenceByEquals(current);
            if (!equalIssues.isEmpty()) {
                removedIds.add(remove(current, selectIssueWithSameFingerprint(current, equalIssues)));
            }
        }
        return removedIds;
    }

    /** Second pass: matches the remaining issues by fingerprint only. */
    private void matchIssuesByFingerprint(final Report currentIssues) {
        for (Issue current : currentIssues) {
            findReferenceByFingerprint(current).ifPresent(issue -> remove(current, issue));
        }
    }

    /** Adds the issue to the bucket for the given key, creating the bucket on demand. */
    private <K> void addIssueToMap(final HashMap<K, List<Issue>> map, final K key, final Issue issue) {
        map.computeIfAbsent(key, k -> new ArrayList<>()).add(issue);
    }

    /** Removes the issue from the bucket for the given key, dropping empty buckets. */
    private <K> void removeIssueFromMap(final HashMap<K, List<Issue>> map, final K key, final Issue issue) {
        List<Issue> issues = map.get(key);
        issues.remove(issue);
        if (issues.isEmpty()) {
            map.remove(key);
        }
    }

    /**
     * Marks the current issue as outstanding (matched with {@code oldIssue}) and
     * removes the matched reference issue from all look-up structures.
     *
     * @return the ID of the matched current issue
     */
    private UUID remove(final Issue current, final Issue oldIssue) {
        UUID id = current.getId();
        Issue issueWithLatestProperties = newIssues.remove(id);
        issueWithLatestProperties.setReference(oldIssue.getReference());
        outstandingIssues.add(issueWithLatestProperties);
        fixedIssues.remove(oldIssue.getId());
        removeIssueFromMap(referencesByFingerprint, oldIssue.getFingerprint(), oldIssue);
        removeIssueFromMap(referencesByHash, oldIssue.hashCode(), oldIssue);
        return id;
    }

    /** Prefers the candidate with the same fingerprint, falling back to the first candidate. */
    private Issue selectIssueWithSameFingerprint(final Issue current, final List<Issue> equalIssues) {
        return equalIssues.stream()
                .filter(issue -> issue.getFingerprint().equals(current.getFingerprint()))
                .findFirst()
                .orElse(equalIssues.get(0));
    }

    private Optional<Issue> findReferenceByFingerprint(final Issue current) {
        List<Issue> references = referencesByFingerprint.get(current.getFingerprint());
        if (references != null) {
            // Buckets are removed when they become empty, so a present bucket
            // always contains at least one issue.
            return Optional.of(references.get(0));
        }
        return Optional.empty();
    }

    private List<Issue> findReferenceByEquals(final Issue current) {
        List<Issue> references = referencesByHash.get(current.hashCode());
        List<Issue> equalIssues = new ArrayList<>();
        if (references != null) {
            for (Issue reference : references) {
                if (current.equals(reference)) {
                    equalIssues.add(reference);
                }
            }
        }
        return equalIssues;
    }

    /**
     * Returns the outstanding issues. I.e. all issues, that are part of the previous report and that are still part of
     * the current report.
     *
     * @return the outstanding issues
     */
    public Report getOutstandingIssues() {
        return outstandingIssues;
    }

    /**
     * Returns the new issues. I.e. all issues, that are part of the current report but that have not been shown up in
     * the previous report.
     *
     * @return the new issues
     */
    public Report getNewIssues() {
        return newIssues;
    }

    /**
     * Returns the fixed issues. I.e. all issues, that are part of the previous report but that are not present in the
     * current report anymore.
     *
     * @return the fixed issues
     */
    public Report getFixedIssues() {
        return fixedIssues;
    }
}
| src/main/java/edu/hm/hafner/analysis/IssueDifference.java | package edu.hm.hafner.analysis;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
/**
* Computes old, new, and fixed issues based on the reports of two consecutive static analysis runs for the same
* software artifact.
*
* @author Ullrich Hafner
*/
public class IssueDifference {
    private final Report newIssues;
    private final Report fixedIssues;
    private final Report outstandingIssues;
    /** Reference issues indexed by {@code hashCode()}: avoids a full scan per current issue. */
    private final Map<Integer, List<Issue>> referencesByHash = new HashMap<>();
    /** Reference issues indexed by fingerprint: avoids a full scan per current issue. */
    private final Map<String, List<Issue>> referencesByFingerprint = new HashMap<>();

    /**
     * Creates a new instance of {@link IssueDifference}.
     *
     * @param currentIssues
     *         the issues of the current report
     * @param referenceId
     *         ID identifying the reference report
     * @param referenceIssues
     *         the issues of a previous report (reference)
     */
    public IssueDifference(final Report currentIssues, final String referenceId, final Report referenceIssues) {
        newIssues = currentIssues.copy();
        fixedIssues = referenceIssues.copy();
        outstandingIssues = new Report();
        // Index the reference issues once so that the matching passes below do
        // not have to scan all fixed issues for every current issue (O(n*m)).
        for (Issue reference : fixedIssues) {
            referencesByHash.computeIfAbsent(reference.hashCode(), key -> new ArrayList<>()).add(reference);
            referencesByFingerprint.computeIfAbsent(reference.getFingerprint(), key -> new ArrayList<>()).add(reference);
        }
        List<UUID> removed = matchIssuesByEquals(currentIssues);
        Report secondPass = currentIssues.copy();
        removed.forEach(secondPass::remove);
        matchIssuesByFingerprint(secondPass);
        newIssues.forEach(issue -> issue.setReference(referenceId));
    }

    /** First pass: matches current and reference issues by equality. */
    private List<UUID> matchIssuesByEquals(final Report currentIssues) {
        List<UUID> removedIds = new ArrayList<>();
        for (Issue current : currentIssues) {
            List<Issue> equalIssues = findReferenceByEquals(current);
            if (!equalIssues.isEmpty()) {
                removedIds.add(remove(current, selectIssueWithSameFingerprint(current, equalIssues)));
            }
        }
        return removedIds;
    }

    /** Second pass: matches the remaining issues by fingerprint only. */
    private void matchIssuesByFingerprint(final Report currentIssues) {
        for (Issue current : currentIssues) {
            findReferenceByFingerprint(current).ifPresent(issue -> remove(current, issue));
        }
    }

    /**
     * Marks the current issue as outstanding (matched with {@code oldIssue}) and
     * removes the matched reference issue from all look-up structures.
     *
     * @return the ID of the matched current issue
     */
    private UUID remove(final Issue current, final Issue oldIssue) {
        UUID id = current.getId();
        Issue issueWithLatestProperties = newIssues.remove(id);
        issueWithLatestProperties.setReference(oldIssue.getReference());
        outstandingIssues.add(issueWithLatestProperties);
        fixedIssues.remove(oldIssue.getId());
        // Keep the look-up indices in sync with the remaining fixed issues.
        removeFromIndex(referencesByFingerprint, oldIssue.getFingerprint(), oldIssue);
        removeFromIndex(referencesByHash, oldIssue.hashCode(), oldIssue);
        return id;
    }

    /** Removes the issue from the given index, dropping empty buckets. */
    private <K> void removeFromIndex(final Map<K, List<Issue>> index, final K key, final Issue issue) {
        List<Issue> bucket = index.get(key);
        if (bucket != null) {
            bucket.remove(issue);
            if (bucket.isEmpty()) {
                index.remove(key);
            }
        }
    }

    /** Prefers the candidate with the same fingerprint, falling back to the first candidate. */
    private Issue selectIssueWithSameFingerprint(final Issue current, final List<Issue> equalIssues) {
        return equalIssues.stream()
                .filter(issue -> issue.getFingerprint().equals(current.getFingerprint()))
                .findFirst()
                .orElse(equalIssues.get(0));
    }

    private Optional<Issue> findReferenceByFingerprint(final Issue current) {
        List<Issue> references = referencesByFingerprint.get(current.getFingerprint());
        if (references == null) {
            return Optional.empty();
        }
        return Optional.of(references.get(0));
    }

    private List<Issue> findReferenceByEquals(final Issue current) {
        List<Issue> equalIssues = new ArrayList<>();
        List<Issue> references = referencesByHash.get(current.hashCode());
        if (references != null) {
            for (Issue reference : references) {
                if (current.equals(reference)) {
                    equalIssues.add(reference);
                }
            }
        }
        return equalIssues;
    }

    /**
     * Returns the outstanding issues. I.e. all issues, that are part of the previous report and that are still part of
     * the current report.
     *
     * @return the outstanding issues
     */
    public Report getOutstandingIssues() {
        return outstandingIssues;
    }

    /**
     * Returns the new issues. I.e. all issues, that are part of the current report but that have not been shown up in
     * the previous report.
     *
     * @return the new issues
     */
    public Report getNewIssues() {
        return newIssues;
    }

    /**
     * Returns the fixed issues. I.e. all issues, that are part of the previous report but that are not present in the
     * current report anymore.
     *
     * @return the fixed issues
     */
    public Report getFixedIssues() {
        return fixedIssues;
    }
}
| Avoid quadratic times.
| src/main/java/edu/hm/hafner/analysis/IssueDifference.java | Avoid quadratic times. |
|
Java | mit | ac871ba96f746933c46ce6ff624ed0598e77b51b | 0 | skuzzle/TinyPlugz | package de.skuzzle.tinyplugz;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.ServiceLoader;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Provides a fluent builder API for configuring an application wide single
* {@link TinyPlugz} instance.
*
* @author Simon Taddiken
*/
public final class TinyPlugzConfigurator {
    /** Default resource name of tiny plugz class path configuration */
    public static final String TINY_PLUGZ_CONFIG = "tiny-plugz.properties";
    private static final Logger LOG = LoggerFactory.getLogger(TinyPlugz.class);
    /** Lock which synchronizes every non-trivial access to TinyPlugz.instance. */
    protected static final Object DEPLOY_LOCK = new Object();
    private TinyPlugzConfigurator() {
        // hidden constructor
    }
    /**
     * Sets up a {@link TinyPlugz} instance which uses the current thread's
     * context Classloader as parent Classloader.
     * <p>
     * This Classloader will be used for several purposes.
     * First, it serves as parent Classloader for the plugin Classloader which
     * is to be created to access classes and configurations from plugins.
     * Second, the Classloader will be used to look up the TinyPlugz service
     * provider either using the {@link ServiceLoader} or by looking up an
     * explicit implementation class.
     * <p>
     * This method will fail immediately if TinyPlugz already has been
     * configured.
     *
     * @return Fluent builder object for further configuration.
     */
    public static DefineProperties setup() {
        return new Impl(Thread.currentThread().getContextClassLoader());
    }
    /**
     * Sets up a {@link TinyPlugz} instance which uses the given Classloader as
     * parent Classloader.
     * <p>
     * This Classloader will be used for several purposes.
     * First, it serves as parent Classloader for the plugin Classloader which
     * is to be created to access classes and configurations from plugins.
     * Second, the Classloader will be used to look up the TinyPlugz service
     * provider either using the {@link ServiceLoader} or by looking up an
     * explicit implementation class.
     * <p>
     * This method will fail immediately if TinyPlugz already has been
     * configured.
     *
     * @param parentClassLoader The parent Classloader to use.
     * @return Fluent builder object for further configuration.
     */
    public static DefineProperties setupUsingParent(ClassLoader parentClassLoader) {
        Require.nonNull(parentClassLoader, "parentClassLoader");
        return new Impl(parentClassLoader);
    }
    /**
     * Sets up a {@link TinyPlugz} instance which uses the Classloader which
     * loaded the {@link TinyPlugzConfigurator} class as parent Classloader.
     * <p>
     * This Classloader will be used for several purposes.
     * First, it serves as parent Classloader for the plugin Classloader which
     * is to be created to access classes and configurations from plugins.
     * Second, the Classloader will be used to look up the TinyPlugz service
     * provider either using the {@link ServiceLoader} or by looking up an
     * explicit implementation class.
     * <p>
     * This method will fail immediately if TinyPlugz already has been
     * configured.
     *
     * @return Fluent builder object for further configuration.
     */
    public static DefineProperties setupUsingApplicationClassLoader() {
        return new Impl(TinyPlugzConfigurator.class.getClassLoader());
    }
    /**
     * Part of the fluent configurator API. Used to define configuration
     * properties and the plugins to be used.
     *
     * @author Simon Taddiken
     */
    public static interface DefineProperties {
        /**
         * Adds properties read from {@value #TINY_PLUGZ_CONFIG} file from the
         * class path.
         *
         * @return A fluent builder object for further configuration.
         * @throws IllegalStateException If the file can not be found.
         */
        public DefineProperties withClasspathProperties();
        /**
         * Adds properties read from the class path using the given resource
         * name.
         *
         * @param resourceName Name of the properties file resource.
         * @return A fluent builder object for further configuration.
         * @throws IllegalStateException If the file can not be found.
         */
        public DefineProperties withClasspathProperties(String resourceName);
        /**
         * Specifies a single property to insert into the map which will be
         * passed to
         * {@link TinyPlugz#initialize(java.util.Collection, ClassLoader, Map)}
         * .
         *
         * @param name Name of the property.
         * @param value Value of the property.
         * @return A fluent builder object for further configuration.
         */
        DefineProperties withProperty(String name, Object value);
        /**
         * Specifies a property without value. It will automatically get
         * assigned a non-null value.
         *
         * @param name Name of the property.
         * @return A fluent builder object for further configuration.
         */
        DefineProperties withProperty(String name);
        /**
         * Makes all {@link System#getProperties() system properties} available
         * in the map passed to
         * {@link TinyPlugz#initialize(Collection, ClassLoader, Map)}.
         *
         * @return A fluent builder object for further configuration.
         * @since 0.2.0
         */
        DefineProperties withSystemProperties();
        /**
         * Specifies a multiple properties to insert into the map which will be
         * passed to
         * {@link TinyPlugz#initialize(java.util.Collection, ClassLoader, Map)}
         * .
         *
         * @param values Mappings to add.
         * @return A fluent builder object for further configuration.
         */
        DefineProperties withProperties(Map<? extends Object, ? extends Object> values);
        /**
         * Provides the {@link PluginSource} via the given consumer for adding
         * plugins which should be deployed.
         *
         * @param source Consumer for modifying a PluginSourcce.
         * @return A fluent builder object for further configuration.
         */
        DeployTinyPlugz withPlugins(Consumer<PluginSource> source);
    }
    /**
     * Part of the fluent configurator API. Represents the final step and allows
     * to actually deploy the configured TinyPlugz instance.
     *
     * @author Simon Taddiken
     */
    public interface DeployTinyPlugz {
        /**
         * Finally deploys the {@link TinyPlugz} instance using the the
         * configured values. The configured instance will be globally
         * accessible using {@link TinyPlugz#getInstance()}.
         *
         * @return The configured instance.
         * @throws TinyPlugzException When initializing TinyPlugz with the
         *             current configuration fails.
         */
        public TinyPlugz deploy();
    }
    /**
     * Fluent-builder implementation which collects the properties, the plugin
     * source and the parent Classloader before deploying TinyPlugz.
     */
    private static final class Impl implements DefineProperties, DeployTinyPlugz {
        /** Dummy value assigned to properties that are defined without an explicit value. */
        private static final Object NON_NULL_VALUE = new Object();
        // Properties that will be passed to TinyPlugz.initialize.
        private final Map<Object, Object> properties;
        // Collects the plugin URLs configured via withPlugins.
        private final PluginSourceBuilderImpl builder;
        // Parent Classloader for the plugin Classloader and provider look up.
        private final ClassLoader parentCl;
        private Impl(ClassLoader parentCl) {
            // Unsynchronized fail-fast check; deploy() checks again while
            // holding DEPLOY_LOCK.
            Require.state(!TinyPlugz.isDeployed(), "TinyPlugz already deployed");
            this.parentCl = parentCl;
            this.properties = new HashMap<>();
            this.builder = new PluginSourceBuilderImpl();
        }
        @Override
        public DefineProperties withClasspathProperties() {
            return withClasspathProperties(TINY_PLUGZ_CONFIG);
        }
        @Override
        public DefineProperties withClasspathProperties(String resourceName) {
            Require.nonNull(resourceName, "resourceName");
            final URL url = this.parentCl.getResource(resourceName);
            Require.state(url != null, "Resource <%s> not found", resourceName);
            final Properties props = new Properties();
            try (final InputStream in = url.openStream()) {
                props.load(in);
            } catch (IOException e) {
                throw new IllegalStateException(
                        String.format("Resource <%s> could not be read", resourceName),
                        e);
            }
            this.properties.putAll(props);
            return this;
        }
        @Override
        public DefineProperties withProperty(String name, Object value) {
            Require.nonNull(name, "name");
            this.properties.put(name, value);
            return this;
        }
        @Override
        public DefineProperties withProperty(String name) {
            return withProperty(name, NON_NULL_VALUE);
        }
        @Override
        public DefineProperties withSystemProperties() {
            return withProperties(System.getProperties());
        }
        @Override
        public DefineProperties withProperties(
                Map<? extends Object, ? extends Object> values) {
            Require.nonNull(values, "values");
            this.properties.putAll(values);
            return this;
        }
        @Override
        public DeployTinyPlugz withPlugins(Consumer<PluginSource> source) {
            Require.nonNull(source, "source");
            source.accept(this.builder);
            return this;
        }
        @Override
        public TinyPlugz deploy() {
            validateProperties();
            synchronized (DEPLOY_LOCK) {
                // additional synchronized check is required
                Require.state(!TinyPlugz.isDeployed(), "TinyPlugz already deployed");
                final TinyPlugz impl = getInstance();
                LOG.debug("Using '{}' TinyPlugz implementation",
                        impl.getClass().getName());
                final Collection<URL> plugins = this.builder.getPluginUrls()
                        .collect(Collectors.toList());
                impl.initialize(plugins, this.parentCl, this.properties);
                TinyPlugz.deploy(impl);
                return impl;
            }
        }
        /**
         * Chooses the look-up strategy according to the configured
         * {@code FORCE_DEFAULT}/{@code FORCE_IMPLEMENTATION} options (falling
         * back to the SPI strategy) and creates the TinyPlugz instance.
         *
         * @return The TinyPlugz instance to deploy.
         */
        private TinyPlugz getInstance() {
            final TinyPlugzLookUp lookup;
            if (this.properties.get(Options.FORCE_DEFAULT) != null) {
                lookup = TinyPlugzLookUp.DEFAULT_INSTANCE_STRATEGY;
            } else if (this.properties.get(Options.FORCE_IMPLEMENTATION) != null) {
                lookup = TinyPlugzLookUp.STATIC_STRATEGY;
            } else {
                lookup = TinyPlugzLookUp.SPI_STRATEGY;
            }
            LOG.debug("Using '{}' for instantiating TinyPlugz",
                    lookup.getClass().getName());
            return lookup.getInstance(this.parentCl, this.properties);
        }
        /**
         * Ensures that mutually exclusive options have not been combined.
         * Throws if both FORCE_DEFAULT and FORCE_IMPLEMENTATION are set.
         */
        private void validateProperties() {
            final Object forceDefault = this.properties.get(Options.FORCE_DEFAULT);
            final Object forceImplementation = this.properties.get(
                    Options.FORCE_IMPLEMENTATION);
            if (forceDefault != null && forceImplementation != null) {
                throw new TinyPlugzException("Can not use 'FORCE_IMPLEMENTATION' " +
                    "together with 'FORCE_DEFAULT'");
            }
        }
    }
    /**
     * Default TinyPlugz implementation which will be used if no other service
     * provider is found. It relies solely on the defaultXXX methods of the
     * TinyPlugz class.
     *
     * @author Simon Taddiken
     */
    static final class TinyPlugzImpl extends TinyPlugz {
        /** Loads service implementations through the plugin Classloader. */
        private final ServiceLoaderWrapper serviceLoader;
        /** Classloader providing access to classes and resources of all plugins. */
        private ClassLoader pluginClassLoader;
        TinyPlugzImpl() {
            this.serviceLoader = new DefaultServiceLoaderWrapper();
        }
        @Override
        protected final void initialize(Collection<URL> urls,
                ClassLoader parentClassLoader, Map<Object, Object> properties) {
            // Only the plugin Classloader is set up here; the properties are
            // not used by this default implementation.
            this.pluginClassLoader = createClassLoader(urls, parentClassLoader);
        }
        @Override
        protected final void dispose() {
            defaultDispose();
        }
        @Override
        public final ClassLoader getClassLoader() {
            return this.pluginClassLoader;
        }
        @Override
        public final void runMain(String className, String[] args) {
            defaultRunMain(className, args);
        }
        @Override
        public final Optional<URL> getResource(String name) {
            return defaultGetResource(name);
        }
        @Override
        public final ElementIterator<URL> getResources(String name) throws IOException {
            return defaultGetResources(name);
        }
        @Override
        public final void contextClassLoaderScope(ContextAction action) {
            defaultContextClassLoaderScope(action);
        }
        @Override
        public final <T> ElementIterator<T> getServices(Class<T> type) {
            Require.nonNull(type, "type");
            return ElementIterator.wrap(
                    this.serviceLoader.loadService(type, this.pluginClassLoader));
        }
        @Override
        public final <T> Optional<T> getFirstService(Class<T> type) {
            return defaultGetFirstService(type);
        }
        @Override
        public final <T> T getService(Class<T> type) {
            return defaultGetService(type);
        }
    }
}
| tiny-plugz/src/main/java/de/skuzzle/tinyplugz/TinyPlugzConfigurator.java | package de.skuzzle.tinyplugz;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.ServiceLoader;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Provides a fluent builder API for configuring an application wide single
* {@link TinyPlugz} instance.
*
* @author Simon Taddiken
*/
public final class TinyPlugzConfigurator {
/** Default resource name of tiny plugz class path configuration */
public static final String TINY_PLUGZ_CONFIG = "tiny-plugz.properties";
private static final Logger LOG = LoggerFactory.getLogger(TinyPlugz.class);
/** Lock which synchronizes every non-trivial access to TinyPlugz.instance. */
protected static final Object DEPLOY_LOCK = new Object();
private TinyPlugzConfigurator() {
// hidden constructor
}
/**
* Sets up a {@link TinyPlugz} instance which uses the current thread's
* context Classloader as parent Classloader.
* <p>
* This Classloader will be used for several purposes.
* First, it serves as parent Classloader for the plugin Classloader which
* is to be created to access classes and configurations from plugins.
* Second, the Classloader will be used to look up the TinyPlugz service
* provider either using the {@link ServiceLoader} or by looking up an
* explicit implementation class.
* <p>
* This method will fail immediately if TinyPlugz already has been
* configured.
*
* @return Fluent builder object for further configuration.
*/
public static DefineProperties setup() {
return new Impl(Thread.currentThread().getContextClassLoader());
}
/**
 * Sets up a {@link TinyPlugz} instance which uses the given Classloader as
 * parent Classloader.
 * <p>
 * This Classloader will be used for several purposes.
 * First, it serves as parent Classloader for the plugin Classloader which
 * is to be created to access classes and configurations from plugins.
 * Second, the Classloader will be used to look up the TinyPlugz service
 * provider either using the {@link ServiceLoader} or by looking up an
 * explicit implementation class.
 * <p>
 * This method will fail immediately if TinyPlugz already has been
 * configured.
 *
 * @param parentClassLoader The parent Classloader to use.
 * @return Fluent builder object for further configuration.
 */
public static DefineProperties setupUsingParent(ClassLoader parentClassLoader) {
    // Reject null before touching any builder state.
    Require.nonNull(parentClassLoader, "parentClassLoader");
    return new Impl(parentClassLoader);
}
/**
 * Sets up a {@link TinyPlugz} instance whose parent Classloader is the
 * Classloader which loaded the {@link TinyPlugzConfigurator} class itself.
 * <p>
 * The parent Classloader serves two purposes: it becomes the parent of the
 * plugin Classloader created to access classes and configurations from
 * plugins, and it is used to look up the TinyPlugz service provider (via
 * the {@link ServiceLoader} or an explicit implementation class).
 * <p>
 * This method fails immediately if TinyPlugz has already been configured.
 *
 * @return Fluent builder object for further configuration.
 */
public static DefineProperties setupUsingApplicationClassLoader() {
    final ClassLoader ownLoader = TinyPlugzConfigurator.class.getClassLoader();
    return new Impl(ownLoader);
}
/**
 * Part of the fluent configurator API. Used to define configuration
 * properties and the plugins to be used.
 *
 * @author Simon Taddiken
 */
public interface DefineProperties {

    /**
     * Adds properties read from the {@value #TINY_PLUGZ_CONFIG} file from the
     * class path.
     *
     * @return A fluent builder object for further configuration.
     * @throws IllegalStateException If the file can not be found.
     */
    DefineProperties withClasspathProperties();

    /**
     * Adds properties read from the class path using the given resource
     * name.
     *
     * @param resourceName Name of the properties file resource.
     * @return A fluent builder object for further configuration.
     * @throws IllegalStateException If the file can not be found.
     */
    DefineProperties withClasspathProperties(String resourceName);

    /**
     * Specifies a single property to insert into the map which will be
     * passed to
     * {@link TinyPlugz#initialize(java.util.Collection, ClassLoader, Map)}
     * .
     *
     * @param name Name of the property.
     * @param value Value of the property.
     * @return A fluent builder object for further configuration.
     */
    DefineProperties withProperty(String name, Object value);

    /**
     * Specifies a property without value. It will automatically get
     * assigned a non-null value.
     *
     * @param name Name of the property.
     * @return A fluent builder object for further configuration.
     */
    DefineProperties withProperty(String name);

    /**
     * Makes all {@link System#getProperties() system properties} available
     * in the map passed to
     * {@link TinyPlugz#initialize(Collection, ClassLoader, Map)}.
     *
     * @return A fluent builder object for further configuration.
     * @since 0.2.0
     */
    DefineProperties withSystemProperties();

    /**
     * Specifies multiple properties to insert into the map which will be
     * passed to
     * {@link TinyPlugz#initialize(java.util.Collection, ClassLoader, Map)}
     * .
     *
     * @param values Mappings to add.
     * @return A fluent builder object for further configuration.
     */
    DefineProperties withProperties(Map<? extends Object, ? extends Object> values);

    /**
     * Provides the {@link PluginSource} via the given consumer for adding
     * plugins which should be deployed.
     *
     * @param source Consumer for modifying a PluginSource.
     * @return A fluent builder object for further configuration.
     */
    DeployTinyPlugz withPlugins(Consumer<PluginSource> source);
}
/**
 * Part of the fluent configurator API. Represents the final step and allows
 * to actually deploy the configured TinyPlugz instance.
 *
 * @author Simon Taddiken
 */
public interface DeployTinyPlugz {

    /**
     * Finally deploys the {@link TinyPlugz} instance using the configured
     * values. The configured instance will be globally accessible using
     * {@link TinyPlugz#getInstance()}.
     *
     * @return The configured instance.
     * @throws TinyPlugzException When initializing TinyPlugz with the
     *             current configuration fails.
     */
    TinyPlugz deploy();
}
// Single builder implementation backing both fluent interfaces.
private static final class Impl implements DefineProperties, DeployTinyPlugz {
    /** Shared placeholder assigned to properties that are specified without a value. */
    private static final Object NON_NULL_VALUE = new Object();
    /** Accumulated properties handed to TinyPlugz.initialize on deployment. */
    private final Map<Object, Object> properties;
    /** Collects the plugin URLs to be deployed. */
    private final PluginSourceBuilderImpl builder;
    /** Parent Classloader for the plugin Classloader and for provider lookup. */
    private final ClassLoader parentCl;
    private Impl(ClassLoader parentCl) {
        // Fail fast; deploy() re-checks this under DEPLOY_LOCK.
        Require.state(!TinyPlugz.isDeployed(), "TinyPlugz already deployed");
        this.parentCl = parentCl;
        this.properties = new HashMap<>();
        this.builder = new PluginSourceBuilderImpl();
    }
    @Override
    public DefineProperties withClasspathProperties() {
        // Delegates to the explicit-name variant with the default resource name.
        return withClasspathProperties(TINY_PLUGZ_CONFIG);
    }
    @Override
    public DefineProperties withClasspathProperties(String resourceName) {
        Require.nonNull(resourceName, "resourceName");
        // Resolve the resource through the configured parent Classloader.
        final URL url = this.parentCl.getResource(resourceName);
        Require.state(url != null, "Resource <%s> not found", resourceName);
        final Properties props = new Properties();
        try (final InputStream in = url.openStream()) {
            props.load(in);
        } catch (IOException e) {
            // Re-thrown unchecked: an unreadable config file is a setup error.
            throw new IllegalStateException(
                    String.format("Resource <%s> could not be read", resourceName),
                    e);
        }
        this.properties.putAll(props);
        return this;
    }
    @Override
    public DefineProperties withProperty(String name, Object value) {
        Require.nonNull(name, "name");
        this.properties.put(name, value);
        return this;
    }
    @Override
    public DefineProperties withProperty(String name) {
        // Value-less properties receive the shared non-null placeholder.
        return withProperty(name, NON_NULL_VALUE);
    }
    @Override
    public DefineProperties withSystemProperties() {
        return withProperties(System.getProperties());
    }
    @Override
    public DefineProperties withProperties(
            Map<? extends Object, ? extends Object> values) {
        Require.nonNull(values, "values");
        this.properties.putAll(values);
        return this;
    }
    @Override
    public DeployTinyPlugz withPlugins(Consumer<PluginSource> source) {
        Require.nonNull(source, "source");
        // The consumer mutates our builder; the URLs are read later in deploy().
        source.accept(this.builder);
        return this;
    }
    @Override
    public TinyPlugz deploy() {
        validateProperties();
        // Serialize deployment so only one instance can ever win the race.
        synchronized (DEPLOY_LOCK) {
            // additional synchronized check is required
            Require.state(!TinyPlugz.isDeployed(), "TinyPlugz already deployed");
            final TinyPlugz impl = getInstance();
            LOG.debug("Using '{}' TinyPlugz implementation",
                    impl.getClass().getName());
            final Collection<URL> plugins = this.builder.getPluginUrls()
                    .collect(Collectors.toList());
            impl.initialize(plugins, this.parentCl,
                    this.properties);
            TinyPlugz.deploy(impl);
            return impl;
        }
    }
    // Chooses the look-up strategy from the FORCE_* options, defaulting to SPI.
    private TinyPlugz getInstance() {
        final TinyPlugzLookUp lookup;
        if (this.properties.get(Options.FORCE_DEFAULT) != null) {
            lookup = TinyPlugzLookUp.DEFAULT_INSTANCE_STRATEGY;
        } else if (this.properties.get(Options.FORCE_IMPLEMENTATION) != null) {
            lookup = TinyPlugzLookUp.STATIC_STRATEGY;
        } else {
            lookup = TinyPlugzLookUp.SPI_STRATEGY;
        }
        LOG.debug("Using '{}' for instantiating TinyPlugz",
                lookup.getClass().getName());
        return lookup.getInstance(this.parentCl, this.properties);
    }
    // Rejects mutually exclusive option combinations before any deployment work.
    private void validateProperties() {
        final Object forceDefault = this.properties.get(Options.FORCE_DEFAULT);
        final Object forceImplementation = this.properties.get(
                Options.FORCE_IMPLEMENTATION);
        if (forceDefault != null && forceImplementation != null) {
            throw new TinyPlugzException("Can not use 'FORCE_IMPLEMENTATION' " +
                    "together with 'FORCE_DEFAULT'");
        }
    }
}
/**
 * Default TinyPlugz implementation which will be used if no other service
 * provider is found. It relies solely on the defaultXXX methods of the
 * TinyPlugz class.
 *
 * @author Simon Taddiken
 */
static final class TinyPlugzImpl extends TinyPlugz {
    /** Looks up services from the plugin Classloader. */
    private final ServiceLoaderWrapper serviceLoader;
    /** Assigned once in initialize; loads classes from the deployed plugins. */
    private ClassLoader pluginClassLoader;
    TinyPlugzImpl() {
        this.serviceLoader = new DefaultServiceLoaderWrapper();
    }
    @Override
    protected final void initialize(Collection<URL> urls,
            ClassLoader parentClassLoader, Map<Object, Object> properties) {
        // The properties map is not consulted by this default implementation.
        this.pluginClassLoader = createClassLoader(urls, parentClassLoader);
    }
    @Override
    protected final void dispose() {
        defaultDispose();
    }
    @Override
    public final ClassLoader getClassLoader() {
        return this.pluginClassLoader;
    }
    @Override
    public final void runMain(String className, String[] args) {
        defaultRunMain(className, args);
    }
    @Override
    public final Optional<URL> getResource(String name) {
        return defaultGetResource(name);
    }
    @Override
    public final ElementIterator<URL> getResources(String name) throws IOException {
        return defaultGetResources(name);
    }
    @Override
    public final void contextClassLoaderScope(ContextAction action) {
        defaultContextClassLoaderScope(action);
    }
    @Override
    public final <T> ElementIterator<T> getServices(Class<T> type) {
        Require.nonNull(type, "type");
        // Services are resolved against the plugin Classloader, not the parent.
        return ElementIterator.wrap(
                this.serviceLoader.loadService(type, this.pluginClassLoader));
    }
    @Override
    public final <T> Optional<T> getFirstService(Class<T> type) {
        return defaultGetFirstService(type);
    }
    @Override
    public final <T> T getService(Class<T> type) {
        return defaultGetService(type);
    }
}
}
| formatting
| tiny-plugz/src/main/java/de/skuzzle/tinyplugz/TinyPlugzConfigurator.java | formatting |
|
Java | agpl-3.0 | 866dd838a02887377ad44e7bcb5168f826e41e55 | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | 6b78fd30-2e60-11e5-9284-b827eb9e62be | hello.java | 6b7395e8-2e60-11e5-9284-b827eb9e62be | 6b78fd30-2e60-11e5-9284-b827eb9e62be | hello.java | 6b78fd30-2e60-11e5-9284-b827eb9e62be |
|
Java | lgpl-2.1 | badffd8b86c52566bc78f2ecdd9e2a18d9f5a9b5 | 0 | concord-consortium/data | package org.concord.data.state;
import org.concord.data.Unit;
import org.concord.data.stream.TimerDataStoreDataProducer;
import org.concord.framework.data.stream.DataChannelDescription;
import org.concord.framework.data.stream.DataStore;
import org.concord.framework.data.stream.DataStreamDescription;
import org.concord.framework.otrunk.DefaultOTController;
import org.concord.framework.otrunk.OTObject;
import org.concord.framework.otrunk.OTObjectList;
/**
 * Controller mapping an {@link OTTimerDataStoreDataProducer} OT object onto a
 * real {@link TimerDataStoreDataProducer}: it applies the sample time, labels
 * the dt channel as time (seconds) and attaches one distance channel (meters)
 * per configured data store.
 */
public class OTTimerDataStoreDataProducerController extends DefaultOTController
{
    // Raw Class types kept as-is: the OT framework reads these fields reflectively.
    public static Class [] realObjectClasses = {TimerDataStoreDataProducer.class};
    public static Class otObjectClass = OTTimerDataStoreDataProducer.class;

    public void loadRealObject(Object realObject)
    {
        TimerDataStoreDataProducer dataProducer = (TimerDataStoreDataProducer) realObject;
        OTTimerDataStoreDataProducer otDataProducer = (OTTimerDataStoreDataProducer) otObject;
        float sampleTime = otDataProducer.getSampleTime();
        DataStreamDescription dataDescription = dataProducer.getDataDescription();
        dataDescription.setDt(sampleTime);
        // The implicit dt channel carries the elapsed time in seconds.
        DataChannelDescription chDesc = dataDescription.getDtChannelDescription();
        chDesc.setUnit(Unit.getUnit(Unit.UNIT_CODE_S));
        chDesc.setName("time");
        if (otDataProducer.getDataStore() != null) {
            // for backwards compatibility: a single store occupies channel 0
            dataProducer.setNumChannels(2);
            dataProducer.getDataDescription().setDataType(DataStreamDescription.DATA_SERIES);
            configureDistanceChannel(dataDescription, 0);
            DataStore dataStore = (DataStore) controllerService.getRealObject(otDataProducer.getDataStore());
            dataProducer.addDataStore(dataStore);
        } else {
            OTObjectList dataStores = otDataProducer.getDataStores();
            // One channel per store, plus the implicit dt channel.
            dataProducer.setNumChannels(dataStores.size() + 1);
            dataProducer.getDataDescription().setDataType(DataStreamDescription.DATA_SERIES);
            int channelNum = 0;
            for (OTObject store : dataStores) {
                configureDistanceChannel(dataDescription, channelNum);
                channelNum++;
                DataStore dataStore = (DataStore) controllerService.getRealObject(store);
                dataProducer.addDataStore(dataStore);
            }
        }
    }

    /**
     * Labels channel {@code channelNum} as a distance measurement in meters.
     * FIXME Eventually this could be abstracted and settable on the otobject.
     */
    private void configureDistanceChannel(DataStreamDescription dataDescription, int channelNum)
    {
        DataChannelDescription chDesc = dataDescription.getChannelDescription(channelNum);
        chDesc.setUnit(Unit.getUnit(Unit.UNIT_CODE_METER));
        chDesc.setName("distance");
    }

    public void registerRealObject(Object realObject)
    {
        // TODO Auto-generated method stub
    }

    public void saveRealObject(Object realObject)
    {
        // TODO Auto-generated method stub
    }

    @Override
    public boolean isRealObjectSharable(OTObject otObject, Object realObject) {
        // Always share one real producer per OT object, so events triggered on
        // one view affect the producer attached to other views as well.
        return true;
    }
}
| src/org/concord/data/state/OTTimerDataStoreDataProducerController.java | package org.concord.data.state;
import org.concord.data.Unit;
import org.concord.data.stream.TimerDataStoreDataProducer;
import org.concord.framework.data.stream.DataChannelDescription;
import org.concord.framework.data.stream.DataStore;
import org.concord.framework.data.stream.DataStreamDescription;
import org.concord.framework.otrunk.DefaultOTController;
import org.concord.framework.otrunk.OTObject;
import org.concord.framework.otrunk.OTObjectList;
public class OTTimerDataStoreDataProducerController extends DefaultOTController
{
public static Class [] realObjectClasses = {TimerDataStoreDataProducer.class};
public static Class otObjectClass = OTTimerDataStoreDataProducer.class;
public void loadRealObject(Object realObject)
{
TimerDataStoreDataProducer dataProducer = (TimerDataStoreDataProducer) realObject;
OTTimerDataStoreDataProducer otDataProducer = (OTTimerDataStoreDataProducer) otObject;
float sampleTime = otDataProducer.getSampleTime();
DataStreamDescription dataDescription = dataProducer.getDataDescription();
dataDescription.setDt(sampleTime);
DataChannelDescription chDesc = dataDescription.getDtChannelDescription();
chDesc.setUnit(Unit.getUnit(Unit.UNIT_CODE_S));
chDesc.setName("time");
if (otDataProducer.getDataStore() != null) {
// for backwards compatibility
dataProducer.setNumChannels(2);
dataProducer.getDataDescription().setDataType(DataStreamDescription.DATA_SERIES);
// FIXME Eventually this could be abstracted and settable on the otobject
chDesc = dataDescription.getChannelDescription(0);
chDesc.setUnit(Unit.getUnit(Unit.UNIT_CODE_METER));
chDesc.setName("distance");
DataStore dataStore = (DataStore) controllerService.getRealObject(otDataProducer.getDataStore());
dataProducer.addDataStore(dataStore);
} else {
OTObjectList dataStores = otDataProducer.getDataStores();
dataProducer.setNumChannels(dataStores.size() + 1);
dataProducer.getDataDescription().setDataType(DataStreamDescription.DATA_SERIES);
int channelNum = 0;
for (OTObject store : dataStores) {
// FIXME Eventually this could be abstracted and settable on the otobject
chDesc = dataDescription.getChannelDescription(channelNum);
chDesc.setUnit(Unit.getUnit(Unit.UNIT_CODE_METER));
chDesc.setName("distance");
channelNum++;
DataStore dataStore = (DataStore) controllerService.getRealObject(store);
dataProducer.addDataStore(dataStore);
}
}
}
public void registerRealObject(Object realObject)
{
// TODO Auto-generated method stub
}
public void saveRealObject(Object realObject)
{
// TODO Auto-generated method stub
}
}
| Make sure the controller always returns the same TimerDataStoreDataProducer object for the same OT object, so that events triggered on one graph can affect the producer attached to other objects.
git-svn-id: 68ef3ef1a966669f855010d8185b1a1aba9038fc@23256 6e01202a-0783-4428-890a-84243c50cc2b
| src/org/concord/data/state/OTTimerDataStoreDataProducerController.java | Make sure the controller always returns the same TimerDataStoreDataProducer object for the same OT object, so that events triggered on one graph can affect the producer attached to other objects. |
|
Java | lgpl-2.1 | b5e23513abcf818f0bf89df242c846dc30b5e97f | 0 | samskivert/samskivert,samskivert/samskivert | //
// $Id: Tuple.java,v 1.5 2002/12/19 22:41:13 mdb Exp $
//
// samskivert library - useful routines for java programs
// Copyright (C) 2001 Michael Bayne
//
// This library is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation; either version 2.1 of the License, or
// (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.samskivert.util;
import java.io.Serializable;
/**
 * A tuple is a simple object that holds a reference to two other objects.
 * It provides hashcode and equality semantics that allow it to be used to
 * combine two objects into a single key (for hashtables, etc.).
 *
 * <p>Either element may be null (the blank constructor leaves both null);
 * {@link #hashCode} and {@link #equals} are null-safe.
 */
public class Tuple implements Serializable
{
    /** The left object. */
    public Object left;

    /** The right object. */
    public Object right;

    /** Construct a tuple with the specified two objects. */
    public Tuple (Object left, Object right)
    {
        this.left = left;
        this.right = right;
    }

    /** Construct a blank tuple. */
    public Tuple ()
    {
    }

    /**
     * Returns the combined hashcode of the two elements. A null element
     * contributes zero, so a blank tuple no longer throws a
     * NullPointerException.
     */
    public int hashCode ()
    {
        int lcode = (left == null) ? 0 : left.hashCode();
        int rcode = (right == null) ? 0 : right.hashCode();
        return lcode ^ rcode;
    }

    /**
     * A tuple is equal to another tuple if the left and right elements
     * are equal to the left and right elements (respectively) of the
     * other tuple. Null elements compare equal only to null.
     */
    public boolean equals (Object other)
    {
        if (!(other instanceof Tuple)) {
            return false;
        }
        Tuple to = (Tuple)other;
        boolean leftEq = (left == null) ? (to.left == null) : left.equals(to.left);
        boolean rightEq = (right == null) ? (to.right == null) : right.equals(to.right);
        return leftEq && rightEq;
    }

    /**
     * Generates a string representation of this tuple.
     */
    public String toString ()
    {
        return "[left=" + left + ", right=" + right + "]";
    }

    /** Change this if the fields or inheritance hierarchy ever changes
     * (which is extremely unlikely). We override this because I'm tired
     * of serialized crap not working depending on whether I compiled with
     * jikes or javac. */
    private static final long serialVersionUID = 1;
}
| projects/samskivert/src/java/com/samskivert/util/Tuple.java | //
// $Id: Tuple.java,v 1.5 2002/12/19 22:41:13 mdb Exp $
//
// samskivert library - useful routines for java programs
// Copyright (C) 2001 Michael Bayne
//
// This library is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation; either version 2.1 of the License, or
// (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.samskivert.util;
import java.io.Serializable;
/**
 * A tuple is a simple object that holds a reference to two other objects.
 * It provides hashcode and equality semantics that allow it to be used to
 * combine two objects into a single key (for hashtables, etc.).
 */
public class Tuple implements Serializable
{
    /** The left object. */
    public Object left;
    /** The right object. */
    public Object right;
    /** Construct a tuple with the specified two objects. */
    public Tuple (Object left, Object right)
    {
        this.left = left;
        this.right = right;
    }
    /** Construct a blank tuple. */
    public Tuple ()
    {
        // NOTE(review): leaves both fields null; hashCode()/equals() will then
        // throw NullPointerException until the fields are assigned.
    }
    /**
     * Returns the combined hashcode of the two elements.
     */
    public int hashCode ()
    {
        return left.hashCode() ^ right.hashCode();
    }
    /**
     * A tuple is equal to another tuple if the left and right elements
     * are equal to the left and right elements (respectively) of the
     * other tuple.
     */
    public boolean equals (Object other)
    {
        if (other instanceof Tuple) {
            Tuple to = (Tuple)other;
            return (left.equals(to.left) && right.equals(to.right));
        } else {
            return false;
        }
    }
    /**
     * Generates a string representation of this tuple.
     */
    public String toString ()
    {
        return "[left=" + left + ", right=" + right + "]";
    }
    /** Change this if the fields or inheritance hierarchy ever changes
     * (which is extremely unlikely). We override this because I'm tired
     * of serialized crap not working depending on whether I compiled with
     * jikes or javac. */
    private static final long serialVersionUID = 1;
}
| Re-fixed typo, oh.
git-svn-id: 64ebf368729f38804935acb7146e017e0f909c6b@1525 6335cc39-0255-0410-8fd6-9bcaacd3b74c
| projects/samskivert/src/java/com/samskivert/util/Tuple.java | Re-fixed typo, oh. |
|
Java | lgpl-2.1 | 545739221730b672ada2295028bb4cde009803c1 | 0 | drhee/toxoMine,drhee/toxoMine,drhee/toxoMine,drhee/toxoMine,drhee/toxoMine,drhee/toxoMine,drhee/toxoMine,drhee/toxoMine,drhee/toxoMine | package org.intermine.webservice.server;
/*
* Copyright (C) 2002-2011 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.zip.GZIPOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.servlet.RequestDispatcher;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.intermine.api.InterMineAPI;
import org.intermine.api.profile.Profile;
import org.intermine.api.profile.ProfileManager;
import org.intermine.api.profile.ProfileManager.ApiPermission;
import org.intermine.api.profile.ProfileManager.AuthenticationException;
import org.intermine.util.StringUtil;
import org.intermine.web.logic.export.ResponseUtil;
import org.intermine.web.logic.profile.LoginHandler;
import org.intermine.web.logic.session.SessionMethods;
import org.intermine.webservice.server.exceptions.BadRequestException;
import org.intermine.webservice.server.exceptions.InternalErrorException;
import org.intermine.webservice.server.exceptions.ServiceException;
import org.intermine.webservice.server.exceptions.ServiceForbiddenException;
import org.intermine.webservice.server.output.CSVFormatter;
import org.intermine.webservice.server.output.JSONCountFormatter;
import org.intermine.webservice.server.output.JSONFormatter;
import org.intermine.webservice.server.output.JSONObjectFormatter;
import org.intermine.webservice.server.output.JSONResultFormatter;
import org.intermine.webservice.server.output.JSONRowFormatter;
import org.intermine.webservice.server.output.JSONTableFormatter;
import org.intermine.webservice.server.output.MemoryOutput;
import org.intermine.webservice.server.output.Output;
import org.intermine.webservice.server.output.StreamedOutput;
import org.intermine.webservice.server.output.TabFormatter;
import org.intermine.webservice.server.output.XMLFormatter;
/**
*
* Base class for web services. See methods of class to be able implement
* subclass. <h3>Output</h3> There can be 3 types of output:
* <ul>
* <li>Only Error output
* <li>Complete results - xml, tab separated, html
* <li>Incomplete results - error messages are appended at the end
* </ul>
*
* <h3>Web service design</h3>
* <ul>
* <li>Request is parsed with corresponding RequestProcessor class and returned
* as a corresponding Input class.
* <li>Web services are subclasses of WebService class.
* <li>Web services use implementations of Output class to print results.
* <li>Request parameter names are constants in corresponding
* RequestProcessorBase subclass.
 * <li>Servlets are used only for forwarding to the corresponding web service,
 * which is always created anew. With this implementation the fields of each new
 * service are correctly initialized and no values linger from previous requests.
* </ul>
* For using of web services see InterMine wiki pages.
*
* @author Jakub Kulaviak
* @author Alex Kalderimis
* @version
*/
public abstract class WebService
{
/** Default jsonp callback **/
public static final String DEFAULT_CALLBACK = "callback";
/** The format for when no value is given **/
public static final int EMPTY_FORMAT = -1;
/** The Unknown format **/
public static final int UNKNOWN_FORMAT = -2;
/** XML format constant **/
public static final int XML_FORMAT = 0;
/** TSV format constant **/
public static final int TSV_FORMAT = 1;
/** HTML format constant **/
public static final int HTML_FORMAT = 2;
/** CSV format constant **/
public static final int CSV_FORMAT = 3;
/** Count format constant **/
public static final int COUNT_FORMAT = 4;
// FORMAT CONSTANTS BETWEEN 20-40 ARE RESERVED FOR JSON FORMATS!!
/** Start of JSON format range **/
public static final int JSON_RANGE_START = 20;
/** End of JSON format range **/
public static final int JSON_RANGE_END = 40;
/** JSONP format constant **/
public static final int JSON_FORMAT = 20;
/** JSONP format constant **/
public static final int JSONP_FORMAT = 21;
/** JSON Object format constant **/
public static final int JSON_OBJ_FORMAT = 22;
/** JSONP Object format constant **/
public static final int JSONP_OBJ_FORMAT = 23;
/** JSON Table format constant **/
public static final int JSON_TABLE_FORMAT = 24;
/** JSONP Table format constant **/
public static final int JSONP_TABLE_FORMAT = 25;
/** JSON Row format constant **/
public static final int JSON_ROW_FORMAT = 26;
/** JSONP Row format constant **/
public static final int JSONP_ROW_FORMAT = 27;
/** JSON count format constant **/
public static final int JSON_COUNT_FORMAT = 28;
/** JSONP count format constant **/
public static final int JSONP_COUNT_FORMAT = 29;
/** JSON data table format constant **/
public static final int JSON_DATA_TABLE_FORMAT = 30;
/** JSONP data table format constant **/
public static final int JSONP_DATA_TABLE_FORMAT = 31;
private static final String COMPRESS = "compress";
private static final String GZIP = "gzip";
private static final String ZIP = "zip";
private static final String WEB_SERVICE_DISABLED_PROPERTY = "webservice.disabled";
private static final Logger LOG = Logger.getLogger(WebService.class);
private static final String FORWARD_PATH = "/webservice/table.jsp";
private static final String AUTHENTICATION_FIELD_NAME = "Authorization";
private static final String AUTH_TOKEN_PARAM_KEY = "token";
private static final Profile ANON_PROFILE = new AnonProfile();
protected HttpServletRequest request;
protected HttpServletResponse response;
protected Output output;
protected InterMineAPI im;
private ApiPermission permission = null;
/**
 * Construct the web service with the InterMine API object that gives access
 * to the core InterMine functionality.
 *
 * @param im the InterMine application
 */
public WebService(InterMineAPI im) {
    // Stored for request processing (e.g. profile lookup during authentication).
    this.im = im;
}
/**
 * Starting method of web service. The web service should be run like
 *
 * <pre>
 * new ListsService().service(request, response);
 * </pre>
 *
 * Ensures initialisation of web service and makes steps common for all web
 * services and after that executes the <tt>execute</tt> method, that should be
 * overwritten with each web service.
 *
 * <p>Lifecycle: store request/response, build output, check the service is
 * enabled, authenticate, init/validate state, execute; then flush output,
 * clean up, and invalidate the session. Any Throwable from the main phase is
 * converted into an error response rather than propagated.
 *
 * @param request The request, as received by the servlet.
 * @param response The response, as handled by the servlet.
 */
public void service(HttpServletRequest request, HttpServletResponse response) {
    try {
        this.request = request;
        try {
            // Decode parameters as UTF-8; failure is logged but not fatal.
            request.setCharacterEncoding("UTF-8");
        } catch (UnsupportedEncodingException ex) {
            LOG.error(ex);
        }
        this.response = response;
        response.setHeader("Access-Control-Allow-Origin", "*"); // Allow cross domain requests.
        initOutput(response);
        checkEnabled();
        authenticate();
        initState();
        validateState();
        execute();
    } catch (Throwable t) {
        // Report the failure through the normal output channel.
        sendError(t, response);
    }
    try {
        output.flush();
    } catch (Throwable t) {
        logError(t, "Error flushing", 500);
    }
    try {
        cleanUp();
    } catch (Throwable t) {
        LOG.error("Error cleaning up", t);
    }
    // Do not persist sessions. All requests should be state-less.
    request.getSession().invalidate();
}
/** Rejects the request with 403 if web services are switched off in the web properties. */
private void checkEnabled() {
    final Properties webProperties = SessionMethods.getWebProperties(
            request.getSession().getServletContext());
    final String disabled = webProperties.getProperty(WEB_SERVICE_DISABLED_PROPERTY);
    if ("true".equalsIgnoreCase(disabled)) {
        throw new ServiceForbiddenException("Web service is disabled.");
    }
}
/**
 * Subclasses may put clean-up code here, to be run after the request has been executed.
 */
protected void cleanUp() {
    // No-op stub: the base class holds nothing that needs releasing.
}
/**
 * Subclasses can put initialisation here.
 * Called after authentication and before {@code validateState()}.
 */
protected void initState() {
    // No-op stub
}
/**
 * Subclasses can put initialisation checks here.
 * The main use case is for confirming
 * authentication.
 */
protected void validateState() {
    // No-op stub
}
/**
 * If user name and password is specified in request, then it setups user
 * profile in session. User was authenticated. It uses HTTP basic access
 * authentication.
 * {@link "http://en.wikipedia.org/wiki/Basic_access_authentication"}
 */
private void authenticate() {
    // A token in the query string takes precedence over the Authorization header.
    final String authToken = request.getParameter(AUTH_TOKEN_PARAM_KEY);
    final ProfileManager pm = im.getProfileManager();
    final HttpSession session = request.getSession();
    // Anonymous requests get the anonymous profile.
    SessionMethods.setProfile(session, ANON_PROFILE);
    try {
        if (StringUtils.isEmpty(authToken)) {
            final String authString = request.getHeader(AUTHENTICATION_FIELD_NAME);
            if (StringUtils.isEmpty(authString) || formatIsJSONP()) {
                return; // Not Authenticated.
            }
            // Strip off the "Basic " scheme prefix - but don't require it.
            // (Previously this was substring(indexOf("Basic ") + 1), which
            // removed only the first character of the prefix and corrupted
            // the Base64 payload whenever the prefix was present.)
            final int schemeIdx = authString.indexOf("Basic ");
            final String encoded = (schemeIdx < 0)
                    ? authString
                    : authString.substring(schemeIdx + "Basic ".length());
            final String decoded = new String(Base64.decodeBase64(encoded.getBytes()));
            final String[] parts = decoded.split(":", 2);
            if (parts.length != 2) {
                throw new BadRequestException(
                        "Invalid request authentication. "
                        + "Authorization field contains invalid value. "
                        + "Decoded authorization value: " + parts[0]);
            }
            final String username = parts[0];
            final String password = parts[1];
            permission = pm.getPermission(username, password, im.getClassKeys());
        } else {
            permission = pm.getPermission(authToken, im.getClassKeys());
        }
    } catch (AuthenticationException e) {
        throw new ServiceForbiddenException(e.getMessage(), e);
    }
    // Bind the authenticated profile to this request's session.
    LoginHandler.setUpProfile(session, permission.getProfile());
}
/**
 * Reports a failure through the output, choosing the HTTP status from the
 * exception type. JSON-P requests keep a 200 status (so browsers can still
 * run the callback) but receive the error in the payload.
 *
 * @param t the failure to report
 * @param response the response whose status may be set
 */
private void sendError(Throwable t, HttpServletResponse response) {
    String msg = WebServiceConstants.SERVICE_FAILED_MSG;
    // Use the exception's own message when it actually has content.
    // (Was "length() >= 0", which is always true and let empty messages
    // clobber the default.)
    if (t.getMessage() != null && t.getMessage().length() > 0) {
        msg = t.getMessage();
    }
    int code;
    if (t instanceof ServiceException) {
        ServiceException ex = (ServiceException) t;
        code = ex.getHttpErrorCode();
    } else {
        code = Output.SC_INTERNAL_SERVER_ERROR;
    }
    logError(t, msg, code);
    if (!formatIsJSONP()) {
        // Don't set errors statuses on jsonp requests, to enable
        // better error checking in the browser.
        response.setStatus(code);
    } else {
        // But do set callbacks
        String callback = getCallback();
        if (callback == null) {
            callback = "makeInterMineResultsTable";
        }
        Map<String, Object> attributes = new HashMap<String, Object>();
        attributes.put(JSONResultFormatter.KEY_CALLBACK, callback);
        output.setHeaderAttributes(attributes);
    }
    output.setError(msg, code);
    LOG.debug("Set error to : " + msg + "," + code);
}
/**
 * Logs a failed request at debug level, including the stack trace and — for
 * internal (500) errors — the request parameters.
 */
private void logError(Throwable t, String msg, int code) {
    // Render the stack trace into a string so it can be appended to the log line.
    final ByteArrayOutputStream traceBuffer = new ByteArrayOutputStream();
    final PrintStream tracePrinter = new PrintStream(traceBuffer);
    t.printStackTrace(tracePrinter);
    tracePrinter.flush();
    final String trace = traceBuffer.toString();
    if (code == Output.SC_INTERNAL_SERVER_ERROR) {
        LOG.debug("Service failed by internal error. Request parameters: \n"
                + requestParametersToString() + trace);
    } else {
        LOG.debug("Service didn't succeed. It's not an internal error. "
                + "Reason: " + getErrorDescription(msg, code) + "\n" + trace);
    }
}
/**
 * Renders all request parameters as "name: value" lines, one line per value,
 * for inclusion in error logs.
 *
 * @return the formatted parameter listing
 */
private String requestParametersToString() {
    StringBuilder sb = new StringBuilder();
    Map<String, String[]> map = request.getParameterMap();
    // Iterate entries directly instead of keySet() + get(), avoiding a
    // second lookup per parameter name.
    for (Map.Entry<String, String[]> entry : map.entrySet()) {
        for (String value : entry.getValue()) {
            sb.append(entry.getKey());
            sb.append(": ");
            sb.append(value);
            sb.append("\n");
        }
    }
    return sb.toString();
}
private String getErrorDescription(String msg, int errorCode) {
StringBuilder sb = new StringBuilder();
sb.append(StatusDictionary.getDescription(errorCode));
sb.append(msg);
return sb.toString();
}
/**
* @return Whether or not the requested result format is one of our JSON formats.
*/
protected boolean formatIsJSON() {
int format = getFormat();
return (format >= JSON_RANGE_START && format <= JSON_RANGE_END);
}
/**
* @return Whether or not the format is a JSON-P format
*/
protected boolean formatIsJSONP() {
return formatIsJSON() && (getFormat() % 2 == 1);
}
/**
* @return Whether or not the format is for JSON-Objects
*/
protected boolean formatIsJsonObj() {
int format = getFormat();
return (format == JSON_OBJ_FORMAT || format == JSONP_OBJ_FORMAT);
}
/**
* @return Whether or not the format is a flat-file format
*/
protected boolean formatIsFlatFile() {
int format = getFormat();
return (format == TSV_FORMAT || format == CSV_FORMAT);
}
/**
* Returns true if the format requires the count, rather than the full or
* paged result set.
* @return a truth value
*/
public boolean formatIsCount() {
int format = getFormat();
switch (format) {
case COUNT_FORMAT:
return true;
case JSONP_COUNT_FORMAT:
return true;
case JSON_COUNT_FORMAT:
return true;
default:
return false;
}
}
/**
* @return Whether or not the format is XML.
*/
public boolean formatIsXML() {
return (getFormat() == XML_FORMAT);
}
/**
* Make the XML output given the HttpResponse's PrintWriter.
* @param out The PrintWriter from the HttpResponse.
* @return An Output that produces good XML.
*/
protected Output makeXMLOutput(PrintWriter out) {
ResponseUtil.setXMLHeader(response, "result.xml");
return new StreamedOutput(out, new XMLFormatter());
}
/**
* Make the default JSON output given the HttpResponse's PrintWriter.
* @param out The PrintWriter from the HttpResponse.
* @return An Output that produces good JSON.
*/
protected Output makeJSONOutput(PrintWriter out) {
return new StreamedOutput(out, new JSONFormatter());
}
/**
* @return Whether or not this request wants gzipped data.
*/
protected boolean isGzip() {
return GZIP.equalsIgnoreCase(request.getParameter(COMPRESS));
}
/**
* @return Whether or not this request wants zipped data.
*/
protected boolean isZip() {
return ZIP.equalsIgnoreCase(request.getParameter(COMPRESS));
}
/**
* @return Whether or not this request wants uncompressed data.
*/
protected boolean isUncompressed() {
return StringUtils.isEmpty(request.getParameter(COMPRESS));
}
/**
* @return the file-name extension for the result-set.
*/
protected String getExtension() {
if (isGzip()) {
return ".gz";
} else if (isZip()) {
return ".zip";
} else {
return "";
}
}
private void initOutput(HttpServletResponse response) {
int format = getFormat();
// HTML is a special case
if (format == HTML_FORMAT) {
output = new MemoryOutput();
ResponseUtil.setHTMLContentType(response);
return;
}
PrintWriter out;
OutputStream os;
try {
// set reasonable buffer size
response.setBufferSize(8 * 1024);
os = response.getOutputStream();
if (isGzip()) {
os = new GZIPOutputStream(os);
} else if (isZip()) {
os = new ZipOutputStream(new BufferedOutputStream(os));
}
out = new PrintWriter(os);
} catch (IOException e) {
throw new InternalErrorException(e);
}
String filename = getDefaultFileName();
switch (format) {
case XML_FORMAT:
output = makeXMLOutput(out);
break;
case TSV_FORMAT:
output = new StreamedOutput(out, new TabFormatter());
filename = "result.tsv";
if (isUncompressed()) {
ResponseUtil.setTabHeader(response, filename);
}
break;
case CSV_FORMAT:
output = new StreamedOutput(out, new CSVFormatter());
filename = "result.csv";
if (isUncompressed()) {
ResponseUtil.setCSVHeader(response, filename);
}
break;
case COUNT_FORMAT:
output = new StreamedOutput(out, new TabFormatter());
filename = "resultcount.txt";
if (isUncompressed()) {
ResponseUtil.setPlainTextHeader(response, filename);
}
break;
case JSON_FORMAT:
output = makeJSONOutput(out);
filename = "result.json";
if (isUncompressed()) {
ResponseUtil.setJSONHeader(response, filename);
}
break;
case JSONP_FORMAT:
output = makeJSONOutput(out);
filename = "result.jsonp";
if (isUncompressed()) {
ResponseUtil.setJSONPHeader(response, filename);
}
break;
case JSON_OBJ_FORMAT:
output = new StreamedOutput(out, new JSONObjectFormatter());
filename = "result.json";
if (isUncompressed()) {
ResponseUtil.setJSONHeader(response, filename);
}
break;
case JSONP_OBJ_FORMAT:
output = new StreamedOutput(out, new JSONObjectFormatter());
filename = "result.jsonp";
if (isUncompressed()) {
ResponseUtil.setJSONPHeader(response, filename);
}
break;
case JSON_TABLE_FORMAT:
output = new StreamedOutput(out, new JSONTableFormatter());
filename = "resulttable.json";
if (isUncompressed()) {
ResponseUtil.setJSONHeader(response, filename);
}
break;
case JSONP_TABLE_FORMAT:
output = new StreamedOutput(out, new JSONTableFormatter());
filename = "resulttable.jsonp";
if (isUncompressed()) {
ResponseUtil.setJSONPHeader(response, filename);
}
break;
case JSON_DATA_TABLE_FORMAT:
output = new StreamedOutput(out, new JSONTableFormatter());
filename = "resulttable.json";
if (isUncompressed()) {
ResponseUtil.setJSONHeader(response, filename);
}
break;
case JSONP_DATA_TABLE_FORMAT:
output = new StreamedOutput(out, new JSONTableFormatter());
filename = "resulttable.jsonp";
if (isUncompressed()) {
ResponseUtil.setJSONPHeader(response, filename);
}
break;
case JSON_ROW_FORMAT:
output = new StreamedOutput(out, new JSONRowFormatter());
ResponseUtil.setJSONHeader(response,
"result.json" + getExtension());
break;
case JSONP_ROW_FORMAT:
output = new StreamedOutput(out, new JSONRowFormatter());
ResponseUtil.setJSONPHeader(response,
"result.json" + getExtension());
break;
case JSON_COUNT_FORMAT:
output = new StreamedOutput(out, new JSONCountFormatter());
filename = "resultcount.json";
if (isUncompressed()) {
ResponseUtil.setJSONHeader(response, filename);
}
break;
case JSONP_COUNT_FORMAT:
output = new StreamedOutput(out, new JSONCountFormatter());
filename = "resultcount.jsonp";
if (isUncompressed()) {
ResponseUtil.setJSONPHeader(response, filename);
}
break;
default:
output = getDefaultOutput(out, os);
}
if (!isUncompressed()) {
filename += getExtension();
ResponseUtil.setGzippedHeader(response, filename);
if (isZip()) {
try {
((ZipOutputStream) os).putNextEntry(new ZipEntry(filename));
} catch (IOException e) {
throw new InternalErrorException(e);
}
}
}
}
    /**
     * @return The default file name for this service. (default = "result.tsv")
     *         Subclasses may override this to advertise a different download name.
     */
    protected String getDefaultFileName() {
        return "result.tsv";
    }
/**
* Make the default output for this service.
* @param out The response's PrintWriter.
* @param os The Response's output stream.
* @return An Output. (default = new StreamedOutput(out, new TabFormatter()))
*/
protected Output getDefaultOutput(PrintWriter out, OutputStream os) {
output = new StreamedOutput(out, new TabFormatter());
ResponseUtil.setTabHeader(response, getDefaultFileName());
return output;
}
/**
* Returns true if the request wants column headers as well as result rows
* @return true if the request declares it wants column headers
*/
public boolean wantsColumnHeaders() {
String wantsCols = request.getParameter(WebServiceRequestParser.ADD_HEADER_PARAMETER);
boolean no = (wantsCols == null || wantsCols.isEmpty() || "0".equals(wantsCols));
return !no;
}
/**
* Get an enum which represents the column header style (path, friendly, or none)
* @return a column header style
*/
public ColumnHeaderStyle getColumnHeaderStyle() {
if (wantsColumnHeaders()) {
String style = request.getParameter(WebServiceRequestParser.ADD_HEADER_PARAMETER);
if ("path".equalsIgnoreCase(style)) {
return ColumnHeaderStyle.PATH;
} else {
return ColumnHeaderStyle.FRIENDLY;
}
} else {
return ColumnHeaderStyle.NONE;
}
}
/**
* Parse a format from the path-info of the request.
* By default, if the path-info is one of "xml", "json", "jsonp", "tsv" or "csv",
* then an appropriate format will be returned. All other values will cause
* null to be returned.
* @return A format string.
*/
protected String parseFormatFromPathInfo() {
String pathInfo = request.getPathInfo();
pathInfo = StringUtil.trimSlashes(pathInfo);
if ("xml".equalsIgnoreCase(pathInfo)) {
return WebServiceRequestParser.FORMAT_PARAMETER_XML;
} else if ("json".equalsIgnoreCase(pathInfo)) {
return WebServiceRequestParser.FORMAT_PARAMETER_JSON;
} else if ("jsonp".equalsIgnoreCase(pathInfo)) {
return WebServiceRequestParser.FORMAT_PARAMETER_JSONP;
} else if ("tsv".equalsIgnoreCase(pathInfo)) {
return WebServiceRequestParser.FORMAT_PARAMETER_TAB;
} else if ("csv".equalsIgnoreCase(pathInfo)) {
return WebServiceRequestParser.FORMAT_PARAMETER_CSV;
}
return null;
}
    /**
     * @return The default format constant for this service, used when the
     *         request does not specify a recognised format. Subclasses may
     *         override this.
     */
    protected int getDefaultFormat() {
        return EMPTY_FORMAT;
    }
/**
* Returns required output format.
*
* @return format
*/
public int getFormat() {
String format;
if (request.getPathInfo() != null) {
format = parseFormatFromPathInfo();
} else {
format = request.getParameter(WebServiceRequestParser.OUTPUT_PARAMETER);
}
if (StringUtils.isEmpty(format)) {
return getDefaultFormat();
}
if (WebServiceRequestParser.FORMAT_PARAMETER_XML
.equalsIgnoreCase(format)) {
return XML_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_HTML
.equalsIgnoreCase(format)) {
return HTML_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_TAB
.equalsIgnoreCase(format)) {
return TSV_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_CSV
.equalsIgnoreCase(format)) {
return CSV_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_COUNT
.equalsIgnoreCase(format)) {
return COUNT_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_JSON_OBJ
.equalsIgnoreCase(format)) {
return JSON_OBJ_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_JSONP_OBJ
.equalsIgnoreCase(format)) {
return JSONP_OBJ_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_JSON_TABLE
.equalsIgnoreCase(format)) {
return JSON_TABLE_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_JSONP_TABLE
.equalsIgnoreCase(format)) {
return JSONP_TABLE_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_JSON_ROW
.equalsIgnoreCase(format)) {
return JSON_ROW_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_JSONP_ROW
.equalsIgnoreCase(format)) {
return JSONP_ROW_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_JSONP
.equalsIgnoreCase(format)) {
return JSONP_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_JSON
.equalsIgnoreCase(format)) {
return JSON_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_JSON_DATA_TABLE
.equalsIgnoreCase(format)) {
return JSON_DATA_TABLE_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_JSONP_DATA_TABLE
.equalsIgnoreCase(format)) {
return JSONP_DATA_TABLE_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_JSONP_COUNT
.equalsIgnoreCase(format)) {
return JSONP_COUNT_FORMAT;
}
if (WebServiceRequestParser.FORMAT_PARAMETER_JSON_COUNT
.equalsIgnoreCase(format)) {
return JSON_COUNT_FORMAT;
}
return getDefaultFormat();
}
/**
* Get the value of the callback parameter.
* @return The value, or null if this request type does not support this.
*/
public String getCallback() {
if (formatIsJSONP()) {
if (!hasCallback()) {
return DEFAULT_CALLBACK;
} else {
return request.getParameter(
WebServiceRequestParser.CALLBACK_PARAMETER);
}
} else {
return null;
}
}
/**
* Determine whether a callback was supplied to this request.
* @return Whether or not a callback was supplied.
*/
public boolean hasCallback() {
String cb = request.getParameter(
WebServiceRequestParser.CALLBACK_PARAMETER);
return (cb != null && !"".equals(cb));
}
    /**
     * Runs the service's business logic. This abstract method must be
     * implemented by each subclass to do the actual work of the web service.
     * The standard procedure is to override this method and let it be called
     * from the WebService.doGet method, which encapsulates the logic common
     * to all web services; alternatively a subclass can override doGet itself
     * and manage everything alone.
     *
     * @throws Exception if some error occurs
     */
    protected abstract void execute() throws Exception;
    /**
     * Returns dispatcher that forwards to the page that displays results as a
     * html page.
     *
     * @return dispatcher for the HTML results page (FORWARD_PATH)
     */
    public RequestDispatcher getHtmlForward() {
        return request.getSession().getServletContext()
                .getRequestDispatcher(FORWARD_PATH);
    }
    /**
     * @return true if the request was successfully authenticated (a
     *         permission was granted via username/password or token)
     */
    public boolean isAuthenticated() {
        // permission is only assigned after successful authentication.
        return permission != null;
    }
}
| intermine/web/main/src/org/intermine/webservice/server/WebService.java | package org.intermine.webservice.server;
/*
* Copyright (C) 2002-2011 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.zip.GZIPOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.servlet.RequestDispatcher;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.intermine.api.InterMineAPI;
import org.intermine.api.profile.Profile;
import org.intermine.api.profile.ProfileManager;
import org.intermine.api.profile.ProfileManager.ApiPermission;
import org.intermine.api.profile.ProfileManager.AuthenticationException;
import org.intermine.util.StringUtil;
import org.intermine.web.logic.export.ResponseUtil;
import org.intermine.web.logic.profile.LoginHandler;
import org.intermine.web.logic.session.SessionMethods;
import org.intermine.webservice.server.exceptions.BadRequestException;
import org.intermine.webservice.server.exceptions.InternalErrorException;
import org.intermine.webservice.server.exceptions.ServiceException;
import org.intermine.webservice.server.exceptions.ServiceForbiddenException;
import org.intermine.webservice.server.output.CSVFormatter;
import org.intermine.webservice.server.output.JSONCountFormatter;
import org.intermine.webservice.server.output.JSONFormatter;
import org.intermine.webservice.server.output.JSONObjectFormatter;
import org.intermine.webservice.server.output.JSONResultFormatter;
import org.intermine.webservice.server.output.JSONRowFormatter;
import org.intermine.webservice.server.output.JSONTableFormatter;
import org.intermine.webservice.server.output.MemoryOutput;
import org.intermine.webservice.server.output.Output;
import org.intermine.webservice.server.output.StreamedOutput;
import org.intermine.webservice.server.output.TabFormatter;
import org.intermine.webservice.server.output.XMLFormatter;
/**
*
* Base class for web services. See methods of class to be able implement
* subclass. <h3>Output</h3> There can be 3 types of output:
* <ul>
* <li>Only Error output
* <li>Complete results - xml, tab separated, html
* <li>Incomplete results - error messages are appended at the end
* </ul>
*
* <h3>Web service design</h3>
* <ul>
* <li>Request is parsed with corresponding RequestProcessor class and returned
* as a corresponding Input class.
* <li>Web services are subclasses of WebService class.
* <li>Web services use implementations of Output class to print results.
* <li>Request parameter names are constants in corresponding
* RequestProcessorBase subclass.
 * <li>Servlets are used only to forward to the corresponding web service,
 * which is always created anew. This guarantees the fields of each new
 * service are correctly initialised and no values linger from previous
 * requests.
* </ul>
* For using of web services see InterMine wiki pages.
*
* @author Jakub Kulaviak
* @author Alex Kalderimis
* @version
*/
public abstract class WebService
{
    // ---- Public format constants (part of the service API) ----
    /** Default jsonp callback **/
    public static final String DEFAULT_CALLBACK = "callback";
    /** The format for when no value is given **/
    public static final int EMPTY_FORMAT = -1;
    /** The Unknown format **/
    public static final int UNKNOWN_FORMAT = -2;
    /** XML format constant **/
    public static final int XML_FORMAT = 0;
    /** TSV format constant **/
    public static final int TSV_FORMAT = 1;
    /** HTML format constant **/
    public static final int HTML_FORMAT = 2;
    /** CSV format constant **/
    public static final int CSV_FORMAT = 3;
    /** Count format constant **/
    public static final int COUNT_FORMAT = 4;
    // FORMAT CONSTANTS BETWEEN 20-40 ARE RESERVED FOR JSON FORMATS!!
    // (formatIsJSON/formatIsJSONP rely on this range; odd codes are JSON-P)
    /** Start of JSON format range **/
    public static final int JSON_RANGE_START = 20;
    /** End of JSON format range **/
    public static final int JSON_RANGE_END = 40;
    /** JSON format constant **/
    public static final int JSON_FORMAT = 20;
    /** JSONP format constant **/
    public static final int JSONP_FORMAT = 21;
    /** JSON Object format constant **/
    public static final int JSON_OBJ_FORMAT = 22;
    /** JSONP Object format constant **/
    public static final int JSONP_OBJ_FORMAT = 23;
    /** JSON Table format constant **/
    public static final int JSON_TABLE_FORMAT = 24;
    /** JSONP Table format constant **/
    public static final int JSONP_TABLE_FORMAT = 25;
    /** JSON Row format constant **/
    public static final int JSON_ROW_FORMAT = 26;
    /** JSONP Row format constant **/
    public static final int JSONP_ROW_FORMAT = 27;
    /** JSON count format constant **/
    public static final int JSON_COUNT_FORMAT = 28;
    /** JSONP count format constant **/
    public static final int JSONP_COUNT_FORMAT = 29;
    /** JSON data table format constant **/
    public static final int JSON_DATA_TABLE_FORMAT = 30;
    /** JSONP data table format constant **/
    public static final int JSONP_DATA_TABLE_FORMAT = 31;
    // ---- Request parameter names/values controlling compression ----
    private static final String COMPRESS = "compress";
    private static final String GZIP = "gzip";
    private static final String ZIP = "zip";
    // Web property that, when "true", disables the whole web service layer.
    private static final String WEB_SERVICE_DISABLED_PROPERTY = "webservice.disabled";
    private static final Logger LOG = Logger.getLogger(WebService.class);
    // JSP used to render results as an HTML page (see getHtmlForward()).
    private static final String FORWARD_PATH = "/webservice/table.jsp";
    // HTTP basic-auth header name and token request-parameter key.
    private static final String AUTHENTICATION_FIELD_NAME = "Authorization";
    private static final String AUTH_TOKEN_PARAM_KEY = "token";
    private static final Profile ANON_PROFILE = new AnonProfile();
    // ---- Per-request state, assigned in service()/authenticate() ----
    protected HttpServletRequest request;
    protected HttpServletResponse response;
    protected Output output;
    protected InterMineAPI im;
    // Granted permission; null until authentication succeeds.
    private ApiPermission permission = null;
    /**
     * Construct the web service with the InterMine API object that gives access
     * to the core InterMine functionality.
     *
     * @param im the InterMine application
     */
    public WebService(InterMineAPI im) {
        // Stored for use by subclasses and by authenticate().
        this.im = im;
    }
    /**
     * Starting method of web service. The web service should be run like
     *
     * <pre>
     * new ListsService().service(request, response);
     * </pre>
     *
     * Ensures initialisation of web service and makes steps common for all web
     * services and after that executes the <tt>execute</tt> method, that should be
     * overwritten with each web service.
     *
     * @param request The request, as received by the servlet.
     * @param response The response, as handled by the servlet.
     */
    public void service(HttpServletRequest request, HttpServletResponse response) {
        try {
            this.request = request;
            try {
                request.setCharacterEncoding("UTF-8");
            } catch (UnsupportedEncodingException ex) {
                LOG.error(ex);
            }
            this.response = response;
            response.setHeader("Access-Control-Allow-Origin", "*"); // Allow cross domain requests.
            // Fixed request pipeline; output must exist before any error can
            // be reported, so initOutput comes first.
            initOutput(response);
            checkEnabled();
            authenticate();
            initState();
            validateState();
            execute();
        } catch (Throwable t) {
            sendError(t, response);
        }
        // Flush and clean up unconditionally, even after a failure above.
        try {
            output.flush();
        } catch (Throwable t) {
            logError(t, "Error flushing", 500);
        }
        try {
            cleanUp();
        } catch (Throwable t) {
            LOG.error("Error cleaning up", t);
        }
        // Do not persist sessions. All requests should be state-less.
        request.getSession().invalidate();
    }
private void checkEnabled() {
Properties webProperties = SessionMethods.getWebProperties(request
.getSession().getServletContext());
if ("true".equalsIgnoreCase(webProperties
.getProperty(WEB_SERVICE_DISABLED_PROPERTY))) {
throw new ServiceForbiddenException("Web service is disabled.");
}
}
    /**
     * Subclasses may put clean-up code here, to be run after the request has been
     * executed. Invoked from {@link #service} even when execution failed.
     */
    protected void cleanUp() {
        // No-op stub.
    }
    /**
     * Subclasses can put initialisation here. Called from {@link #service}
     * after authentication, before validation and execution.
     */
    protected void initState() {
        // No-op stub
    }
    /**
     * Subclasses can put initialisation checks here.
     * The main use case is for confirming
     * authentication. Runs immediately before {@link #execute}.
     */
    protected void validateState() {
        // No-op stub
    }
/**
* If user name and password is specified in request, then it setups user
* profile in session. User was authenticated. It uses HTTP basic access
* authentication.
* {@link "http://en.wikipedia.org/wiki/Basic_access_authentication"}
*/
private void authenticate() {
final String authToken = request.getParameter(AUTH_TOKEN_PARAM_KEY);
final ProfileManager pm = im.getProfileManager();
final HttpSession session = request.getSession();
// Anonymous requests get the anonymous profile.
SessionMethods.setProfile(session, ANON_PROFILE);
try {
if (StringUtils.isEmpty(authToken)) {
final String authString = request.getHeader(AUTHENTICATION_FIELD_NAME);
if (StringUtils.isEmpty(authString) || formatIsJSONP()) {
return; // Not Authenticated.
}
// Strip off the "Basic" part - but don't require it.
final String encoded = authString.substring(authString.indexOf("Basic ") + 1);
final String decoded = new String(Base64.decodeBase64(encoded.getBytes()));
final String[] parts = decoded.split(":", 2);
if (parts.length != 2) {
throw new BadRequestException(
"Invalid request authentication. "
+ "Authorization field contains invalid value. "
+ "Decoded authorization value: " + parts[0]);
}
final String username = parts[0];
final String password = parts[1];
permission = pm.getPermission(username, password, im.getClassKeys());
} else {
permission = pm.getPermission(authToken, im.getClassKeys());
}
} catch (AuthenticationException e) {
throw new ServiceForbiddenException(e.getMessage(), e);
}
LoginHandler.setUpProfile(session, permission.getProfile());
}
private void sendError(Throwable t, HttpServletResponse response) {
String msg = WebServiceConstants.SERVICE_FAILED_MSG;
if (t.getMessage() != null && t.getMessage().length() >= 0) {
msg = t.getMessage();
}
int code;
if (t instanceof ServiceException) {
ServiceException ex = (ServiceException) t;
code = ex.getHttpErrorCode();
} else {
code = Output.SC_INTERNAL_SERVER_ERROR;
}
logError(t, msg, code);
if (!formatIsJSONP()) {
// Don't set errors statuses on jsonp requests, to enable
// better error checking in the browser.
response.setStatus(code);
} else {
// But do set callbacks
String callback = getCallback();
if (callback == null) {
callback = "makeInterMineResultsTable";
}
Map<String, Object> attributes = new HashMap<String, Object>();
attributes.put(JSONResultFormatter.KEY_CALLBACK, callback);
output.setHeaderAttributes(attributes);
}
output.setError(msg, code);
LOG.debug("Set error to : " + msg + "," + code);
}
    // Log an error with its stack trace: internal (500) errors at ERROR level
    // (with the request parameters), everything else at DEBUG.
    private void logError(Throwable t, String msg, int code) {
        // Stack traces for all! Capture the trace into a string so it can be
        // appended to the log message below.
        ByteArrayOutputStream b = new ByteArrayOutputStream();
        PrintStream ps = new PrintStream(b);
        t.printStackTrace(ps);
        ps.flush();
        if (code == Output.SC_INTERNAL_SERVER_ERROR) {
            // Genuine server-side failure: log loudly, including the request.
            LOG.error("Service failed by internal error. Request parameters: \n"
                    + requestParametersToString() + b.toString())
        } else {
            // Client-side problem (e.g. a bad request): debug-level only.
            LOG.debug("Service didn't succeed. It's not an internal error. "
                    + "Reason: " + getErrorDescription(msg, code) + "\n" + b.toString());
        }
    }
private String requestParametersToString() {
StringBuilder sb = new StringBuilder();
Map<String, String[]> map = request.getParameterMap();
for (String name : map.keySet()) {
for (String value : map.get(name)) {
sb.append(name);
sb.append(": ");
sb.append(value);
sb.append("\n");
}
}
return sb.toString();
}
private String getErrorDescription(String msg, int errorCode) {
StringBuilder sb = new StringBuilder();
sb.append(StatusDictionary.getDescription(errorCode));
sb.append(msg);
return sb.toString();
}
/**
* @return Whether or not the requested result format is one of our JSON formats.
*/
protected boolean formatIsJSON() {
int format = getFormat();
return (format >= JSON_RANGE_START && format <= JSON_RANGE_END);
}
/**
* @return Whether or not the format is a JSON-P format
*/
protected boolean formatIsJSONP() {
return formatIsJSON() && (getFormat() % 2 == 1);
}
/**
* @return Whether or not the format is for JSON-Objects
*/
protected boolean formatIsJsonObj() {
int format = getFormat();
return (format == JSON_OBJ_FORMAT || format == JSONP_OBJ_FORMAT);
}
/**
* @return Whether or not the format is a flat-file format
*/
protected boolean formatIsFlatFile() {
int format = getFormat();
return (format == TSV_FORMAT || format == CSV_FORMAT);
}
/**
* Returns true if the format requires the count, rather than the full or
* paged result set.
* @return a truth value
*/
public boolean formatIsCount() {
int format = getFormat();
switch (format) {
case COUNT_FORMAT:
return true;
case JSONP_COUNT_FORMAT:
return true;
case JSON_COUNT_FORMAT:
return true;
default:
return false;
}
}
/**
* @return Whether or not the format is XML.
*/
public boolean formatIsXML() {
return (getFormat() == XML_FORMAT);
}
/**
* Make the XML output given the HttpResponse's PrintWriter.
* @param out The PrintWriter from the HttpResponse.
* @return An Output that produces good XML.
*/
protected Output makeXMLOutput(PrintWriter out) {
ResponseUtil.setXMLHeader(response, "result.xml");
return new StreamedOutput(out, new XMLFormatter());
}
/**
* Make the default JSON output given the HttpResponse's PrintWriter.
* @param out The PrintWriter from the HttpResponse.
* @return An Output that produces good JSON.
*/
protected Output makeJSONOutput(PrintWriter out) {
return new StreamedOutput(out, new JSONFormatter());
}
/**
* @return Whether or not this request wants gzipped data.
*/
protected boolean isGzip() {
return GZIP.equalsIgnoreCase(request.getParameter(COMPRESS));
}
/**
* @return Whether or not this request wants zipped data.
*/
protected boolean isZip() {
return ZIP.equalsIgnoreCase(request.getParameter(COMPRESS));
}
/**
* @return Whether or not this request wants uncompressed data.
*/
protected boolean isUncompressed() {
return StringUtils.isEmpty(request.getParameter(COMPRESS));
}
/**
* @return the file-name extension for the result-set.
*/
protected String getExtension() {
if (isGzip()) {
return ".gz";
} else if (isZip()) {
return ".zip";
} else {
return "";
}
}
private void initOutput(HttpServletResponse response) {
int format = getFormat();
// HTML is a special case
if (format == HTML_FORMAT) {
output = new MemoryOutput();
ResponseUtil.setHTMLContentType(response);
return;
}
PrintWriter out;
OutputStream os;
try {
// set reasonable buffer size
response.setBufferSize(8 * 1024);
os = response.getOutputStream();
if (isGzip()) {
os = new GZIPOutputStream(os);
} else if (isZip()) {
os = new ZipOutputStream(new BufferedOutputStream(os));
}
out = new PrintWriter(os);
} catch (IOException e) {
throw new InternalErrorException(e);
}
String filename = getDefaultFileName();
switch (format) {
case XML_FORMAT:
output = makeXMLOutput(out);
break;
case TSV_FORMAT:
output = new StreamedOutput(out, new TabFormatter());
filename = "result.tsv";
if (isUncompressed()) {
ResponseUtil.setTabHeader(response, filename);
}
break;
case CSV_FORMAT:
output = new StreamedOutput(out, new CSVFormatter());
filename = "result.csv";
if (isUncompressed()) {
ResponseUtil.setCSVHeader(response, filename);
}
break;
case COUNT_FORMAT:
output = new StreamedOutput(out, new TabFormatter());
filename = "resultcount.txt";
if (isUncompressed()) {
ResponseUtil.setPlainTextHeader(response, filename);
}
break;
case JSON_FORMAT:
output = makeJSONOutput(out);
filename = "result.json";
if (isUncompressed()) {
ResponseUtil.setJSONHeader(response, filename);
}
break;
case JSONP_FORMAT:
output = makeJSONOutput(out);
filename = "result.jsonp";
if (isUncompressed()) {
ResponseUtil.setJSONPHeader(response, filename);
}
break;
case JSON_OBJ_FORMAT:
output = new StreamedOutput(out, new JSONObjectFormatter());
filename = "result.json";
if (isUncompressed()) {
ResponseUtil.setJSONHeader(response, filename);
}
break;
case JSONP_OBJ_FORMAT:
output = new StreamedOutput(out, new JSONObjectFormatter());
filename = "result.jsonp";
if (isUncompressed()) {
ResponseUtil.setJSONPHeader(response, filename);
}
break;
case JSON_TABLE_FORMAT:
output = new StreamedOutput(out, new JSONTableFormatter());
filename = "resulttable.json";
if (isUncompressed()) {
ResponseUtil.setJSONHeader(response, filename);
}
break;
case JSONP_TABLE_FORMAT:
output = new StreamedOutput(out, new JSONTableFormatter());
filename = "resulttable.jsonp";
if (isUncompressed()) {
ResponseUtil.setJSONPHeader(response, filename);
}
break;
case JSON_DATA_TABLE_FORMAT:
output = new StreamedOutput(out, new JSONTableFormatter());
filename = "resulttable.json";
if (isUncompressed()) {
ResponseUtil.setJSONHeader(response, filename);
}
break;
case JSONP_DATA_TABLE_FORMAT:
output = new StreamedOutput(out, new JSONTableFormatter());
filename = "resulttable.jsonp";
if (isUncompressed()) {
ResponseUtil.setJSONPHeader(response, filename);
}
break;
case JSON_ROW_FORMAT:
output = new StreamedOutput(out, new JSONRowFormatter());
ResponseUtil.setJSONHeader(response,
"result.json" + getExtension());
break;
case JSONP_ROW_FORMAT:
output = new StreamedOutput(out, new JSONRowFormatter());
ResponseUtil.setJSONPHeader(response,
"result.json" + getExtension());
break;
case JSON_COUNT_FORMAT:
output = new StreamedOutput(out, new JSONCountFormatter());
filename = "resultcount.json";
if (isUncompressed()) {
ResponseUtil.setJSONHeader(response, filename);
}
break;
case JSONP_COUNT_FORMAT:
output = new StreamedOutput(out, new JSONCountFormatter());
filename = "resultcount.jsonp";
if (isUncompressed()) {
ResponseUtil.setJSONPHeader(response, filename);
}
break;
default:
output = getDefaultOutput(out, os);
}
if (!isUncompressed()) {
filename += getExtension();
ResponseUtil.setGzippedHeader(response, filename);
if (isZip()) {
try {
((ZipOutputStream) os).putNextEntry(new ZipEntry(filename));
} catch (IOException e) {
throw new InternalErrorException(e);
}
}
}
}
    /**
     * @return The default file name for this service. (default = "result.tsv")
     *         Subclasses may override this to advertise a different download name.
     */
    protected String getDefaultFileName() {
        return "result.tsv";
    }
/**
* Make the default output for this service.
* @param out The response's PrintWriter.
* @param os The Response's output stream.
* @return An Output. (default = new StreamedOutput(out, new TabFormatter()))
*/
protected Output getDefaultOutput(PrintWriter out, OutputStream os) {
output = new StreamedOutput(out, new TabFormatter());
ResponseUtil.setTabHeader(response, getDefaultFileName());
return output;
}
/**
* Returns true if the request wants column headers as well as result rows
* @return true if the request declares it wants column headers
*/
public boolean wantsColumnHeaders() {
String wantsCols = request.getParameter(WebServiceRequestParser.ADD_HEADER_PARAMETER);
boolean no = (wantsCols == null || wantsCols.isEmpty() || "0".equals(wantsCols));
return !no;
}
/**
* Get an enum which represents the column header style (path, friendly, or none)
* @return a column header style
*/
public ColumnHeaderStyle getColumnHeaderStyle() {
if (wantsColumnHeaders()) {
String style = request.getParameter(WebServiceRequestParser.ADD_HEADER_PARAMETER);
if ("path".equalsIgnoreCase(style)) {
return ColumnHeaderStyle.PATH;
} else {
return ColumnHeaderStyle.FRIENDLY;
}
} else {
return ColumnHeaderStyle.NONE;
}
}
/**
 * Parse a format from the path-info of the request.
 * By default, if the path-info is one of "xml", "json", "jsonp", "tsv" or "csv",
 * then an appropriate format will be returned. All other values will cause
 * null to be returned.
 *
 * @return A format string, or null if the path-info matches no known format.
 */
protected String parseFormatFromPathInfo() {
    String pathInfo = StringUtil.trimSlashes(request.getPathInfo());
    // Recognised path-info suffixes and the format constants they map to.
    String[][] mappings = {
        {"xml", WebServiceRequestParser.FORMAT_PARAMETER_XML},
        {"json", WebServiceRequestParser.FORMAT_PARAMETER_JSON},
        {"jsonp", WebServiceRequestParser.FORMAT_PARAMETER_JSONP},
        {"tsv", WebServiceRequestParser.FORMAT_PARAMETER_TAB},
        {"csv", WebServiceRequestParser.FORMAT_PARAMETER_CSV}
    };
    for (String[] mapping : mappings) {
        if (mapping[0].equalsIgnoreCase(pathInfo)) {
            return mapping[1];
        }
    }
    return null;
}
/**
 * @return The default format constant for this service. (default = EMPTY_FORMAT;
 *         subclasses override this to pick a different fallback format)
 */
protected int getDefaultFormat() {
    return EMPTY_FORMAT;
}
/**
 * Returns the required output format, resolved from the request's path-info
 * (e.g. "/xml") when present, otherwise from the output parameter.
 *
 * @return one of the *_FORMAT constants, or getDefaultFormat() when the
 *         request does not name a recognised format
 */
public int getFormat() {
    String format;
    if (request.getPathInfo() != null) {
        format = parseFormatFromPathInfo();
    } else {
        format = request.getParameter(WebServiceRequestParser.OUTPUT_PARAMETER);
    }
    if (StringUtils.isEmpty(format)) {
        return getDefaultFormat();
    }
    // Walk the known format parameters; the first case-insensitive match wins.
    if (WebServiceRequestParser.FORMAT_PARAMETER_XML
            .equalsIgnoreCase(format)) {
        return XML_FORMAT;
    }
    if (WebServiceRequestParser.FORMAT_PARAMETER_HTML
            .equalsIgnoreCase(format)) {
        return HTML_FORMAT;
    }
    if (WebServiceRequestParser.FORMAT_PARAMETER_TAB
            .equalsIgnoreCase(format)) {
        return TSV_FORMAT;
    }
    if (WebServiceRequestParser.FORMAT_PARAMETER_CSV
            .equalsIgnoreCase(format)) {
        return CSV_FORMAT;
    }
    if (WebServiceRequestParser.FORMAT_PARAMETER_COUNT
            .equalsIgnoreCase(format)) {
        return COUNT_FORMAT;
    }
    // The many JSON/JSONP variants follow; each maps to its own constant.
    if (WebServiceRequestParser.FORMAT_PARAMETER_JSON_OBJ
            .equalsIgnoreCase(format)) {
        return JSON_OBJ_FORMAT;
    }
    if (WebServiceRequestParser.FORMAT_PARAMETER_JSONP_OBJ
            .equalsIgnoreCase(format)) {
        return JSONP_OBJ_FORMAT;
    }
    if (WebServiceRequestParser.FORMAT_PARAMETER_JSON_TABLE
            .equalsIgnoreCase(format)) {
        return JSON_TABLE_FORMAT;
    }
    if (WebServiceRequestParser.FORMAT_PARAMETER_JSONP_TABLE
            .equalsIgnoreCase(format)) {
        return JSONP_TABLE_FORMAT;
    }
    if (WebServiceRequestParser.FORMAT_PARAMETER_JSON_ROW
            .equalsIgnoreCase(format)) {
        return JSON_ROW_FORMAT;
    }
    if (WebServiceRequestParser.FORMAT_PARAMETER_JSONP_ROW
            .equalsIgnoreCase(format)) {
        return JSONP_ROW_FORMAT;
    }
    // NOTE: the plain JSONP check precedes the plain JSON check.
    if (WebServiceRequestParser.FORMAT_PARAMETER_JSONP
            .equalsIgnoreCase(format)) {
        return JSONP_FORMAT;
    }
    if (WebServiceRequestParser.FORMAT_PARAMETER_JSON
            .equalsIgnoreCase(format)) {
        return JSON_FORMAT;
    }
    if (WebServiceRequestParser.FORMAT_PARAMETER_JSON_DATA_TABLE
            .equalsIgnoreCase(format)) {
        return JSON_DATA_TABLE_FORMAT;
    }
    if (WebServiceRequestParser.FORMAT_PARAMETER_JSONP_DATA_TABLE
            .equalsIgnoreCase(format)) {
        return JSONP_DATA_TABLE_FORMAT;
    }
    if (WebServiceRequestParser.FORMAT_PARAMETER_JSONP_COUNT
            .equalsIgnoreCase(format)) {
        return JSONP_COUNT_FORMAT;
    }
    if (WebServiceRequestParser.FORMAT_PARAMETER_JSON_COUNT
            .equalsIgnoreCase(format)) {
        return JSON_COUNT_FORMAT;
    }
    // Unrecognised format string: fall back to the service default.
    return getDefaultFormat();
}
/**
 * Get the value of the callback parameter.
 *
 * @return The value, or null if this request type does not support this.
 */
public String getCallback() {
    if (!formatIsJSONP()) {
        return null;
    }
    // JSONP responses always need a callback; fall back to the default
    // when the client did not supply one.
    return hasCallback()
        ? request.getParameter(WebServiceRequestParser.CALLBACK_PARAMETER)
        : DEFAULT_CALLBACK;
}
/**
 * Determine whether a non-empty callback parameter was supplied to this
 * request.
 *
 * @return Whether or not a callback was supplied.
 */
public boolean hasCallback() {
    String cb = request.getParameter(
            WebServiceRequestParser.CALLBACK_PARAMETER);
    // Use the same emptiness check as getFormat() for consistency;
    // StringUtils.isEmpty covers both the null and "" cases of the
    // previous hand-rolled check.
    return !StringUtils.isEmpty(cb);
}
/**
 * Runs the service. This abstract method must be implemented by subclasses
 * to perform the actual work of the service. The standard procedure is to
 * override this method and let it be called from WebService.doGet, which
 * encapsulates logic common to all web services; alternatively a service
 * may override doGet itself and manage everything on its own.
 *
 * @throws Exception if some error occurs
 */
protected abstract void execute() throws Exception;
/**
 * Returns a dispatcher that forwards to the page that displays results as a
 * html page.
 *
 * @return the dispatcher for FORWARD_PATH, looked up on the servlet context
 */
public RequestDispatcher getHtmlForward() {
    return request.getSession().getServletContext()
            .getRequestDispatcher(FORWARD_PATH);
}
/**
 * @return true if the request specified a user name and password (i.e. a
 *         permission object was established for this request)
 */
public boolean isAuthenticated() {
    return permission != null;
}
}
| Lowered priority of ws log messages to debug
Former-commit-id: c219759b43729f7dea962ad152643cd44ae603f3 | intermine/web/main/src/org/intermine/webservice/server/WebService.java | Lowered priority of ws log messages to debug |
|
Java | lgpl-2.1 | b8e5b618b9eb92564a88a081efbf82f064bd8c6f | 0 | getrailo/railo,getrailo/railo,JordanReiter/railo,JordanReiter/railo,modius/railo,modius/railo,getrailo/railo | package railo.runtime.type.scope;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import railo.runtime.PageContext;
import railo.runtime.config.ConfigImpl;
import railo.runtime.converter.ConverterException;
import railo.runtime.converter.ScriptConverter;
import railo.runtime.db.DatasourceConnection;
import railo.runtime.db.DatasourceConnectionPool;
import railo.runtime.db.SQL;
import railo.runtime.db.SQLCaster;
import railo.runtime.db.SQLImpl;
import railo.runtime.db.SQLItem;
import railo.runtime.db.SQLItemImpl;
import railo.runtime.dump.DumpData;
import railo.runtime.dump.DumpProperties;
import railo.runtime.dump.DumpTable;
import railo.runtime.engine.ThreadLocalPageContext;
import railo.runtime.exp.DatabaseException;
import railo.runtime.exp.PageException;
import railo.runtime.op.Caster;
import railo.runtime.type.Collection;
import railo.runtime.type.KeyImpl;
import railo.runtime.type.Query;
import railo.runtime.type.QueryImpl;
import railo.runtime.type.Struct;
import railo.runtime.type.StructImpl;
import railo.runtime.type.dt.DateTime;
import railo.runtime.type.dt.DateTimeImpl;
/**
 * Client scope that stores its data in a datasource: the serialized scope
 * struct is persisted in the table "railo_client_data", keyed by the
 * client id (cfid) and the application name.
 */
public final class ClientDatasource extends ClientSupport {

    private static final long serialVersionUID = 239179599401918216L;
    // Column key under which the serialized struct is stored.
    private static final Collection.Key DATA = KeyImpl.getInstance("data");
    // NOTE(review): this flag is static, i.e. shared by every client scope in
    // the JVM, yet it is set per-request in getInstance/release — confirm the
    // cross-request/thread interaction is intended.
    private static boolean structOk;
    // Name of the datasource used to persist this scope.
    private String datasourceName;

    /**
     * Constructor of the class. Pulls the creation time, last-visit time and
     * hit count out of the previously persisted struct (falling back to "now"
     * / 1 when absent).
     * @param pc
     * @param name
     * @param sct
     * @param b
     */
    private ClientDatasource(PageContext pc,String datasourceName, Struct sct) {
        super(
            sct,
            doNowIfNull(pc,Caster.toDate(sct.get(TIMECREATED,null),false,pc.getTimeZone(),null)),
            doNowIfNull(pc,Caster.toDate(sct.get(LASTVISIT,null),false,pc.getTimeZone(),null)),
            -1,
            Caster.toIntValue(sct.get(HITCOUNT,"1"),1));
        //this.isNew=isNew;
        this.datasourceName=datasourceName;
        //this.manager = (DatasourceManagerImpl) pc.getDataSourceManager();
    }

    /**
     * Constructor of the class, clone existing.
     * @param other
     */
    private ClientDatasource(ClientDatasource other,boolean deepCopy) {
        super(other,deepCopy);
        this.datasourceName=other.datasourceName;
        //this.manager=other.manager;
    }

    // Substitute "now" for a missing date value.
    private static DateTime doNowIfNull(PageContext pc,DateTime dt) {
        if(dt==null)return new DateTimeImpl(pc.getConfig());
        return dt;
    }

    /**
     * Load a new instance of the client datasource scope, reading any
     * previously persisted data for this cfid/application from the database.
     * @param datasourceName
     * @param appName
     * @param pc
     * @return client datasource scope
     * @throws PageException
     */
    public static Client getInstance(String datasourceName, PageContext pc) throws PageException {
        Struct _sct = _loadData(pc, datasourceName, false);
        structOk=true;
        if(_sct==null) _sct=new StructImpl();
        return new ClientDatasource(pc,datasourceName,_sct);
    }

    // Exception-less variant: falls back to an empty scope on any failure.
    public static Client getInstanceEL(String datasourceName, PageContext pc) {
        try {
            return getInstance(datasourceName, pc);
        }
        catch (PageException e) {}
        return new ClientDatasource(pc,datasourceName,new StructImpl());
    }

    /**
     * Selects the persisted struct for this client from railo_client_data.
     * If the select fails (typically because the table does not exist yet)
     * the table is created on the fly, trying the dialect-specific large-text
     * column types "text", "memo" and "clob" in turn, then the select is
     * retried.
     * Returns null when no row exists yet for this client.
     */
    private static Struct _loadData(PageContext pc, String datasourceName, boolean mxStyle) throws PageException {
        DatasourceConnection dc=null;
        Query query=null;
        // select
        SQL sqlSelect=mxStyle?
            new SQLImpl("mx"):
            new SQLImpl("select data from railo_client_data where cfid=? and name=?"
                ,new SQLItem[]{
                    new SQLItemImpl(pc.getCFID(),Types.VARCHAR),
                    new SQLItemImpl(pc.getApplicationContext().getName(),Types.VARCHAR)
                });
        ConfigImpl config = (ConfigImpl)pc.getConfig();
        //int pid=1000;
        DatasourceConnectionPool pool = config.getDatasourceConnectionPool();
        try {
            dc=pool.getDatasourceConnection(pc,config.getDataSource(datasourceName),null,null);
            query = new QueryImpl(dc,sqlSelect,-1,-1,-1,"query");
        }
        catch (DatabaseException de) {
            if(dc==null) throw de;
            // Select failed; attempt to create the table, trying each
            // large-text column type until one is accepted by the database.
            try {
                new QueryImpl(dc,createSQL(dc,mxStyle,"text"),-1,-1,-1,"query");
            }
            catch (DatabaseException _de) {
                try {
                    new QueryImpl(dc,createSQL(dc,mxStyle,"memo"),-1,-1,-1,"query");
                }
                catch (DatabaseException __de) {
                    new QueryImpl(dc,createSQL(dc,mxStyle,"clob"),-1,-1,-1,"query");
                }
            }
            // Retry the select now that the table exists.
            query = new QueryImpl(dc,sqlSelect,-1,-1,-1,"query");
        }
        finally {
            if(dc!=null) pool.releaseDatasourceConnection(dc);
        }
        pc.getDebugger().addQueryExecutionTime(datasourceName,"",sqlSelect,query.getRecordcount(),pc.getCurrentPageSource(),query.executionTime());
        boolean _isNew = query.getRecordcount()==0;
        if(_isNew) return null;
        String str=Caster.toString(query.get(DATA));
        if(mxStyle) return null;
        // The stored value is a serialized struct; evaluate it back.
        return (Struct)pc.evaluate(str);
    }

    /**
     *
     * @see railo.runtime.type.scope.ClientSupport#release()
     */
    public void release() {
        // Resolve the current PageContext from the thread rather than
        // holding on to one in a field.
        release(ThreadLocalPageContext.get());
    }

    /**
     * Persists the scope: update the existing row; if more than one row was
     * touched, delete them all and re-insert; if none was touched, insert a
     * fresh row (a manual upsert).
     */
    public void release(PageContext pc) {
        structOk=false;
        super.release();
        if(!super.hasContent()) return;
        DatasourceConnection dc = null;
        ConfigImpl config = (ConfigImpl)pc.getConfig();
        //int pid=1000;//pc.getId()+10000;
        DatasourceConnectionPool pool = config.getDatasourceConnectionPool();
        try {
            dc=pool.getDatasourceConnection(pc,config.getDataSource(datasourceName),null,null);
            int recordsAffected = executeUpdate(pc,dc.getConnection(),"update railo_client_data set data=? where cfid=? and name=?",false);
            if(recordsAffected>1) {
                // Duplicate rows detected: remove them all and re-insert below.
                executeUpdate(pc,dc.getConnection(),"delete from railo_client_data where cfid=? and name=?",true);
                recordsAffected=0;
            }
            if(recordsAffected==0) {
                executeUpdate(pc,dc.getConnection(),"insert into railo_client_data (data,cfid,name) values(?,?,?)",false);
            }
        }
        // NOTE(review): persistence failures are silently swallowed here —
        // presumably a deliberate best-effort write; confirm.
        catch (Exception e) {}
        finally {
            if(dc!=null) pool.releaseDatasourceConnection(dc);
        }
    }

    /**
     * Executes one of the upsert statements above with bound parameters.
     * The scope struct is serialized with ScriptConverter for the data
     * column; when ignoreData is true only cfid and name are bound (used
     * by the delete statement).
     */
    private int executeUpdate(PageContext pc,Connection conn, String strSQL, boolean ignoreData) throws SQLException, PageException, ConverterException {
        String appName = pc.getApplicationContext().getName();
        SQLImpl sql = new SQLImpl(strSQL,new SQLItem[]{
            new SQLItemImpl(new ScriptConverter().serializeStruct(sct,ignoreSet),Types.VARCHAR),
            new SQLItemImpl(pc.getCFID(),Types.VARCHAR),
            new SQLItemImpl(appName,Types.VARCHAR)
        });
        if(ignoreData)sql = new SQLImpl(strSQL,new SQLItem[]{
            new SQLItemImpl(pc.getCFID(),Types.VARCHAR),
            new SQLItemImpl(appName,Types.VARCHAR)
        });
        //print.out(sql);
        PreparedStatement preStat = conn.prepareStatement(sql.getSQLString());
        int count=0;
        try {
            SQLItem[] items=sql.getItems();
            for(int i=0;i<items.length;i++) {
                SQLCaster.setValue(preStat,i+1,items[i]);
            }
            count= preStat.executeUpdate();
        }
        finally {
            preStat.close();
        }
        return count;
    }

    /**
     * Builds the dialect-specific CREATE TABLE statement for
     * railo_client_data; textType selects the large-text column type to try
     * ("text", "memo" or "clob"), with HSQLDB and Oracle special-cased.
     */
    private static SQL createSQL(DatasourceConnection dc, boolean mxStyle, String textType) {
        String clazz = dc.getDatasource().getClazz().getName();
        boolean isMSSQL=
            clazz.equals("com.microsoft.jdbc.sqlserver.SQLServerDriver") ||
            clazz.equals("net.sourceforge.jtds.jdbc.Driver");
        boolean isHSQLDB=
            clazz.equals("org.hsqldb.jdbcDriver");
        boolean isOracle=
            clazz.indexOf("OracleDriver")!=-1;
        StringBuffer sb=new StringBuffer("CREATE TABLE ");
        if(mxStyle) {}
        else {
            if(isMSSQL)sb.append("dbo.");
            sb.append("railo_client_data (");
            // cfid
            sb.append("cfid varchar(64) NOT NULL, ");
            // name
            sb.append("name varchar(255) NOT NULL, ");
            // data
            sb.append("data ");
            if(isHSQLDB)sb.append("varchar ");
            else if(isOracle)sb.append("CLOB ");
            else sb.append(textType+" ");
            sb.append(" NOT NULL");
        }
        sb.append(")");
        return new SQLImpl(sb.toString());
    }

    /**
     * @see railo.runtime.dump.Dumpable#toDumpData(railo.runtime.PageContext, int)
     */
    public DumpData toDumpData(PageContext pageContext, int maxlevel, DumpProperties dp) {
        DumpTable table = super.toDumpTable(pageContext, maxlevel,dp);
        table.setTitle("Scope Client (Datasource)");
        return table;
    }

    /**
     *
     * @see railo.runtime.type.Collection#duplicate(boolean)
     */
    public Collection duplicate(boolean deepCopy) {
        return new ClientDatasource(this,deepCopy);
    }

    /**
     * Reloads the persisted struct from the database unless getInstance has
     * already loaded it for this request (structOk).
     * @see railo.runtime.type.scope.ClientSupport#initialize(railo.runtime.PageContext)
     */
    public void initialize(PageContext pc) {
        try {
            if(!structOk)sct=_loadData(pc, datasourceName, false);
        } catch (PageException e) {
            // best-effort reload: keep the existing struct on failure
        }
        super.initialize(pc);
    }
}
| railo-java/railo-core/src/railo/runtime/type/scope/ClientDatasource.java | package railo.runtime.type.scope;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import railo.runtime.PageContext;
import railo.runtime.config.ConfigImpl;
import railo.runtime.converter.ConverterException;
import railo.runtime.converter.ScriptConverter;
import railo.runtime.db.DatasourceConnection;
import railo.runtime.db.DatasourceConnectionPool;
import railo.runtime.db.SQL;
import railo.runtime.db.SQLCaster;
import railo.runtime.db.SQLImpl;
import railo.runtime.db.SQLItem;
import railo.runtime.db.SQLItemImpl;
import railo.runtime.dump.DumpData;
import railo.runtime.dump.DumpProperties;
import railo.runtime.dump.DumpTable;
import railo.runtime.exp.DatabaseException;
import railo.runtime.exp.PageException;
import railo.runtime.op.Caster;
import railo.runtime.type.Collection;
import railo.runtime.type.KeyImpl;
import railo.runtime.type.Query;
import railo.runtime.type.QueryImpl;
import railo.runtime.type.Struct;
import railo.runtime.type.StructImpl;
import railo.runtime.type.dt.DateTime;
import railo.runtime.type.dt.DateTimeImpl;
/**
 * Client scope that stores its data in a datasource: the serialized scope
 * struct is persisted in the table "railo_client_data", keyed by the
 * client id (cfid) and the application name.
 */
public final class ClientDatasource extends ClientSupport {

    private static final long serialVersionUID = 239179599401918216L;
    // Column key under which the serialized struct is stored.
    private static final Collection.Key DATA = KeyImpl.getInstance("data");
    // NOTE(review): this flag is static, i.e. shared by every client scope in
    // the JVM, yet it is set per-request — confirm the cross-request/thread
    // interaction is intended.
    private static boolean structOk;
    private String datasourceName;
    // NOTE(review): retaining a per-request PageContext in a long-lived scope
    // object risks leaking the request's resources beyond the request's
    // lifetime — confirm the lifecycle (it is nulled only in release()).
    private PageContext pc;

    /**
     * Constructor of the class. Pulls the creation time, last-visit time and
     * hit count out of the previously persisted struct (falling back to "now"
     * / 1 when absent).
     * @param pc
     * @param name
     * @param sct
     * @param b
     */
    private ClientDatasource(PageContext pc,String datasourceName, Struct sct) {
        super(
            sct,
            doNowIfNull(pc,Caster.toDate(sct.get(TIMECREATED,null),false,pc.getTimeZone(),null)),
            doNowIfNull(pc,Caster.toDate(sct.get(LASTVISIT,null),false,pc.getTimeZone(),null)),
            -1,
            Caster.toIntValue(sct.get(HITCOUNT,"1"),1));
        //this.isNew=isNew;
        this.datasourceName=datasourceName;
        //this.manager = (DatasourceManagerImpl) pc.getDataSourceManager();
    }

    /**
     * Constructor of the class, clone existing.
     * @param other
     */
    private ClientDatasource(ClientDatasource other,boolean deepCopy) {
        super(other,deepCopy);
        this.datasourceName=other.datasourceName;
        this.pc=other.pc;
        //this.manager=other.manager;
    }

    // Substitute "now" for a missing date value.
    private static DateTime doNowIfNull(PageContext pc,DateTime dt) {
        if(dt==null)return new DateTimeImpl(pc.getConfig());
        return dt;
    }

    /**
     * Load a new instance of the client datasource scope, reading any
     * previously persisted data for this cfid/application from the database.
     * @param datasourceName
     * @param appName
     * @param pc
     * @return client datasource scope
     * @throws PageException
     */
    public static Client getInstance(String datasourceName, PageContext pc) throws PageException {
        Struct _sct = _loadData(pc, datasourceName, false);
        structOk=true;
        if(_sct==null) _sct=new StructImpl();
        return new ClientDatasource(pc,datasourceName,_sct);
    }

    // Exception-less variant: falls back to an empty scope on any failure.
    public static Client getInstanceEL(String datasourceName, PageContext pc) {
        try {
            return getInstance(datasourceName, pc);
        }
        catch (PageException e) {}
        return new ClientDatasource(pc,datasourceName,new StructImpl());
    }

    /**
     * Selects the persisted struct for this client from railo_client_data.
     * If the select fails (typically because the table does not exist yet)
     * the table is created on the fly, trying the dialect-specific large-text
     * column types "text", "memo" and "clob" in turn, then the select is
     * retried. Returns null when no row exists yet for this client.
     */
    private static Struct _loadData(PageContext pc, String datasourceName, boolean mxStyle) throws PageException {
        DatasourceConnection dc=null;
        Query query=null;
        // select
        SQL sqlSelect=mxStyle?
            new SQLImpl("mx"):
            new SQLImpl("select data from railo_client_data where cfid=? and name=?"
                ,new SQLItem[]{
                    new SQLItemImpl(pc.getCFID(),Types.VARCHAR),
                    new SQLItemImpl(pc.getApplicationContext().getName(),Types.VARCHAR)
                });
        ConfigImpl config = (ConfigImpl)pc.getConfig();
        //int pid=1000;
        DatasourceConnectionPool pool = config.getDatasourceConnectionPool();
        try {
            dc=pool.getDatasourceConnection(pc,config.getDataSource(datasourceName),null,null);
            query = new QueryImpl(dc,sqlSelect,-1,-1,-1,"query");
        }
        catch (DatabaseException de) {
            if(dc==null) throw de;
            // Select failed; attempt to create the table, trying each
            // large-text column type until one is accepted by the database.
            try {
                new QueryImpl(dc,createSQL(dc,mxStyle,"text"),-1,-1,-1,"query");
            }
            catch (DatabaseException _de) {
                try {
                    new QueryImpl(dc,createSQL(dc,mxStyle,"memo"),-1,-1,-1,"query");
                }
                catch (DatabaseException __de) {
                    new QueryImpl(dc,createSQL(dc,mxStyle,"clob"),-1,-1,-1,"query");
                }
            }
            // Retry the select now that the table exists.
            query = new QueryImpl(dc,sqlSelect,-1,-1,-1,"query");
        }
        finally {
            if(dc!=null) pool.releaseDatasourceConnection(dc);
        }
        pc.getDebugger().addQueryExecutionTime(datasourceName,"",sqlSelect,query.getRecordcount(),pc.getCurrentPageSource(),query.executionTime());
        boolean _isNew = query.getRecordcount()==0;
        if(_isNew) return null;
        String str=Caster.toString(query.get(DATA));
        if(mxStyle) return null;
        // The stored value is a serialized struct; evaluate it back.
        return (Struct)pc.evaluate(str);
    }

    /**
     * Persists the scope using the retained PageContext: update the existing
     * row; if more than one row was touched, delete them all and re-insert;
     * if none was touched, insert a fresh row (a manual upsert).
     * @see railo.runtime.type.scope.ClientSupport#release()
     */
    public void release() {
        structOk=false;
        super.release();
        if(!super.hasContent()) return;
        DatasourceConnection dc = null;
        ConfigImpl config = (ConfigImpl)pc.getConfig();
        //int pid=1000;//pc.getId()+10000;
        DatasourceConnectionPool pool = config.getDatasourceConnectionPool();
        try {
            dc=pool.getDatasourceConnection(pc,config.getDataSource(datasourceName),null,null);
            int recordsAffected = executeUpdate(dc.getConnection(),"update railo_client_data set data=? where cfid=? and name=?",false);
            if(recordsAffected>1) {
                // Duplicate rows detected: remove them all and re-insert below.
                executeUpdate(dc.getConnection(),"delete from railo_client_data where cfid=? and name=?",true);
                recordsAffected=0;
            }
            if(recordsAffected==0) {
                executeUpdate(dc.getConnection(),"insert into railo_client_data (data,cfid,name) values(?,?,?)",false);
            }
        }
        // NOTE(review): persistence failures are silently swallowed here —
        // presumably a deliberate best-effort write; confirm.
        catch (Exception e) {}
        finally {
            if(dc!=null) pool.releaseDatasourceConnection(dc);
            // Drop the PageContext reference once the scope is persisted.
            pc=null;
        }
    }

    /**
     * Executes one of the upsert statements above with bound parameters.
     * The scope struct is serialized with ScriptConverter for the data
     * column; when ignoreData is true only cfid and name are bound (used
     * by the delete statement).
     */
    private int executeUpdate(Connection conn, String strSQL, boolean ignoreData) throws SQLException, PageException, ConverterException {
        String appName = pc.getApplicationContext().getName();
        SQLImpl sql = new SQLImpl(strSQL,new SQLItem[]{
            new SQLItemImpl(new ScriptConverter().serializeStruct(sct,ignoreSet),Types.VARCHAR),
            new SQLItemImpl(pc.getCFID(),Types.VARCHAR),
            new SQLItemImpl(appName,Types.VARCHAR)
        });
        if(ignoreData)sql = new SQLImpl(strSQL,new SQLItem[]{
            new SQLItemImpl(pc.getCFID(),Types.VARCHAR),
            new SQLItemImpl(appName,Types.VARCHAR)
        });
        //print.out(sql);
        PreparedStatement preStat = conn.prepareStatement(sql.getSQLString());
        int count=0;
        try {
            SQLItem[] items=sql.getItems();
            for(int i=0;i<items.length;i++) {
                SQLCaster.setValue(preStat,i+1,items[i]);
            }
            count= preStat.executeUpdate();
        }
        finally {
            preStat.close();
        }
        return count;
    }

    /**
     * Builds the dialect-specific CREATE TABLE statement for
     * railo_client_data; textType selects the large-text column type to try
     * ("text", "memo" or "clob"), with HSQLDB and Oracle special-cased.
     */
    private static SQL createSQL(DatasourceConnection dc, boolean mxStyle, String textType) {
        String clazz = dc.getDatasource().getClazz().getName();
        boolean isMSSQL=
            clazz.equals("com.microsoft.jdbc.sqlserver.SQLServerDriver") ||
            clazz.equals("net.sourceforge.jtds.jdbc.Driver");
        boolean isHSQLDB=
            clazz.equals("org.hsqldb.jdbcDriver");
        boolean isOracle=
            clazz.indexOf("OracleDriver")!=-1;
        StringBuffer sb=new StringBuffer("CREATE TABLE ");
        if(mxStyle) {}
        else {
            if(isMSSQL)sb.append("dbo.");
            sb.append("railo_client_data (");
            // cfid
            sb.append("cfid varchar(64) NOT NULL, ");
            // name
            sb.append("name varchar(255) NOT NULL, ");
            // data
            sb.append("data ");
            if(isHSQLDB)sb.append("varchar ");
            else if(isOracle)sb.append("CLOB ");
            else sb.append(textType+" ");
            sb.append(" NOT NULL");
        }
        sb.append(")");
        return new SQLImpl(sb.toString());
    }

    /**
     * @see railo.runtime.dump.Dumpable#toDumpData(railo.runtime.PageContext, int)
     */
    public DumpData toDumpData(PageContext pageContext, int maxlevel, DumpProperties dp) {
        DumpTable table = super.toDumpTable(pageContext, maxlevel,dp);
        table.setTitle("Scope Client (Datasource)");
        return table;
    }

    /**
     *
     * @see railo.runtime.type.Collection#duplicate(boolean)
     */
    public Collection duplicate(boolean deepCopy) {
        return new ClientDatasource(this,deepCopy);
    }

    /**
     * Captures the current PageContext and reloads the persisted struct from
     * the database unless getInstance has already loaded it (structOk).
     * @see railo.runtime.type.scope.ClientSupport#initialize(railo.runtime.PageContext)
     */
    public void initialize(PageContext pc) {
        this.pc=pc;
        //print.out(isNew);
        try {
            if(!structOk)sct=_loadData(pc, datasourceName, false);
        } catch (PageException e) {
            // best-effort reload: keep the existing struct on failure
        }
        super.initialize(pc);
    }
}
| solved ticket https://issues.jboss.org/browse/RAILO-1113
| railo-java/railo-core/src/railo/runtime/type/scope/ClientDatasource.java | solved ticket https://issues.jboss.org/browse/RAILO-1113 |
|
Java | apache-2.0 | 09fbe60d29805f8826f760d1cd46cc342f5f72cf | 0 | realityforge/replicant,realityforge/replicant | package replicant;
import arez.annotations.Action;
import arez.annotations.ArezComponent;
import arez.annotations.Observable;
import arez.annotations.PreDispose;
import java.util.Objects;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import replicant.spy.AreaOfInterestDisposedEvent;
import replicant.spy.AreaOfInterestStatusUpdatedEvent;
import static org.realityforge.braincheck.Guards.*;
/**
 * The channel description declares a desired channel subscription and also
 * includes data on the current status of the subscription.
 */
@ArezComponent
public abstract class AreaOfInterest
  extends ReplicantService
{
  // Lifecycle of the subscription request, from "never requested" through
  // loading/updating to unloaded, with explicit failure states.
  public enum Status
  {
    NOT_ASKED,
    LOADING,
    LOADED,
    LOAD_FAILED,
    UPDATING,
    UPDATED,
    UPDATE_FAILED,
    UNLOADING,
    UNLOADED
  }
  // The channel this area of interest subscribes to; fixed at construction.
  @Nonnull
  private final ChannelAddress _address;
  // Optional filter narrowing the subscription; may change over time.
  @Nullable
  private Object _filter;
  @Nonnull
  private Status _status = Status.NOT_ASKED;

  /**
   * Creates an instance via the Arez-generated implementation class.
   */
  @Nonnull
  static AreaOfInterest create( @Nullable final ReplicantContext context,
                                @Nonnull final ChannelAddress address,
                                @Nullable final Object filter )
  {
    return new Arez_AreaOfInterest( context, address, filter );
  }

  AreaOfInterest( @Nullable final ReplicantContext context,
                  @Nonnull final ChannelAddress address,
                  @Nullable final Object filter )
  {
    super( context );
    _address = Objects.requireNonNull( address );
    _filter = filter;
  }

  /**
   * Emits an AreaOfInterestDisposedEvent to spies (when enabled) just
   * before the component is disposed.
   */
  @PreDispose
  final void preDispose()
  {
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy().reportSpyEvent( new AreaOfInterestDisposedEvent( this ) );
    }
  }

  @Nonnull
  public final ChannelAddress getAddress()
  {
    return _address;
  }

  @Observable
  @Nullable
  public Object getFilter()
  {
    return _filter;
  }

  void setFilter( @Nullable final Object filter )
  {
    _filter = filter;
  }

  @Observable
  @Nonnull
  public Status getStatus()
  {
    return _status;
  }

  void setStatus( @Nonnull final Status status )
  {
    _status = Objects.requireNonNull( status );
  }

  // Error captured when the last load/update failed, null otherwise.
  @Observable
  @Nullable
  public abstract Throwable getError();

  abstract void setError( @Nullable Throwable error );

  // The live subscription backing this area of interest, when one exists.
  @Observable
  @Nullable
  public abstract Subscription getSubscription();

  abstract void setSubscription( @Nullable Subscription subscription );

  /**
   * Update the status of the AreaOfInterest.
   * Verifies (when API invariants are enabled) that an error is supplied
   * exactly for the *_FAILED statuses and that a subscription exists in the
   * context exactly for the statuses that imply one, then records the new
   * status/subscription/error and notifies spies.
   */
  @Action
  void updateAreaOfInterest( @Nonnull final Status status, @Nullable final Throwable error )
  {
    // Statuses for which a live subscription is expected to exist.
    final boolean expectSubscription =
      Status.LOADED == status ||
      Status.UPDATED == status ||
      Status.UPDATING == status ||
      Status.UPDATE_FAILED == status ||
      Status.UNLOADING == status;
    if ( Replicant.shouldCheckApiInvariants() )
    {
      final boolean expectError = Status.LOAD_FAILED == status || Status.UPDATE_FAILED == status;
      final Subscription subscription = getReplicantContext().findSubscription( getAddress() );
      final ChannelAddress address = getAddress();
      apiInvariant( () -> !expectError || null != error,
                    () -> "Replicant-0016: Invoked updateAreaOfInterest for channel at address " +
                          address + " with status " + status + " but failed to supply " +
                          "the expected error." );
      apiInvariant( () -> expectError || null == error,
                    () -> "Replicant-0017: Invoked updateAreaOfInterest for channel at address " +
                          address + " with status " + status + " and supplied an unexpected error." );
      apiInvariant( () -> !expectSubscription || null != subscription,
                    () -> "Replicant-0018: Invoked updateAreaOfInterest for channel at address " +
                          address + " with status " + status + " and the context is missing expected subscription." );
      apiInvariant( () -> expectSubscription || null == subscription,
                    () -> "Replicant-0019: Invoked updateAreaOfInterest for channel at address " +
                          address + " with status " + status + " and found unexpected subscription in the context." );
    }
    setStatus( status );
    setSubscription( expectSubscription ? getReplicantContext().findSubscription( getAddress() ) : null );
    setError( error );
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy().reportSpyEvent( new AreaOfInterestStatusUpdatedEvent( this ) );
    }
  }

  @Override
  public String toString()
  {
    if ( Replicant.areNamesEnabled() )
    {
      return "AreaOfInterest[" + _address +
             ( null == _filter ? "" : " Filter: " + FilterUtil.filterToString( _filter ) ) +
             " Status: " + _status + "]";
    }
    else
    {
      return super.toString();
    }
  }
}
| client/src/main/java/replicant/AreaOfInterest.java | package replicant;
import arez.annotations.Action;
import arez.annotations.ArezComponent;
import arez.annotations.ComponentId;
import arez.annotations.Observable;
import arez.annotations.PreDispose;
import java.util.Objects;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import replicant.spy.AreaOfInterestDisposedEvent;
import replicant.spy.AreaOfInterestStatusUpdatedEvent;
import static org.realityforge.braincheck.Guards.*;
/**
 * The channel description declares a desired channel subscription and also
 * includes data on the current status of the subscription.
 */
@ArezComponent
public abstract class AreaOfInterest
  extends ReplicantService
{
  // Lifecycle of the subscription request, from "never requested" through
  // loading/updating to unloaded, with explicit failure states.
  public enum Status
  {
    NOT_ASKED,
    LOADING,
    LOADED,
    LOAD_FAILED,
    UPDATING,
    UPDATED,
    UPDATE_FAILED,
    UNLOADING,
    UNLOADED
  }
  // The channel this area of interest subscribes to; fixed at construction.
  @Nonnull
  private final ChannelAddress _address;
  // Optional filter narrowing the subscription; may change over time.
  @Nullable
  private Object _filter;
  @Nonnull
  private Status _status = Status.NOT_ASKED;

  /**
   * Creates an instance via the Arez-generated implementation class.
   */
  @Nonnull
  static AreaOfInterest create( @Nullable final ReplicantContext context,
                                @Nonnull final ChannelAddress address,
                                @Nullable final Object filter )
  {
    return new Arez_AreaOfInterest( context, address, filter );
  }

  AreaOfInterest( @Nullable final ReplicantContext context,
                  @Nonnull final ChannelAddress address,
                  @Nullable final Object filter )
  {
    super( context );
    _address = Objects.requireNonNull( address );
    _filter = filter;
  }

  /**
   * Emits an AreaOfInterestDisposedEvent to spies (when enabled) just
   * before the component is disposed.
   */
  @PreDispose
  final void preDispose()
  {
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy().reportSpyEvent( new AreaOfInterestDisposedEvent( this ) );
    }
  }

  @Nonnull
  public final ChannelAddress getAddress()
  {
    return _address;
  }

  @Observable
  @Nullable
  public Object getFilter()
  {
    return _filter;
  }

  void setFilter( @Nullable final Object filter )
  {
    _filter = filter;
  }

  @Observable
  @Nonnull
  public Status getStatus()
  {
    return _status;
  }

  void setStatus( @Nonnull final Status status )
  {
    _status = Objects.requireNonNull( status );
  }

  // Error captured when the last load/update failed, null otherwise.
  @Observable
  @Nullable
  public abstract Throwable getError();

  abstract void setError( @Nullable Throwable error );

  // The live subscription backing this area of interest, when one exists.
  @Observable
  @Nullable
  public abstract Subscription getSubscription();

  abstract void setSubscription( @Nullable Subscription subscription );

  /**
   * Update the status of the AreaOfInterest.
   * Verifies (when API invariants are enabled) that an error is supplied
   * exactly for the *_FAILED statuses and that a subscription exists in the
   * context exactly for the statuses that imply one, then records the new
   * status/subscription/error and notifies spies.
   */
  @Action
  void updateAreaOfInterest( @Nonnull final Status status, @Nullable final Throwable error )
  {
    // Statuses for which a live subscription is expected to exist.
    final boolean expectSubscription =
      Status.LOADED == status ||
      Status.UPDATED == status ||
      Status.UPDATING == status ||
      Status.UPDATE_FAILED == status ||
      Status.UNLOADING == status;
    if ( Replicant.shouldCheckApiInvariants() )
    {
      final boolean expectError = Status.LOAD_FAILED == status || Status.UPDATE_FAILED == status;
      final Subscription subscription = getReplicantContext().findSubscription( getAddress() );
      final ChannelAddress address = getAddress();
      apiInvariant( () -> !expectError || null != error,
                    () -> "Replicant-0016: Invoked updateAreaOfInterest for channel at address " +
                          address + " with status " + status + " but failed to supply " +
                          "the expected error." );
      apiInvariant( () -> expectError || null == error,
                    () -> "Replicant-0017: Invoked updateAreaOfInterest for channel at address " +
                          address + " with status " + status + " and supplied an unexpected error." );
      apiInvariant( () -> !expectSubscription || null != subscription,
                    () -> "Replicant-0018: Invoked updateAreaOfInterest for channel at address " +
                          address + " with status " + status + " and the context is missing expected subscription." );
      apiInvariant( () -> expectSubscription || null == subscription,
                    () -> "Replicant-0019: Invoked updateAreaOfInterest for channel at address " +
                          address + " with status " + status + " and found unexpected subscription in the context." );
    }
    setStatus( status );
    setSubscription( expectSubscription ? getReplicantContext().findSubscription( getAddress() ) : null );
    setError( error );
    if ( Replicant.areSpiesEnabled() && getReplicantContext().getSpy().willPropagateSpyEvents() )
    {
      getReplicantContext().getSpy().reportSpyEvent( new AreaOfInterestStatusUpdatedEvent( this ) );
    }
  }

  @Override
  public String toString()
  {
    if ( Replicant.areNamesEnabled() )
    {
      return "AreaOfInterest[" + _address +
             ( null == _filter ? "" : " Filter: " + FilterUtil.filterToString( _filter ) ) +
             " Status: " + _status + "]";
    }
    else
    {
      return super.toString();
    }
  }
}
| client/src/main/java/replicant/AreaOfInterest.java | Optimize imports |
|
Java | apache-2.0 | 9745c7367f0b9ffee91e5a88e1d34bdace6a04d3 | 0 | lexs/webimageloader | package com.webimageloader.util;
import java.io.Closeable;
import java.io.File;
import android.annotation.TargetApi;
import android.content.Context;
import android.os.Environment;
public class IOUtil {

    /**
     * Close a resource, ignoring null and swallowing any exception thrown
     * while closing.
     *
     * @param closeable the resource to close, may be null
     */
    public static void closeQuietly(Closeable closeable) {
        if (closeable != null) {
            try {
                closeable.close();
            } catch (Exception ignored) {
            }
        }
    }

    /**
     * Get a usable cache directory (external if available, internal otherwise).
     *
     * @param context The context to use
     * @param uniqueName A unique directory name to append to the cache dir
     * @return The cache dir
     */
    public static File getDiskCacheDir(Context context, String uniqueName) {
        // Check if media is mounted or storage is built-in, if so, try and use external cache dir
        // otherwise use internal cache dir
        final String cachePath = Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState()) ||
                !isExternalStorageRemovable() ?
                        getExternalCacheDir(context).getPath() :
                        context.getCacheDir().getPath();
        return new File(cachePath + File.separator + uniqueName);
    }

    /**
     * Check if external storage is built-in or removable.
     *
     * @return True if external storage is removable (like an SD card), false
     * otherwise.
     */
    @TargetApi(9)
    public static boolean isExternalStorageRemovable() {
        if (Android.isAPI(9)) {
            return Environment.isExternalStorageRemovable();
        }
        // Environment.isExternalStorageRemovable() is unavailable before API 9;
        // conservatively assume the storage is removable.
        return true;
    }

    /**
     * Get the external app cache directory.
     *
     * @param context The context to use
     * @return The external cache dir
     */
    @TargetApi(8)
    public static File getExternalCacheDir(Context context) {
        // Do we have a built-in external cache dir method.
        if (Android.isAPI(8)) {
            File cacheDir = context.getExternalCacheDir();
            if (cacheDir != null) {
                return cacheDir;
            }
        }
        // Before Froyo we need to construct the external cache dir ourselves
        final String cacheDir = "/Android/data/" + context.getPackageName() + "/cache/";
        return new File(Environment.getExternalStorageDirectory().getPath() + cacheDir);
    }

    // Utility class: not instantiable.
    private IOUtil() {}
}
| webimageloader/src/com/webimageloader/util/IOUtil.java | package com.webimageloader.util;
import java.io.Closeable;
import java.io.File;
import android.annotation.TargetApi;
import android.content.Context;
import android.os.Environment;
public class IOUtil {

    /**
     * Close a resource, ignoring null and swallowing any exception thrown
     * while closing.
     *
     * @param closeable the resource to close, may be null
     */
    public static void closeQuietly(Closeable closeable) {
        if (closeable != null) {
            try {
                closeable.close();
            } catch (Exception ignored) {
            }
        }
    }

    /**
     * Get a usable cache directory (external if available, internal otherwise).
     *
     * @param context The context to use
     * @param uniqueName A unique directory name to append to the cache dir
     * @return The cache dir
     */
    public static File getDiskCacheDir(Context context, String uniqueName) {
        // Check if media is mounted or storage is built-in, if so, try and use external cache dir
        // otherwise use internal cache dir
        final String cachePath = Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState()) ||
                !isExternalStorageRemovable() ?
                        getExternalCacheDir(context).getPath() :
                        context.getCacheDir().getPath();
        return new File(cachePath + File.separator + uniqueName);
    }

    /**
     * Check if external storage is built-in or removable.
     *
     * @return True if external storage is removable (like an SD card), false
     * otherwise.
     */
    @TargetApi(9)
    public static boolean isExternalStorageRemovable() {
        if (Android.isAPI(9)) {
            return Environment.isExternalStorageRemovable();
        }
        // Environment.isExternalStorageRemovable() is unavailable before API 9;
        // conservatively assume the storage is removable.
        return true;
    }

    /**
     * Get the external app cache directory.
     *
     * @param context The context to use
     * @return The external cache dir
     */
    @TargetApi(8)
    public static File getExternalCacheDir(Context context) {
        if (hasExternalCacheDir()) {
            File cacheDir = context.getExternalCacheDir();
            if (cacheDir != null) {
                return cacheDir;
            }
        }
        // Before Froyo we need to construct the external cache dir ourselves
        final String cacheDir = "/Android/data/" + context.getPackageName() + "/cache/";
        return new File(Environment.getExternalStorageDirectory().getPath() + cacheDir);
    }

    /**
     * Check if OS version has built-in external cache dir method.
     */
    public static boolean hasExternalCacheDir() {
        // Bug fix: Context.getExternalCacheDir() was introduced in API level 8
        // (Froyo), not API 9. Checking for API 9 here made Froyo devices fall
        // back to the hand-built external path unnecessarily.
        return Android.isAPI(8);
    }

    // Utility class: not instantiable.
    private IOUtil() {}
}
| Use correct API level when checking for functionality
| webimageloader/src/com/webimageloader/util/IOUtil.java | Use correct API level when checking for functionality |
|
Java | apache-2.0 | 9e1905aa1b1e2e018bb8ab44f0afb1c7b86d7797 | 0 | matthewrkula/seam-carver | package com.mattkula.seamcarver;
import javax.imageio.ImageIO;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.nio.Buffer;
/**
* Created by matt on 12/2/14.
*/
public class Carver {
    // When true, debug artifacts are written to disk: the edge map, the seam
    // painted red, and a snapshot before each carve is applied.
    public static boolean DEBUG = true;

    String mName;                  // path the original image was loaded from
    BufferedImage mImage;          // current working image (narrows by one column per carve)
    BufferedImage mGreyscaleImage; // greyscale rendering of mImage, input to edge detection
    BufferedImage mEdgeImage;      // Sobel edge map; per-pixel intensity is used as energy
    int height, width;             // dimensions of mImage, refreshed by setImage()

    /**
     * Loads the image at {@code name} and builds its greyscale and edge images.
     *
     * @param name path of the source image file
     * @throws IOException if the file cannot be read
     */
    public Carver(String name) throws IOException {
        mName = name;
        setImage(ImageIO.read(new File(name)));
    }

    // Installs a new working image and regenerates all derived state
    // (dimensions, greyscale copy, edge map).
    private void setImage(BufferedImage image) {
        mImage = image;
        height = mImage.getHeight();
        width = mImage.getWidth();
        generateGreyscaleImage();
        detectEdges();
    }

    // Renders mImage into a TYPE_BYTE_GRAY buffer; drawing into the greyscale
    // buffer performs the colour conversion.
    private void generateGreyscaleImage() {
        mGreyscaleImage = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
        Graphics g = mGreyscaleImage.getGraphics();
        g.drawImage(mImage, 0, 0, null);
        g.dispose();
    }

    // Runs Sobel edge detection on the greyscale image to produce the energy map.
    private void detectEdges() {
        mEdgeImage = EdgeDetector.getEdges(mGreyscaleImage, EdgeDetector.Type.SOBEL);
        if (DEBUG) Utils.saveImage("edges.png", mEdgeImage);
    }

    /**
     * Carves the image and writes the result to "done.png".
     *
     * @param rows    horizontal seams to remove — NOTE(review): currently unused;
     *                only vertical carving is implemented
     * @param columns vertical seams (columns) to remove
     */
    public void carve(int rows, int columns) {
        for (int i=0; i < columns; i++) {
            verticalCarve();
        }
        Utils.saveImage("done.png", mImage);
    }

    // Removes one vertical seam: builds a top-to-bottom path map, marks the
    // cheapest bottom-row path as deleted, then copies each row minus its
    // deleted pixel into an image one column narrower.
    private void verticalCarve() {
        PixelPosition[][] pathMap = new PixelPosition[height][width];
        // Row 0: each pixel starts a path costing just its own energy.
        for (int x = 0; x < width; x++) {
            pathMap[0][x] = new PixelPosition(getCost(x, 0), null, x, 0);
        }
        int lowestCost = Integer.MAX_VALUE;
        PixelPosition lowestPixel = null;
        int cost;
        for (int y = 1; y < height; y++) {
            for (int x = 0; x < width; x++) {
                // Examine the three upper neighbours (x-1, x, x+1). Out-of-bounds
                // neighbours cost Integer.MAX_VALUE and are never selected.
                // NOTE(review): this compares the neighbours' raw edge energy
                // (getCost) rather than their accumulated path cost
                // (pathMap[y-1][x+i].cost); a textbook seam-carving DP compares
                // accumulated costs — confirm whether this is intentional.
                for (int i = -1; i < 2; i++ ) {
                    cost = getCost(x + i, y - 1);
                    if (cost < lowestCost) {
                        lowestCost = cost;
                        lowestPixel = pathMap[y - 1][x + i];
                    }
                }
                pathMap[y][x] = new PixelPosition(getCost(x, y) + lowestPixel.cost, lowestPixel, x, y);
                lowestCost = Integer.MAX_VALUE;  // reset accumulators for the next pixel
                lowestPixel = null;
            }
        }
        // Walk the cheapest bottom-row path back to the top, marking pixels deleted.
        PixelPosition pixel = getStartingPixelPosition(pathMap[height-1]);
        while (pixel != null) {
            if (DEBUG) mImage.setRGB(pixel.x, pixel.y, 0xFFFF0000);  // paint the seam red
            pixel.deleted = true;
            pixel = pixel.previousPosition;
        }
        // Rebuild one pixel narrower: after the deleted pixel in a row, every
        // remaining pixel shifts left by one.
        BufferedImage newImage = new BufferedImage(width - 1, height, BufferedImage.TYPE_INT_ARGB);
        int foundOffset = 0;
        for (int y = 0; y < height; y++) {
            foundOffset = 0;
            for (int x = 0; x < width - 1; x++) {
                if (pathMap[y][x].deleted) {
                    foundOffset = 1;
                }
                newImage.setRGB(x, y, mImage.getRGB(x + foundOffset, y));
            }
        }
        if (DEBUG) Utils.saveImage("lastline.png", mImage);
        setImage(newImage);
    }

    // Returns the bottom-row path end with the smallest accumulated cost.
    private PixelPosition getStartingPixelPosition(PixelPosition[] array) {
        PixelPosition smallestNumber = array[0];
        for (int i = 1; i < array.length; i++) {
            if (array[i].cost < smallestNumber.cost) {
                smallestNumber = array[i];
            }
        }
        return smallestNumber;
    }

    // Energy of pixel (x, y): square root of the edge map's low byte.
    // Out-of-bounds coordinates return Integer.MAX_VALUE so they never win.
    private int getCost(int x, int y) {
        if (x < 0 || y < 0 || x >= width || y >= height) {
            return Integer.MAX_VALUE;
        }
        return (int)Math.sqrt(mEdgeImage.getRGB(x, y) & 0xFF);
    }

    // Node in the seam path map: accumulated cost, position, back-pointer to the
    // chosen upper neighbour, and a flag set once the pixel is marked for removal.
    private class PixelPosition {
        int cost, x, y;
        boolean deleted = false;
        PixelPosition previousPosition = null;

        public PixelPosition(int cost, PixelPosition previousPosition, int x, int y) {
            this.previousPosition = previousPosition;
            this.cost = cost;
            this.x = x;
            this.y = y;
        }
    }
}
| src/com/mattkula/seamcarver/Carver.java | package com.mattkula.seamcarver;
import javax.imageio.ImageIO;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.nio.Buffer;
/**
* Created by matt on 12/2/14.
*/
public class Carver {
    // When true, debug artifacts (edge map, red seam overlay, pre-carve snapshot)
    // are written to disk.
    public static boolean DEBUG = true;

    String mName;                  // path the original image was loaded from
    BufferedImage mImage;          // current working image
    BufferedImage mGreyscaleImage; // greyscale copy, input to edge detection
    BufferedImage mEdgeImage;      // Sobel edge map used as the energy function
    int height, width;             // dimensions of mImage, kept in sync by setImage()

    // Loads the image and prepares the greyscale and edge images.
    public Carver(String name) throws IOException {
        mName = name;
        setImage(ImageIO.read(new File(name)));
    }

    // Installs a new working image and regenerates dimensions + derived images.
    private void setImage(BufferedImage image) {
        mImage = image;
        height = mImage.getHeight();
        width = mImage.getWidth();
        generateGreyscaleImage();
        detectEdges();
    }

    // Drawing into a TYPE_BYTE_GRAY buffer performs the greyscale conversion.
    private void generateGreyscaleImage() {
        mGreyscaleImage = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_GRAY);
        Graphics g = mGreyscaleImage.getGraphics();
        g.drawImage(mImage, 0, 0, null);
        g.dispose();
    }

    // Sobel edge detection over the greyscale image produces the energy map.
    private void detectEdges() {
        mEdgeImage = EdgeDetector.getEdges(mGreyscaleImage, EdgeDetector.Type.SOBEL);
        if (DEBUG) Utils.saveImage("edges.png", mEdgeImage);
    }

    // Removes `columns` vertical seams and writes the result to "done.png".
    // NOTE(review): the `rows` parameter is currently unused.
    public void carve(int rows, int columns) {
        for (int i=0; i < columns; i++) {
            verticalCarve();
        }
        Utils.saveImage("done.png", mImage);
    }

    // Removes one vertical seam: path map top-to-bottom, mark cheapest path
    // deleted, copy rows (minus the deleted pixel) into a narrower image.
    private void verticalCarve() {
        PixelPosition[][] pathMap = new PixelPosition[height][width];
        // Row 0: each pixel starts a path costing just its own energy.
        for (int x = 0; x < width; x++) {
            pathMap[0][x] = new PixelPosition(getCost(x, 0), null, x, 0);
        }
        int lowestCost = Integer.MAX_VALUE;
        PixelPosition lowestPixel = null;
        int cost;
        for (int y = 1; y < height; y++) {
            for (int x = 0; x < width; x++) {
                // Pick among the three upper neighbours; out-of-bounds ones cost
                // Integer.MAX_VALUE and are never chosen.
                // NOTE(review): compares raw edge energy, not the neighbours'
                // accumulated path cost — confirm this is the intended heuristic.
                for (int i = -1; i < 2; i++ ) {
                    cost = getCost(x + i, y - 1);
                    if (cost < lowestCost) {
                        lowestCost = cost;
                        lowestPixel = pathMap[y - 1][x + i];
                    }
                }
                pathMap[y][x] = new PixelPosition(getCost(x, y) + lowestPixel.cost, lowestPixel, x, y);
                lowestCost = Integer.MAX_VALUE;  // reset for the next pixel
                lowestPixel = null;
            }
        }
        // Trace the cheapest bottom-row path upward, marking pixels deleted.
        PixelPosition pixel = getStartingPixelPosition(pathMap[height-1]);
        while (pixel != null) {
            if (DEBUG) mImage.setRGB(pixel.x, pixel.y, 0xFFFF0000);  // paint seam red
            pixel.deleted = true;
            pixel = pixel.previousPosition;
        }
        // Rebuild one pixel narrower; pixels after the deleted one shift left.
        BufferedImage newImage = new BufferedImage(width - 1, height, BufferedImage.TYPE_INT_ARGB);
        int foundOffset = 0;
        for (int y = 0; y < height; y++) {
            foundOffset = 0;
            for (int x = 0; x < width - 1; x++) {
                if (pathMap[y][x].deleted) {
                    foundOffset = 1;
                }
                newImage.setRGB(x, y, mImage.getRGB(x + foundOffset, y));
            }
        }
        if (DEBUG) Utils.saveImage("lastline.png", mImage);
        setImage(newImage);
    }

    // Returns the bottom-row path end with the smallest accumulated cost.
    private PixelPosition getStartingPixelPosition(PixelPosition[] array) {
        PixelPosition smallestNumber = array[0];
        for (int i = 1; i < array.length; i++) {
            if (array[i].cost < smallestNumber.cost) {
                smallestNumber = array[i];
            }
        }
        return smallestNumber;
    }

    // Energy of (x, y): sqrt of the edge map's low byte; out-of-bounds
    // coordinates return Integer.MAX_VALUE so they never win the comparison.
    private int getCost(int x, int y) {
        if (x < 0 || y < 0 || x >= width || y >= height) {
            return Integer.MAX_VALUE;
        }
        return (int)Math.sqrt(mEdgeImage.getRGB(x, y) & 0xFF);
    }

    // Node in the seam path map: accumulated cost, position, back-pointer, and
    // a deletion flag set when the pixel's seam is chosen for removal.
    private class PixelPosition {
        public PixelPosition(int cost, PixelPosition previousPosition, int x, int y) {
            this.previousPosition = previousPosition;
            this.cost = cost;
            this.x = x;
            this.y = y;
        }
        int cost = -1;
        int x, y;
        PixelPosition previousPosition = null;
        boolean deleted = false;
    }
}
| Small change
| src/com/mattkula/seamcarver/Carver.java | Small change |
|
Java | apache-2.0 | fafc73de5e0c4fb57ace621a695d6b52eefdb78d | 0 | googleinterns/step200-2020,googleinterns/step200-2020,googleinterns/step200-2020 | package com.google.sps.servlets;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.EntityNotFoundException;
import com.google.appengine.api.datastore.Key;
import com.google.common.flogger.FluentLogger;
import com.google.gson.Gson;
import java.io.IOException;
import java.util.ArrayList;
import javax.servlet.annotation.MultipartConfig;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/** Servlet that returns start location and destinations user inputs */
@MultipartConfig
@WebServlet("/api/destinations")
public class DestinationsServlet extends HttpServlet {
    private static final FluentLogger logger = FluentLogger.forEnclosingClass();
    private final Gson gson = new Gson();
    private final DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
    // Key of the single UserInputs entity this servlet reads and writes.
    private Key userKey;

    /** Creates the backing UserInputs entity with an empty start and destination list. */
    @Override
    public void init() {
        Entity userEntity = new Entity("UserInputs");
        userEntity.setProperty("start", "");
        userEntity.setProperty("destinations", new ArrayList<String>());
        datastore.put(userEntity);
        userKey = userEntity.getKey();
    }

    /** Returns the stored start location and destinations as JSON. */
    @Override
    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
        Entity entity = fetchUserEntity(response);
        if (entity == null) {
            return; // empty UserLocations already written and the miss logged
        }
        String start = (String) entity.getProperty("start");
        ArrayList<String> destinations = (ArrayList<String>) entity.getProperty("destinations");
        writeJson(response, new UserLocations(start, destinations));
    }

    /** Stores the posted start location, appends the posted destination, echoes the result. */
    @Override
    public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException {
        Entity entity = fetchUserEntity(response);
        if (entity == null) {
            return; // empty UserLocations already written and the miss logged
        }
        String start = request.getParameter("start-location");
        String destination = request.getParameter("destinations");
        entity.setProperty("start", start);
        ArrayList<String> destinations = (ArrayList<String>) entity.getProperty("destinations");
        if (destinations == null) {
            destinations = new ArrayList<String>();
        }
        destinations.add(destination);
        entity.setProperty("destinations", destinations);
        datastore.put(entity);
        writeJson(response, new UserLocations(start, destinations));
    }

    /**
     * Looks up the UserInputs entity. On a miss, writes an empty UserLocations
     * response, logs the failure, and returns null so callers can bail out.
     * (Extracted: this try/catch was duplicated verbatim in doGet and doPost.)
     */
    private Entity fetchUserEntity(HttpServletResponse response) throws IOException {
        try {
            return datastore.get(userKey);
        } catch (EntityNotFoundException e) {
            writeJson(response, new UserLocations("", new ArrayList<String>()));
            logger.atInfo().withCause(e).log("Unable to find UserLocations Entity %s", userKey);
            return null;
        }
    }

    /** Serializes {@code userLocations} to the response as JSON. */
    private void writeJson(HttpServletResponse response, UserLocations userLocations)
            throws IOException {
        response.setContentType("application/json;");
        response.getWriter().println(gson.toJson(userLocations));
    }
}
| byway/src/main/java/com/google/sps/servlets/DestinationsServlet.java | package com.google.sps.servlets;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.EntityNotFoundException;
import com.google.appengine.api.datastore.Key;
import com.google.common.flogger.FluentLogger;
import com.google.gson.Gson;
import java.io.IOException;
import java.util.ArrayList;
import javax.servlet.annotation.MultipartConfig;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/** Servlet that returns start location and destinations user inputs */
@MultipartConfig
@WebServlet("/api/destinations")
public class DestinationsServlet extends HttpServlet {
    private static final FluentLogger logger = FluentLogger.forEnclosingClass();
    private final Gson gson = new Gson();
    private final DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
    // Key of the single UserInputs entity this servlet reads and writes.
    private Key userKey;

    /** Creates the backing UserInputs entity with an empty start and destination list. */
    @Override
    public void init() {
        Entity userEntity = new Entity("UserInputs");
        userEntity.setProperty("start", "");
        userEntity.setProperty("destinations", new ArrayList<String>());
        datastore.put(userEntity);
        userKey = userEntity.getKey();
    }

    /** Returns the stored start location and destinations as JSON. */
    @Override
    public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
        Entity entity;
        try {
            entity = datastore.get(userKey);
        } catch (EntityNotFoundException e) {
            // Entity missing: respond with an empty UserLocations and log the miss.
            UserLocations userLocations = new UserLocations("", new ArrayList<String>());
            response.setContentType("application/json;");
            response.getWriter().println(gson.toJson(userLocations));
            logger.atInfo().withCause(e).log("Unable to find UserLocations Entity %s", userKey);
            return;
        }
        String start = (String) entity.getProperty("start");
        ArrayList<String> destinations = (ArrayList<String>) entity.getProperty("destinations");
        UserLocations userLocations = new UserLocations(start, destinations);
        response.setContentType("application/json;");
        response.getWriter().println(gson.toJson(userLocations));
    }

    /** Stores the posted start location, appends the posted destination, echoes the result. */
    @Override
    public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException {
        Entity entity;
        try {
            entity = datastore.get(userKey);
        } catch (EntityNotFoundException e) {
            // Same fallback as doGet: empty payload plus a logged miss.
            UserLocations userLocations = new UserLocations("", new ArrayList<String>());
            response.setContentType("application/json;");
            response.getWriter().println(gson.toJson(userLocations));
            logger.atInfo().withCause(e).log("Unable to find UserLocations Entity %s", userKey);
            return;
        }
        String start = request.getParameter("start-location");
        String destination = request.getParameter("destinations");
        entity.setProperty("start", start);
        ArrayList<String> destinations = (ArrayList<String>) entity.getProperty("destinations");
        // First destination ever posted: the stored list may still be null.
        // NOTE(review): spacing here deviates from the file's "if (...) {" style.
        if(destinations == null){
            destinations = new ArrayList<String>();
        }
        destinations.add(destination);
        entity.setProperty("destinations", destinations);
        datastore.put(entity);
        UserLocations userLocations = new UserLocations(start, destinations);
        response.setContentType("application/json;");
        response.getWriter().println(gson.toJson(userLocations));
    }
}
| fix java format
| byway/src/main/java/com/google/sps/servlets/DestinationsServlet.java | fix java format |
|
Java | apache-2.0 | e5fe524a585e3428d5bc32129fc7343afe6691bb | 0 | TechnologyConversations/TechnologyConversationsBdd,TechnologyConversations/TechnologyConversationsBdd,TechnologyConversations/TechnologyConversationsBdd,TechnologyConversations/TechnologyConversationsBdd,TechnologyConversations/TechnologyConversationsBdd | package models;
import org.jbehave.core.io.CodeLocations;
import org.jbehave.core.io.StoryFinder;
import org.jbehave.core.junit.JUnitStories;
import org.jbehave.core.steps.InjectableStepsFactory;
import org.jbehave.core.steps.InstanceStepsFactory;
import java.util.ArrayList;
import java.util.List;
/**
 * JUnitStories runner configured with a single story path and step instances
 * created reflectively from fully-qualified class names.
 */
public class JBehaveRunner extends JUnitStories {

    private String storyPath;
    private List<Object> stepsInstances;

    /**
     * @param storyPathValue      path pattern of the stories to run
     * @param stepsInstancesNames fully-qualified class names of the step classes
     * @throws Exception if a steps class cannot be found or instantiated
     */
    public JBehaveRunner(String storyPathValue, List<String> stepsInstancesNames) throws Exception {
        setStoryPath(storyPathValue);
        setStepsInstancesFromNames(stepsInstancesNames);
    }

    public void setStoryPath(String value) {
        storyPath = value;
    }

    public String getStoryPath() {
        return storyPath;
    }

    /** Instantiates one steps object per class name via its no-arg constructor. */
    public void setStepsInstancesFromNames(List<String> value) throws Exception {
        stepsInstances = new ArrayList<>();
        for (String stepsClassName : value) {
            stepsInstances.add(Class.forName(stepsClassName).newInstance());
        }
    }

    public List<Object> getStepsInstances() {
        return stepsInstances;
    }

    /** Resolves the configured story path relative to the code location root. */
    @Override
    protected List<String> storyPaths() {
        final StoryFinder finder = new StoryFinder();
        return finder.findPaths(CodeLocations.codeLocationFromPath(""), getStoryPath(), "");
    }

    /** Builds a steps factory over the pre-instantiated step objects. */
    @Override
    public InjectableStepsFactory stepsFactory() {
        return new InstanceStepsFactory(configuration(), stepsInstances);
    }
}
| app/models/JBehaveRunner.java | package models;
import org.jbehave.core.io.CodeLocations;
import org.jbehave.core.io.StoryFinder;
import org.jbehave.core.junit.JUnitStories;
import org.jbehave.core.steps.InjectableStepsFactory;
import org.jbehave.core.steps.InstanceStepsFactory;
import java.util.ArrayList;
import java.util.List;
/**
 * JUnitStories runner configured with a single story path and step instances
 * created reflectively from fully-qualified class names.
 */
public class JBehaveRunner extends JUnitStories {
    private String storyPath;

    public void setStoryPath(String value) {
        storyPath = value;
    }

    public String getStoryPath() {
        return storyPath;
    }

    private List<Object> stepsInstances;

    /**
     * Instantiates one steps object per class name via its no-arg constructor.
     *
     * @param value fully-qualified class names of the step classes
     * @throws Exception if a class cannot be found or instantiated
     */
    public void setStepsInstancesFromNames(List<String> value) throws Exception {
        // Diamond operator: element type is inferred from the field declaration,
        // matching the style used elsewhere in this project.
        stepsInstances = new ArrayList<>();
        for (String className : value) {
            stepsInstances.add(Class.forName(className).newInstance());
        }
    }

    public List<Object> getStepsInstances() {
        return stepsInstances;
    }

    public JBehaveRunner(String storyPathValue, List<String> stepsInstancesNames) throws Exception {
        setStoryPath(storyPathValue);
        setStepsInstancesFromNames(stepsInstancesNames);
    }

    /** Resolves the configured story path relative to the code location root. */
    @Override
    protected List<String> storyPaths() {
        return new StoryFinder()
                .findPaths(CodeLocations.codeLocationFromPath(""), getStoryPath(), "");
    }

    /** Builds a steps factory over the pre-instantiated step objects. */
    @Override
    public InjectableStepsFactory stepsFactory() {
        return new InstanceStepsFactory(configuration(), stepsInstances);
    }
}
| Added JBehaveRunner java class.
| app/models/JBehaveRunner.java | Added JBehaveRunner java class. |
|
Java | apache-2.0 | fdb9cbb9d403af964b9d6945e3aa6f64203871cd | 0 | kolstae/openpipe | package no.trank.openpipe.solr.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import junit.framework.TestCase;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @version $Revision$
*/
/** Round-trip tests for IOUtil's UTF string and variable-length "nibble" integer codecs. */
public class IOUtilTest extends TestCase {
    private static final Logger log = LoggerFactory.getLogger(IOUtilTest.class);

    private Random rnd = new Random();  // unseeded: inputs vary between runs

    // Round-trips random mixed ASCII/non-ASCII strings through writeUTF/readUTF
    // using a shared fixed-size buffer for both streams.
    public void testWriteReadUTF() throws Exception {
        final byte[] buf = new byte[1024];
        final ByteArrayInputStream bin = new ByteArrayInputStream(buf);
        final MyByteArrayOutputStream bout = new MyByteArrayOutputStream(buf);
        bin.mark(16);
        for (int i = 0; i < 200; i += 7) {
            final String text = generateRandomText(50, 150);
            IOUtil.writeUTF(bout, text);
            assertEquals(text, IOUtil.readUTF(bin));
            bout.reset();
            bin.reset();
        }
    }

    // Round-trips a string longer than Short.MAX_VALUE to cover the long-string path.
    public void testLongWriteReadUTF() throws Exception {
        final ByteArrayOutputStream bout = new ByteArrayOutputStream(Short.MAX_VALUE + 4096);
        final String text = generateRandomText(Short.MAX_VALUE, 127);
        IOUtil.writeUTF(bout, text);
        final ByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray());
        assertEquals(text, IOUtil.readUTF(bin));
    }

    // Builds a random string of baseLen + [0, varLen) chars; positions where
    // i % 13 == 7 get a non-ASCII letter/digit so multi-byte encoding is exercised.
    private String generateRandomText(int baseLen, int varLen) {
        final int length = rnd.nextInt(varLen) + baseLen;
        final char[] chars = new char[length];
        for (int i = 0; i < length; i++) {
            if (i % 13 == 7) {
                char c = getRandomChar(Character.MAX_VALUE, 128);
                chars[i] = c;
            } else {
                chars[i] = getRandomChar(127, 0);
            }
        }
        return new String(chars);
    }

    // Picks a random letter-or-digit char in [minValue, maxValue).
    private char getRandomChar(int maxValue, int minValue) {
        char c = (char)(rnd.nextInt(maxValue - minValue) + minValue);
        while (!Character.isLetterOrDigit(c)) {
            c = (char)(rnd.nextInt(maxValue- minValue) + minValue);
        }
        return c;
    }

    // Round-trips every 13th non-negative int through writeNibble/readNibble,
    // then spot-checks boundary values with debug output.
    // NOTE(review): ~165 million iterations — very slow for a unit test;
    // consider sampling instead of sweeping the whole int range.
    public void testWriteReadNibble() throws Exception {
        final byte[] buf = new byte[5];
        final ByteArrayInputStream bin = new ByteArrayInputStream(buf);
        final MyByteArrayOutputStream bout = new MyByteArrayOutputStream(buf);
        bin.mark(5);
        // "&& i >= 0" guards against int overflow wrap-around near MAX_VALUE,
        // which would otherwise make the loop run forever.
        for (int i = 0; i < Integer.MAX_VALUE && i >= 0; i += 13) {
            testValue(bout, bin, i);
        }
        testAndPrint(bout, bin, buf, 0);
        testAndPrint(bout, bin, buf, 128);
        testAndPrint(bout, bin, buf, 5964);
        testAndPrint(bout, bin, buf, Integer.MAX_VALUE);
    }

    // Round-trips one value and, at debug level, logs its encoded bytes in hex.
    private static void testAndPrint(MyByteArrayOutputStream bout, ByteArrayInputStream bin, byte[] buf, int value)
            throws IOException {
        final int len = testValue(bout, bin, value);
        if (log.isDebugEnabled()) {
            final StringBuilder sb = new StringBuilder(len * 5 + 16);
            sb.append(value).append(':');
            for (int i = 0; i < len; i++) {
                sb.append(" 0x");
                sb.append(Integer.toHexString(buf[i] & 0xff).toUpperCase());
            }
            log.debug("{}", sb);
        }
    }

    // Writes value with writeNibble, asserts readNibble returns it, resets both
    // streams, and returns the encoded length in bytes.
    private static int testValue(MyByteArrayOutputStream bout, ByteArrayInputStream bin, int value) throws IOException {
        final int len = IOUtil.writeNibble(bout, value);
        assertEquals(value, IOUtil.readNibble(bin));
        bout.reset();
        bin.reset();
        return len;
    }

    // ByteArrayOutputStream that writes directly into a caller-supplied buffer so
    // the paired input stream sees the bytes without copying.
    private static class MyByteArrayOutputStream extends ByteArrayOutputStream {
        private MyByteArrayOutputStream(byte[] bytes) {
            buf = bytes;
        }
    }
}
| solr-tokenizer/src/test/java/no/trank/openpipe/solr/util/IOUtilTest.java | package no.trank.openpipe.solr.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import junit.framework.TestCase;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @version $Revision$
*/
/** Round-trip tests for IOUtil's UTF string and variable-length "nibble" integer codecs. */
public class IOUtilTest extends TestCase {
    private static final Logger log = LoggerFactory.getLogger(IOUtilTest.class);

    private Random rnd = new Random();  // unseeded: inputs vary between runs

    // Round-trips random mixed ASCII/non-ASCII strings through writeUTF/readUTF
    // using a shared fixed-size buffer for both streams.
    public void testWriteReadUTF() throws Exception {
        final byte[] buf = new byte[1024];
        final ByteArrayInputStream bin = new ByteArrayInputStream(buf);
        final MyByteArrayOutputStream bout = new MyByteArrayOutputStream(buf);
        bin.mark(16);
        for (int i = 0; i < 200; i += 7) {
            final String text = generateRandomText(50, 150);
            IOUtil.writeUTF(bout, text);
            assertEquals(text, IOUtil.readUTF(bin));
            bout.reset();
            bin.reset();
        }
    }

    // Round-trips a string longer than Short.MAX_VALUE to cover the long-string path.
    public void testLongWriteReadUTF() throws Exception {
        final ByteArrayOutputStream bout = new ByteArrayOutputStream(Short.MAX_VALUE + 4096);
        final String text = generateRandomText(Short.MAX_VALUE, 127);
        IOUtil.writeUTF(bout, text);
        final ByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray());
        assertEquals(text, IOUtil.readUTF(bin));
    }

    // Builds a random string of baseLen + [0, varLen) chars; positions where
    // i % 13 == 7 get a non-ASCII letter/digit so multi-byte encoding is exercised.
    private String generateRandomText(int baseLen, int varLen) {
        final int length = rnd.nextInt(varLen) + baseLen;
        final char[] chars = new char[length];
        for (int i = 0; i < length; i++) {
            if (i % 13 == 7) {
                char c = getRandomChar(Character.MAX_VALUE, 128);
                chars[i] = c;
            } else {
                chars[i] = getRandomChar(127, 0);
            }
        }
        return new String(chars);
    }

    // Picks a random letter-or-digit char in [minValue, maxValue).
    private char getRandomChar(int maxValue, int minValue) {
        char c = (char)(rnd.nextInt(maxValue - minValue) + minValue);
        while (!Character.isLetterOrDigit(c)) {
            c = (char)(rnd.nextInt(maxValue- minValue) + minValue);
        }
        return c;
    }

    // Round-trips every 13th non-negative int through writeNibble/readNibble,
    // then spot-checks boundary values with debug output.
    public void testWriteReadNibble() throws Exception {
        final byte[] buf = new byte[5];
        final ByteArrayInputStream bin = new ByteArrayInputStream(buf);
        final MyByteArrayOutputStream bout = new MyByteArrayOutputStream(buf);
        bin.mark(5);
        // FIX: the original condition was only "i < Integer.MAX_VALUE". Since
        // MAX_VALUE is not a multiple of 13 away from 0, i eventually overflows
        // and wraps negative, the condition stays true, and the loop never
        // terminates. Adding "i >= 0" stops the sweep at the wrap-around.
        for (int i = 0; i < Integer.MAX_VALUE && i >= 0; i += 13) {
            testValue(bout, bin, i);
        }
        testAndPrint(bout, bin, buf, 0);
        testAndPrint(bout, bin, buf, 128);
        testAndPrint(bout, bin, buf, 5964);
        testAndPrint(bout, bin, buf, Integer.MAX_VALUE);
    }

    // Round-trips one value and, at debug level, logs its encoded bytes in hex.
    private static void testAndPrint(MyByteArrayOutputStream bout, ByteArrayInputStream bin, byte[] buf, int value)
            throws IOException {
        final int len = testValue(bout, bin, value);
        if (log.isDebugEnabled()) {
            final StringBuilder sb = new StringBuilder(len * 5 + 16);
            sb.append(value).append(':');
            for (int i = 0; i < len; i++) {
                sb.append(" 0x");
                sb.append(Integer.toHexString(buf[i] & 0xff).toUpperCase());
            }
            log.debug("{}", sb);
        }
    }

    // Writes value with writeNibble, asserts readNibble returns it, resets both
    // streams, and returns the encoded length in bytes.
    private static int testValue(MyByteArrayOutputStream bout, ByteArrayInputStream bin, int value) throws IOException {
        final int len = IOUtil.writeNibble(bout, value);
        assertEquals(value, IOUtil.readNibble(bin));
        bout.reset();
        bin.reset();
        return len;
    }

    // ByteArrayOutputStream that writes directly into a caller-supplied buffer so
    // the paired input stream sees the bytes without copying.
    private static class MyByteArrayOutputStream extends ByteArrayOutputStream {
        private MyByteArrayOutputStream(byte[] bytes) {
            buf = bytes;
        }
    }
}
| Fixed IOUtilTest.testWriteReadNibble
| solr-tokenizer/src/test/java/no/trank/openpipe/solr/util/IOUtilTest.java | Fixed IOUtilTest.testWriteReadNibble |
|
Java | apache-2.0 | 1b6e2c3ddf9e4b2c4c06d4d1edb66a445d8a2f89 | 0 | eminn/hazelcast-simulator,pveentjer/hazelcast-simulator,Danny-Hazelcast/hazelcast-stabilizer,Donnerbart/hazelcast-simulator,Donnerbart/hazelcast-simulator,pveentjer/hazelcast-simulator,Danny-Hazelcast/hazelcast-stabilizer,fengshao0907/hazelcast-simulator,gAmUssA/hazelcast-simulator,jerrinot/hazelcast-stabilizer,eminn/hazelcast-simulator,fengshao0907/hazelcast-simulator,hazelcast/hazelcast-simulator,hazelcast/hazelcast-simulator,gAmUssA/hazelcast-simulator,hasancelik/hazelcast-stabilizer,hazelcast/hazelcast-simulator,jerrinot/hazelcast-stabilizer,hasancelik/hazelcast-stabilizer | package com.hazelcast.stabilizer.tests.map;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.core.Member;
import com.hazelcast.core.Partition;
import com.hazelcast.core.PartitionService;
import com.hazelcast.instance.HazelcastInstanceProxy;
import com.hazelcast.logging.ILogger;
import com.hazelcast.logging.Logger;
import com.hazelcast.partition.InternalPartition;
import com.hazelcast.partition.PartitionServiceProxy;
import com.hazelcast.partition.impl.InternalPartitionServiceImpl;
import com.hazelcast.stabilizer.tests.TestContext;
import com.hazelcast.stabilizer.tests.annotations.Run;
import com.hazelcast.stabilizer.tests.annotations.Setup;
import com.hazelcast.stabilizer.tests.annotations.Verify;
import com.hazelcast.stabilizer.tests.annotations.Warmup;
import com.hazelcast.stabilizer.tests.utils.ThreadSpawner;
import java.lang.reflect.Field;
import static org.junit.Assert.assertEquals;
/**
 * Data-safety test: fills a map with maxItems entries, idles while nodes are
 * killed externally, then verifies no entries were lost once the cluster has
 * shrunk by nodeKillCount members and backups have been promoted.
 */
public class DataTeg {
    public String basename = this.getClass().getName();  // also used as the map name
    public int maxItems=10000;      // entries written during warmup
    public int clusterSize=6;       // expected initial cluster size
    public int nodeKillCount=2;     // members expected to be killed during the run

    private TestContext testContext;
    private HazelcastInstance targetInstance;

    public DataTeg(){ }

    /**
     * Blocks until the cluster reaches clusterSize members and every partition
     * has an owner, so warmup writes land on a fully formed cluster.
     */
    @Setup
    public void setup(TestContext testContext) throws Exception {
        this.testContext = testContext;
        targetInstance = testContext.getTargetInstance();
        while ( targetInstance.getCluster().getMembers().size() != clusterSize ){
            System.out.println(basename+" waiting cluster == "+clusterSize);
            Thread.sleep(1000);
        }
        final PartitionService ps = targetInstance.getPartitionService();
        for (Partition partition : ps.getPartitions()) {
            while (partition.getOwner() == null) {
                System.out.println(basename+" partition owner ?");
                Thread.sleep(1000);
            }
        }
    }

    /** Populates the test map with maxItems sequential int keys/values. */
    @Warmup(global = true)
    public void warmup(){
        IMap map = targetInstance.getMap(basename);
        for(int i=0; i<maxItems; i++){
            map.put(i, i);
        }
    }

    /** Spawns one worker that idles until the test stops; node kills happen externally. */
    @Run
    public void run() {
        ThreadSpawner spawner = new ThreadSpawner(testContext.getTestId());
        spawner.spawn(new Worker());
        spawner.awaitCompletion();
    }

    private class Worker implements Runnable {
        @Override
        public void run() {
            while (!testContext.isStopped()) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                    // Restore the interrupt flag so the stop signal is not lost.
                    Thread.currentThread().interrupt();
                }
            }
        }
    }

    /**
     * If the cluster shrank by exactly nodeKillCount, waits up to ~60s for the
     * map to recover its entries and asserts that none were lost.
     */
    @Verify(global = false)
    public void verify() throws Exception {
        IMap map = targetInstance.getMap(basename);
        if ( targetInstance.getCluster().getMembers().size() == clusterSize-nodeKillCount ){
            int max=0;
            while(map.size() != maxItems){
                Thread.sleep(1000);
                if(max++==60){
                    break;
                }
            }
            // FIX: JUnit's order is (message, expected, actual); the original
            // passed map.size() as "expected", yielding misleading failure text.
            assertEquals("data loss ", maxItems, map.size());
            System.out.println(basename+"verify OK ");
        }else{
            System.out.println(basename+": cluster size ="+ targetInstance.getCluster().getMembers().size() );
            System.out.println(basename+": map size ="+ map.size());
        }
    }

    /**
     * Returns the first key >= {@code key} whose partition is owned by this
     * instance's local member. Spins forever if the local member owns nothing.
     */
    public static long nextKeyOwnedby(long key, HazelcastInstance instance) {
        final Member localMember = instance.getCluster().getLocalMember();
        final PartitionService partitionService = instance.getPartitionService();
        for ( ; ; ) {
            Partition partition = partitionService.getPartition(key);
            if (localMember.equals(partition.getOwner())) {
                return key;
            }
            key++;
        }
    }
}
| stabilizer/src/main/java/com/hazelcast/stabilizer/tests/map/DataTeg.java | package com.hazelcast.stabilizer.tests.map;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.IMap;
import com.hazelcast.core.Member;
import com.hazelcast.core.Partition;
import com.hazelcast.core.PartitionService;
import com.hazelcast.instance.HazelcastInstanceProxy;
import com.hazelcast.logging.ILogger;
import com.hazelcast.logging.Logger;
import com.hazelcast.partition.InternalPartition;
import com.hazelcast.partition.PartitionServiceProxy;
import com.hazelcast.partition.impl.InternalPartitionServiceImpl;
import com.hazelcast.stabilizer.tests.TestContext;
import com.hazelcast.stabilizer.tests.annotations.Run;
import com.hazelcast.stabilizer.tests.annotations.Setup;
import com.hazelcast.stabilizer.tests.annotations.Verify;
import com.hazelcast.stabilizer.tests.annotations.Warmup;
import com.hazelcast.stabilizer.tests.utils.ThreadSpawner;
import java.lang.reflect.Field;
import static org.junit.Assert.assertEquals;
/**
 * Data-integrity stabilizer test: fills a map during warmup, idles while
 * cluster members may be killed externally, then verifies that no map
 * entries were lost.
 *
 * <p>Public fields are test properties that the stabilizer framework can
 * override from the test configuration.
 */
public class DataTeg {

    public String basename = this.getClass().getName();
    public int maxItems = 10000;
    public int clusterSize = 6;
    public int nodeKillCount = 1;

    private TestContext testContext;
    private HazelcastInstance targetInstance;

    public DataTeg() { }

    /**
     * Blocks until the cluster has reached {@code clusterSize} members and
     * every partition has an owner, so later phases see a stable cluster.
     */
    @Setup
    public void setup(TestContext testContext) throws Exception {
        this.testContext = testContext;
        targetInstance = testContext.getTargetInstance();

        while ( targetInstance.getCluster().getMembers().size() != clusterSize ){
            System.out.println(basename+" waiting cluster == "+clusterSize);
            Thread.sleep(1000);
        }

        final PartitionService ps = targetInstance.getPartitionService();
        for (Partition partition : ps.getPartitions()) {
            while (partition.getOwner() == null) {
                System.out.println(basename+" partition owner ?");
                Thread.sleep(1000);
            }
        }
    }

    /** Populates the test map with {@code maxItems} entries (key == value). */
    @Warmup(global = true)
    public void warmup(){
        IMap map = targetInstance.getMap(basename);
        for(int i=0; i<maxItems; i++){
            map.put(i, i);
        }
    }

    @Run
    public void run() {
        ThreadSpawner spawner = new ThreadSpawner(testContext.getTestId());
        spawner.spawn(new Worker());
        spawner.awaitCompletion();
    }

    /** Idles until the test is stopped; the data-loss check happens in verify(). */
    private class Worker implements Runnable {
        @Override
        public void run() {
            while (!testContext.isStopped()) {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag (sleep() clears it) and stop idling
                    // instead of silently swallowing the interrupt.
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        }
    }

    /**
     * After {@code nodeKillCount} members were removed, polls for a while to
     * let partition migration finish, then asserts that the map still holds
     * all {@code maxItems} entries.
     */
    @Verify(global = false)
    public void verify() throws Exception {
        IMap map = targetInstance.getMap(basename);
        if ( targetInstance.getCluster().getMembers().size() == clusterSize-nodeKillCount ){
            int max=0;
            while(map.size() != maxItems){
                System.out.println(basename+": verify map size ="+ map.size() +" target = "+maxItems );
                Thread.sleep(10000);
                if(max++==5){
                    break;
                }
            }
            // JUnit argument order is (message, expected, actual).
            assertEquals("data loss", maxItems, map.size());
            System.out.println(basename+"verify OK "+map.size()+"=="+maxItems);
        }
    }

    /**
     * Returns the first key greater than or equal to {@code key} whose
     * partition is owned by the local member of {@code instance}.
     */
    public static long nextKeyOwnedby(long key, HazelcastInstance instance) {
        final Member localMember = instance.getCluster().getLocalMember();
        final PartitionService partitionService = instance.getPartitionService();
        for ( ; ; ) {
            Partition partition = partitionService.getPartition(key);
            if (localMember.equals(partition.getOwner())) {
                return key;
            }
            key++;
        }
    }
}
| testing
| stabilizer/src/main/java/com/hazelcast/stabilizer/tests/map/DataTeg.java | testing |
|
Java | apache-2.0 | 5c0040386b098228768b6b5ac481fc56712594ab | 0 | songzhw/Hello-kotlin,songzhw/Hello-kotlin | package threads;
/*
三个不同的线程将会共用一个 Foo 实例。
线程 A 将会调用 one() 方法
线程 B 将会调用 two() 方法
线程 C 将会调用 three() 方法
请设计修改程序,以确保 two() 方法在 one() 方法之后被执行,three() 方法在 two() 方法之后被执行。
示例 2:
输入: [1,3,2]
输出: "onetwothree"
解释:
输入 [1,3,2] 表示线程 A 将会调用 one() 方法,线程 B 将会调用 three() 方法,线程 C 将会调用 two() 方法。
正确的输出是 "onetwothree"。
链接:https://leetcode-cn.com/problems/print-in-order
*/
/** Shared worker whose methods must be invoked in the order one, two, three. */
class Foo {
    public void one() { System.out.println("one"); }
    public void two() { System.out.println("two"); }
    public void three() { System.out.println("three"); }
}

/**
 * LeetCode 1114 "Print in Order": three threads each call one method of a
 * shared Foo, and the output must always be "one", "two", "three" no matter
 * in which order the threads are started. Simply starting the threads gives
 * no ordering guarantee, so each stage is joined before the next starts.
 */
public class PrintInOrder {
    public static void main(String[] args) {
        Foo foo = new Foo();
        Thread a = new Thread(foo::one);
        Thread b = new Thread(foo::two);
        Thread c = new Thread(foo::three);
        try {
            // join() after each start() enforces one() -> two() -> three().
            a.start();
            a.join();
            b.start();
            b.join();
            c.start();
            c.join();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}
/*
三个不同的线程将会共用一个 Foo 实例。
线程 A 将会调用 one() 方法
线程 B 将会调用 two() 方法
线程 C 将会调用 three() 方法
请设计修改程序,以确保 two() 方法在 one() 方法之后被执行,three() 方法在 two() 方法之后被执行。
示例 2:
输入: [1,3,2]
输出: "onetwothree"
解释:
输入 [1,3,2] 表示线程 A 将会调用 one() 方法,线程 B 将会调用 three() 方法,线程 C 将会调用 two() 方法。
正确的输出是 "onetwothree"。
链接:https://leetcode-cn.com/problems/print-in-order
*/
/** Holder of the three print steps used by the print-in-order exercise. */
class Foo {

    public void one() {
        System.out.println("one");
    }

    public void two() {
        System.out.println("two");
    }

    public void three() {
        System.out.println("three");
    }
}

/** Placeholder entry point; the thread-ordering logic is not implemented yet. */
public class PrintInOrder {

    public static void main(String[] args) {
        final String greeting = "hello world";
        System.out.println(greeting);
    }
}
| Advanced_kb/src/threads/PrintInOrder.java | thread - print in order |
|
Java | apache-2.0 | 1b96147306eb34f00e06725e1bb58f55b819928e | 0 | folio-org/raml-module-builder,folio-org/raml-module-builder,folio-org/raml-module-builder | package org.folio.rest.impl;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import javax.ws.rs.core.Response;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.folio.rest.annotations.Validate;
import org.folio.rest.jaxrs.model.TenantAttributes;
import org.folio.rest.jaxrs.resource.Tenant;
import org.folio.rest.persist.PostgresClient;
import org.folio.dbschema.Schema;
import org.folio.rest.persist.ddlgen.SchemaMaker;
import org.folio.dbschema.TenantOperation;
import org.folio.rest.tools.ClientGenerator;
import org.folio.dbschema.ObjectMapperTool;
import org.folio.rest.tools.client.exceptions.ResponseException;
import org.folio.rest.tools.utils.OutStream;
import org.folio.rest.tools.utils.TenantTool;
import freemarker.template.TemplateException;
import io.vertx.core.AsyncResult;
import io.vertx.core.Context;
import io.vertx.core.Future;
import io.vertx.core.Handler;
import io.vertx.core.Promise;
import io.vertx.core.json.JsonArray;
import io.vertx.core.logging.Logger;
import io.vertx.core.logging.LoggerFactory;
import io.vertx.sqlclient.Row;
/**
* @author shale
*
*/
public class TenantAPI implements Tenant {
public static final String TABLE_JSON = "templates/db_scripts/schema.json";
private static final Logger log = LoggerFactory.getLogger(TenantAPI.class);
  /** Returns the PostgresClient bound to the Vert.x instance owning {@code context}. */
  PostgresClient postgresClient(Context context) {
    return PostgresClient.getInstance(context.owner());
  }
  /**
   * Drops the tenant's schema. Responds 400 if the tenant does not exist or a
   * generated DDL statement fails, 204 on success, 500 on unexpected errors.
   */
  @Validate
  @Override
  public void deleteTenant(Map<String, String> headers,
      Handler<AsyncResult<Response>> handlers, Context context) {
    context.runOnContext(v -> {
      try {
        String tenantId = TenantTool.calculateTenantId( headers.get(ClientGenerator.OKAPI_HEADER_TENANT) );
        log.info("sending... deleteTenant for " + tenantId);
        // Deleting a non-existent tenant is reported as a 400, not silently ignored.
        tenantExists(context, tenantId,
            h -> {
              boolean exists = false;
              if(h.succeeded()){
                exists = h.result();
                if(!exists){
                  handlers.handle(failedFuture(DeleteTenantResponse.
                      respond400WithTextPlain("Tenant does not exist: " + tenantId)));
                  log.error("Can not delete. Tenant does not exist: " + tenantId);
                  return;
                }
                else{
                  log.info("Deleting tenant " + tenantId);
                }
              }
              else{
                handlers.handle(io.vertx.core.Future.failedFuture(h.cause().getMessage()));
                log.error(h.cause().getMessage(), h.cause());
                return;
              }
              // Generate the schema-drop DDL for this tenant/module.
              String sqlFile = null;
              try {
                SchemaMaker sMaker = new SchemaMaker(tenantId, PostgresClient.getModuleName(),
                    TenantOperation.DELETE, null, null);
                sqlFile = sMaker.generateDDL();
              } catch (Exception e1) {
                handlers.handle(io.vertx.core.Future.failedFuture(e1.getMessage()));
                log.error(e1.getMessage(), e1);
                return;
              }
              log.info("Attempting to run delete script for: " + tenantId);
              log.debug("GENERATED SCHEMA " + sqlFile);
              /* connect as user in postgres-conf.json file (super user) - so that all commands will be available */
              postgresClient(context).runSQLFile(sqlFile, true,
                  reply -> {
                    try {
                      String res = "";
                      if(reply.succeeded()){
                        // runSQLFile yields the list of statements that failed;
                        // a non-empty list means the delete was incomplete.
                        res = new JsonArray(reply.result()).encodePrettily();
                        if(reply.result().size() > 0){
                          log.error("Unable to run the following commands during tenant delete: ");
                          reply.result().forEach(System.out::println);
                          handlers.handle(failedFuture(DeleteTenantResponse.respond400WithTextPlain(res)));
                        }
                        else {
                          OutStream os = new OutStream();
                          os.setData(res);
                          handlers.handle(io.vertx.core.Future.succeededFuture(DeleteTenantResponse.respond204()));
                        }
                      }
                      else {
                        log.error(reply.cause().getMessage(), reply.cause());
                        handlers.handle(failedFuture(DeleteTenantResponse
                            .respond500WithTextPlain(reply.cause().getMessage())));
                      }
                    } catch (Exception e) {
                      log.error(e.getMessage(), e);
                      handlers.handle(failedFuture(DeleteTenantResponse
                          .respond500WithTextPlain(e.getMessage())));
                    }
                  });
            });
      } catch (Exception e) {
        log.error(e.getMessage(), e);
        handlers.handle(failedFuture(DeleteTenantResponse
            .respond500WithTextPlain(e.getMessage())));
      }
    });
  }
Future<Boolean> tenantExists(Context context, String tenantId) {
Promise<Boolean> promise = Promise.promise();
tenantExists(context, tenantId, promise.future());
return promise.future();
}
  /**
   * Checks whether a Postgres schema named after the tenant exists.
   *
   * @param handler receives true if the schema exists, false otherwise;
   *                fails on database errors
   */
  void tenantExists(Context context, String tenantId, Handler<AsyncResult<Boolean>> handler){
    /* connect as user in postgres-conf.json file (super user) - so that all commands will be available */
    postgresClient(context).select(
        // tenantId goes through convertToPsqlStandard before being inlined into
        // the SQL literal — assumed to normalize/sanitize it; verify if tenantId
        // can ever come from an untrusted source.
        "SELECT EXISTS(SELECT 1 FROM pg_namespace WHERE nspname = '"+ PostgresClient.convertToPsqlStandard(tenantId) +"');",
        reply -> {
          try {
            if(reply.succeeded()){
              // EXISTS(...) returns exactly one row with one boolean column.
              handler.handle(io.vertx.core.Future.succeededFuture(reply.result().iterator().next().getBoolean(0)));
            }
            else {
              log.error(reply.cause().getMessage(), reply.cause());
              handler.handle(io.vertx.core.Future.failedFuture(reply.cause().getMessage()));
            }
          } catch (Exception e) {
            log.error(e.getMessage(), e);
            handler.handle(io.vertx.core.Future.failedFuture(e.getMessage()));
          }
        });
  }
  /**
   * Reports whether the tenant exists: responds 200 with the plain text
   * "true" or "false", or 500 on database errors.
   */
  @Validate
  @Override
  public void getTenant(Map<String, String> headers, Handler<AsyncResult<Response>> handlers,
      Context context) {
    context.runOnContext(v -> {
      try {
        String tenantId = TenantTool.calculateTenantId( headers.get(ClientGenerator.OKAPI_HEADER_TENANT) );
        log.info("sending... getTenant for " + tenantId);
        tenantExists(context, tenantId, res -> {
          boolean exists = false;
          if(res.succeeded()){
            exists = res.result();
            handlers.handle(io.vertx.core.Future.succeededFuture(GetTenantResponse.respond200WithTextPlain(String.valueOf(
                exists))));
          }
          else{
            log.error(res.cause().getMessage(), res.cause());
            handlers.handle(failedFuture(GetTenantResponse
                .respond500WithTextPlain(res.cause().getMessage())));
          }
        });
      } catch (Exception e) {
        log.error(e.getMessage(), e);
        handlers.handle(failedFuture(GetTenantResponse
            .respond500WithTextPlain(e.getMessage())));
      }
    });
  }
  /**
   * @return previous Schema from rmb_internal.jsonb->>'schemaJson', or null if not exist.
   */
  Future<Schema> previousSchema(Context context, String tenantId, boolean tenantExists) {
    Promise<Schema> promise = Promise.promise();
    // A brand-new tenant has no stored schema to read back.
    if (! tenantExists) {
      promise.complete(null);
      return promise.future();
    }
    String sql = "SELECT jsonb->>'schemaJson' " +
        "FROM " + PostgresClient.convertToPsqlStandard(tenantId) + ".rmb_internal";
    postgresClient(context).selectSingle(sql, select -> {
      if (select.failed()) {
        promise.fail(select.cause());
        return;
      }
      try {
        Row row = select.result();
        String schemaString = row == null ? null : row.getString(0);
        // Missing row or null column: previous schema is unknown — complete with null.
        if (schemaString == null) {
          promise.complete(null);
          return;
        }
        Schema schema = ObjectMapperTool.getMapper().readValue(schemaString, Schema.class);
        promise.complete(schema);
      } catch (Exception e) {
        promise.fail(e);
      }
    });
    return promise.future();
  }
  /**
   * Returns the classpath location of the schema.json DDL template;
   * non-private so it can be overridden (e.g. by tests).
   */
  String getTablePath() {
    return TABLE_JSON;
  }
  /** Thrown by {@code sqlFile} when templates/db_scripts/schema.json is missing. */
  public static class NoSchemaJsonException extends RuntimeException {
  }
/**
* @param tenantExists false for initial installation, true for upgrading
* @param tenantAttributes parameters like module version that may influence generated SQL
* @param previousSchema schema to upgrade from, may be null if unknown and on initial install
* @return the SQL commands to create or upgrade the tenant's schema
* @throws NoSchemaJsonException when templates/db_scripts/schema.json doesn't exist
* @throws TemplateException when processing templates/db_scripts/schema.json fails
*/
public String sqlFile(String tenantId, boolean tenantExists, TenantAttributes tenantAttributes,
Schema previousSchema) throws IOException, TemplateException {
InputStream tableInput = TenantAPI.class.getClassLoader().getResourceAsStream(getTablePath());
if (tableInput == null) {
log.info("Could not find templates/db_scripts/schema.json , "
+ " RMB will not run any scripts for " + tenantId);
throw new NoSchemaJsonException();
}
TenantOperation op = TenantOperation.CREATE;
String previousVersion = null;
String newVersion = tenantAttributes == null ? null : tenantAttributes.getModuleTo();
if (tenantExists) {
op = TenantOperation.UPDATE;
if (tenantAttributes != null) {
previousVersion = tenantAttributes.getModuleFrom();
}
}
SchemaMaker sMaker = new SchemaMaker(tenantId, PostgresClient.getModuleName(), op, previousVersion, newVersion);
String tableInputStr = IOUtils.toString(tableInput, StandardCharsets.UTF_8);
sMaker.setSchemaJson(tableInputStr);
Schema schema = ObjectMapperTool.getMapper().readValue(tableInputStr, Schema.class);
sMaker.setSchema(schema);
sMaker.setPreviousSchema(previousSchema);
String sqlFile = sMaker.generateDDL();
log.debug("GENERATED SCHEMA " + sqlFile);
return sqlFile;
}
  /**
   * Loads the previous schema (when the tenant exists) and generates the
   * install/upgrade DDL. Checked IOException/TemplateException are rethrown
   * unchecked so they can propagate through the Future composition.
   */
  private Future<String> sqlFile(Context context, String tenantId, TenantAttributes tenantAttributes,
      boolean tenantExists) {
    return previousSchema(context, tenantId, tenantExists)
        .compose(previousSchema -> {
          try {
            String sqlFile = sqlFile(tenantId, tenantExists, tenantAttributes, previousSchema);
            return Future.succeededFuture(sqlFile);
          } catch (IOException e) {
            throw new UncheckedIOException(e);
          } catch (TemplateException e) { // checked exception from main.tpl parsing
            throw new IllegalArgumentException(e);
          }
        });
  }
  /**
   * @param tenantAttributes parameters like module version that may influence generated SQL
   * @return the SQL commands to create or upgrade the tenant's schema
   * @throws NoSchemaJsonException when templates/db_scripts/schema.json doesn't exist
   * @throws TemplateException when processing templates/db_scripts/schema.json fails
   */
  public Future<String> sqlFile(Context context, String tenantId, TenantAttributes tenantAttributes) {
    // Determine install vs. upgrade first, then generate the matching DDL.
    return tenantExists(context, tenantId)
        .compose(tenantExists -> sqlFile(context, tenantId, tenantAttributes, tenantExists));
  }
  /**
   * Installs or upgrades a module for a tenant.
   *
   * <p>The <code>handler</code> signals an error with a failing result and a {@link ResponseException}.
   *
   * <p>Responds 201 on initial install, 200 on upgrade, 400 if any generated DDL
   * statement failed, 204 if the module ships no schema.json, 500 otherwise.
   *
   * @see <a href="https://github.com/folio-org/raml-module-builder#extending-the-tenant-init">Extending the Tenant Init</a>
   * for usage examples
   */
  @Validate
  @Override
  public void postTenant(TenantAttributes tenantAttributes, Map<String, String> headers,
      Handler<AsyncResult<Response>> handler, Context context) {
    String tenantId = TenantTool.tenantId(headers);
    log.info("sending... postTenant for " + tenantId);
    if (tenantAttributes != null) {
      log.debug("upgrade from " + tenantAttributes.getModuleFrom() + " to " + tenantAttributes.getModuleTo());
    }
    Future<Boolean> tenantExistsFuture = tenantExists(context, tenantId);
    tenantExistsFuture
    .compose(tenantExists -> sqlFile(context, tenantId, tenantAttributes, tenantExists))
    .compose(sqlFile -> postgresClient(context).runSQLFile(sqlFile, true))
    .map(failedStatements -> {
      String jsonListOfFailures = new JsonArray(failedStatements).encodePrettily();
      if (! failedStatements.isEmpty()) {
        return PostTenantResponse.respond400WithTextPlain(jsonListOfFailures);
      }
      // Safe: tenantExistsFuture completed before the composed stages could run.
      boolean tenantExists = tenantExistsFuture.result();
      return tenantExists
          ? PostTenantResponse.respond200WithApplicationJson(jsonListOfFailures)
          : PostTenantResponse.respond201WithApplicationJson(jsonListOfFailures);
    })
    .onFailure(e -> {
      // A missing schema.json means this module has no DDL to run — not an error.
      if (e instanceof NoSchemaJsonException) {
        handler.handle(Future.succeededFuture(PostTenantResponse.respond204()));
        return;
      }
      log.error(e.getMessage(), e);
      String text = e.getMessage() + "\n" + ExceptionUtils.getStackTrace(e);
      Response response = PostTenantResponse.respond500WithTextPlain(text);
      handler.handle(failedFuture(response));
    })
    .onSuccess(response -> {
      // Error responses (e.g. the 400 above) are delivered as a failed result.
      if (response.getStatus() >= 300) {
        handler.handle(failedFuture(response));
        return;
      }
      handler.handle(Future.succeededFuture(response));
    });
  }
  /**
   * Wraps an HTTP error response so it can be propagated through a Future chain.
   *
   * @return a failed {@link Future} where the failure cause is a {@link ResponseException}
   * containing the {@code response}
   */
  static Future<Response> failedFuture(Response response) {
    return Future.failedFuture(new ResponseException(response));
  }
}
| domain-models-runtime/src/main/java/org/folio/rest/impl/TenantAPI.java | package org.folio.rest.impl;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import javax.ws.rs.core.Response;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.folio.rest.annotations.Validate;
import org.folio.rest.jaxrs.model.TenantAttributes;
import org.folio.rest.jaxrs.resource.Tenant;
import org.folio.rest.persist.PostgresClient;
import org.folio.dbschema.Schema;
import org.folio.rest.persist.ddlgen.SchemaMaker;
import org.folio.dbschema.TenantOperation;
import org.folio.rest.tools.ClientGenerator;
import org.folio.dbschema.ObjectMapperTool;
import org.folio.rest.tools.client.exceptions.ResponseException;
import org.folio.rest.tools.utils.OutStream;
import org.folio.rest.tools.utils.RmbVersion;
import org.folio.rest.tools.utils.TenantTool;
import freemarker.template.TemplateException;
import io.vertx.core.AsyncResult;
import io.vertx.core.Context;
import io.vertx.core.Future;
import io.vertx.core.Handler;
import io.vertx.core.Promise;
import io.vertx.core.json.JsonArray;
import io.vertx.core.logging.Logger;
import io.vertx.core.logging.LoggerFactory;
import io.vertx.sqlclient.Row;
/**
* @author shale
*
*/
public class TenantAPI implements Tenant {
public static final String TABLE_JSON = "templates/db_scripts/schema.json";
private static final Logger log = LoggerFactory.getLogger(TenantAPI.class);
PostgresClient postgresClient(Context context) {
return PostgresClient.getInstance(context.owner());
}
@Validate
@Override
public void deleteTenant(Map<String, String> headers,
Handler<AsyncResult<Response>> handlers, Context context) {
context.runOnContext(v -> {
try {
String tenantId = TenantTool.calculateTenantId( headers.get(ClientGenerator.OKAPI_HEADER_TENANT) );
log.info("sending... deleteTenant for " + tenantId);
tenantExists(context, tenantId,
h -> {
boolean exists = false;
if(h.succeeded()){
exists = h.result();
if(!exists){
handlers.handle(failedFuture(DeleteTenantResponse.
respond400WithTextPlain("Tenant does not exist: " + tenantId)));
log.error("Can not delete. Tenant does not exist: " + tenantId);
return;
}
else{
log.info("Deleting tenant " + tenantId);
}
}
else{
handlers.handle(io.vertx.core.Future.failedFuture(h.cause().getMessage()));
log.error(h.cause().getMessage(), h.cause());
return;
}
String sqlFile = null;
try {
SchemaMaker sMaker = new SchemaMaker(tenantId, PostgresClient.getModuleName(),
TenantOperation.DELETE, null, RmbVersion.getRmbVersion());
sqlFile = sMaker.generateDDL();
} catch (Exception e1) {
handlers.handle(io.vertx.core.Future.failedFuture(e1.getMessage()));
log.error(e1.getMessage(), e1);
return;
}
log.info("Attempting to run delete script for: " + tenantId);
log.debug("GENERATED SCHEMA " + sqlFile);
/* connect as user in postgres-conf.json file (super user) - so that all commands will be available */
postgresClient(context).runSQLFile(sqlFile, true,
reply -> {
try {
String res = "";
if(reply.succeeded()){
res = new JsonArray(reply.result()).encodePrettily();
if(reply.result().size() > 0){
log.error("Unable to run the following commands during tenant delete: ");
reply.result().forEach(System.out::println);
handlers.handle(failedFuture(DeleteTenantResponse.respond400WithTextPlain(res)));
}
else {
OutStream os = new OutStream();
os.setData(res);
handlers.handle(io.vertx.core.Future.succeededFuture(DeleteTenantResponse.respond204()));
}
}
else {
log.error(reply.cause().getMessage(), reply.cause());
handlers.handle(failedFuture(DeleteTenantResponse
.respond500WithTextPlain(reply.cause().getMessage())));
}
} catch (Exception e) {
log.error(e.getMessage(), e);
handlers.handle(failedFuture(DeleteTenantResponse
.respond500WithTextPlain(e.getMessage())));
}
});
});
} catch (Exception e) {
log.error(e.getMessage(), e);
handlers.handle(failedFuture(DeleteTenantResponse
.respond500WithTextPlain(e.getMessage())));
}
});
}
Future<Boolean> tenantExists(Context context, String tenantId) {
Promise<Boolean> promise = Promise.promise();
tenantExists(context, tenantId, promise.future());
return promise.future();
}
void tenantExists(Context context, String tenantId, Handler<AsyncResult<Boolean>> handler){
/* connect as user in postgres-conf.json file (super user) - so that all commands will be available */
postgresClient(context).select(
"SELECT EXISTS(SELECT 1 FROM pg_namespace WHERE nspname = '"+ PostgresClient.convertToPsqlStandard(tenantId) +"');",
reply -> {
try {
if(reply.succeeded()){
handler.handle(io.vertx.core.Future.succeededFuture(reply.result().iterator().next().getBoolean(0)));
}
else {
log.error(reply.cause().getMessage(), reply.cause());
handler.handle(io.vertx.core.Future.failedFuture(reply.cause().getMessage()));
}
} catch (Exception e) {
log.error(e.getMessage(), e);
handler.handle(io.vertx.core.Future.failedFuture(e.getMessage()));
}
});
}
@Validate
@Override
public void getTenant(Map<String, String> headers, Handler<AsyncResult<Response>> handlers,
Context context) {
context.runOnContext(v -> {
try {
String tenantId = TenantTool.calculateTenantId( headers.get(ClientGenerator.OKAPI_HEADER_TENANT) );
log.info("sending... getTenant for " + tenantId);
tenantExists(context, tenantId, res -> {
boolean exists = false;
if(res.succeeded()){
exists = res.result();
handlers.handle(io.vertx.core.Future.succeededFuture(GetTenantResponse.respond200WithTextPlain(String.valueOf(
exists))));
}
else{
log.error(res.cause().getMessage(), res.cause());
handlers.handle(failedFuture(GetTenantResponse
.respond500WithTextPlain(res.cause().getMessage())));
}
});
} catch (Exception e) {
log.error(e.getMessage(), e);
handlers.handle(failedFuture(GetTenantResponse
.respond500WithTextPlain(e.getMessage())));
}
});
}
/**
* @return previous Schema from rmb_internal.jsonb->>'schemaJson', or null if not exist.
*/
Future<Schema> previousSchema(Context context, String tenantId, boolean tenantExists) {
Promise<Schema> promise = Promise.promise();
if (! tenantExists) {
promise.complete(null);
return promise.future();
}
String sql = "SELECT jsonb->>'schemaJson' " +
"FROM " + PostgresClient.convertToPsqlStandard(tenantId) + ".rmb_internal";
postgresClient(context).selectSingle(sql, select -> {
if (select.failed()) {
promise.fail(select.cause());
return;
}
try {
Row row = select.result();
String schemaString = row == null ? null : row.getString(0);
if (schemaString == null) {
promise.complete(null);
return;
}
Schema schema = ObjectMapperTool.getMapper().readValue(schemaString, Schema.class);
promise.complete(schema);
} catch (Exception e) {
promise.fail(e);
}
});
return promise.future();
}
String getTablePath() {
return TABLE_JSON;
}
public static class NoSchemaJsonException extends RuntimeException {
}
/**
* @param tenantExists false for initial installation, true for upgrading
* @param tenantAttributes parameters like module version that may influence generated SQL
* @param previousSchema schema to upgrade from, may be null if unknown and on initial install
* @return the SQL commands to create or upgrade the tenant's schema
* @throws NoSchemaJsonException when templates/db_scripts/schema.json doesn't exist
* @throws TemplateException when processing templates/db_scripts/schema.json fails
*/
public String sqlFile(String tenantId, boolean tenantExists, TenantAttributes tenantAttributes,
Schema previousSchema) throws IOException, TemplateException {
InputStream tableInput = TenantAPI.class.getClassLoader().getResourceAsStream(getTablePath());
if (tableInput == null) {
log.info("Could not find templates/db_scripts/schema.json , "
+ " RMB will not run any scripts for " + tenantId);
throw new NoSchemaJsonException();
}
TenantOperation op = TenantOperation.CREATE;
String previousVersion = null;
String newVersion = tenantAttributes == null ? null : tenantAttributes.getModuleTo();
if (tenantExists) {
op = TenantOperation.UPDATE;
if (tenantAttributes != null) {
previousVersion = tenantAttributes.getModuleFrom();
}
}
SchemaMaker sMaker = new SchemaMaker(tenantId, PostgresClient.getModuleName(), op, previousVersion, newVersion);
String tableInputStr = IOUtils.toString(tableInput, StandardCharsets.UTF_8);
sMaker.setSchemaJson(tableInputStr);
Schema schema = ObjectMapperTool.getMapper().readValue(tableInputStr, Schema.class);
sMaker.setSchema(schema);
sMaker.setPreviousSchema(previousSchema);
String sqlFile = sMaker.generateDDL();
log.debug("GENERATED SCHEMA " + sqlFile);
return sqlFile;
}
private Future<String> sqlFile(Context context, String tenantId, TenantAttributes tenantAttributes,
boolean tenantExists) {
return previousSchema(context, tenantId, tenantExists)
.compose(previousSchema -> {
try {
String sqlFile = sqlFile(tenantId, tenantExists, tenantAttributes, previousSchema);
return Future.succeededFuture(sqlFile);
} catch (IOException e) {
throw new UncheckedIOException(e);
} catch (TemplateException e) { // checked exception from main.tpl parsing
throw new IllegalArgumentException(e);
}
});
}
/**
* @param tenantAttributes parameters like module version that may influence generated SQL
* @return the SQL commands to create or upgrade the tenant's schema
* @throws NoSchemaJsonException when templates/db_scripts/schema.json doesn't exist
* @throws TemplateException when processing templates/db_scripts/schema.json fails
*/
public Future<String> sqlFile(Context context, String tenantId, TenantAttributes tenantAttributes) {
return tenantExists(context, tenantId)
.compose(tenantExists -> sqlFile(context, tenantId, tenantAttributes, tenantExists));
}
/**
* Installs or upgrades a module for a tenant.
*
* <p>The <code>handler</code> signals an error with a failing result and a {@link ResponseException}.
*
* @see <a href="https://github.com/folio-org/raml-module-builder#extending-the-tenant-init">Extending the Tenant Init</a>
* for usage examples
*/
@Validate
@Override
public void postTenant(TenantAttributes tenantAttributes, Map<String, String> headers,
Handler<AsyncResult<Response>> handler, Context context) {
String tenantId = TenantTool.tenantId(headers);
log.info("sending... postTenant for " + tenantId);
if (tenantAttributes != null) {
log.debug("upgrade from " + tenantAttributes.getModuleFrom() + " to " + tenantAttributes.getModuleTo());
}
Future<Boolean> tenantExistsFuture = tenantExists(context, tenantId);
tenantExistsFuture
.compose(tenantExists -> sqlFile(context, tenantId, tenantAttributes, tenantExists))
.compose(sqlFile -> postgresClient(context).runSQLFile(sqlFile, true))
.map(failedStatements -> {
String jsonListOfFailures = new JsonArray(failedStatements).encodePrettily();
if (! failedStatements.isEmpty()) {
return PostTenantResponse.respond400WithTextPlain(jsonListOfFailures);
}
boolean tenantExists = tenantExistsFuture.result();
return tenantExists
? PostTenantResponse.respond200WithApplicationJson(jsonListOfFailures)
: PostTenantResponse.respond201WithApplicationJson(jsonListOfFailures);
})
.onFailure(e -> {
if (e instanceof NoSchemaJsonException) {
handler.handle(Future.succeededFuture(PostTenantResponse.respond204()));
return;
}
log.error(e.getMessage(), e);
String text = e.getMessage() + "\n" + ExceptionUtils.getStackTrace(e);
Response response = PostTenantResponse.respond500WithTextPlain(text);
handler.handle(failedFuture(response));
})
.onSuccess(response -> {
if (response.getStatus() >= 300) {
handler.handle(failedFuture(response));
return;
}
handler.handle(Future.succeededFuture(response));
});
}
/**
* @return a failed {@link Future} where the failure cause is a {@link ResponseException}
* containing the {@code response}
*/
static Future<Response> failedFuture(Response response) {
return Future.failedFuture(new ResponseException(response));
}
}
| On delete don't set any module versions.
| domain-models-runtime/src/main/java/org/folio/rest/impl/TenantAPI.java | On delete don't set any module versions. |
|
Java | apache-2.0 | 3e4a31730ce74e59778c02f596bbec60c27093bc | 0 | md5555/android_packages_services_Telephony | /*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.services.telephony;
import android.net.Uri;
import android.os.Bundle;
import android.telecomm.CallInfo;
import android.telecomm.CallState;
import android.util.Log;
import com.android.internal.telephony.Call;
import com.android.internal.telephony.CallStateException;
import com.android.internal.telephony.Connection;
import com.android.internal.telephony.Phone;
import com.android.phone.Constants;
/**
* The parent class for PSTN-based call services. Handles shared functionality between all PSTN
* call services.
*/
public abstract class PstnCallService extends BaseTelephonyCallService {
    private static final String TAG = PstnCallService.class.getSimpleName();
    private static final boolean DBG = Log.isLoggable(TAG, Log.DEBUG);

    /** {@inheritDoc} */
    @Override
    public final void call(CallInfo callInfo) {
        startCallWithPhone(getPhone(), callInfo);
    }

    /**
     * Looks for a new incoming call and if one is found, tells Telecomm to associate the incoming
     * call with the specified call ID.
     *
     * {@inheritDoc}
     */
    @Override
    public final void setIncomingCallId(String callId, Bundle extras) {
        if (DBG) {
            Log.d(TAG, "setIncomingCallId: " + callId);
        }

        // Reuse the local phone reference (previously getPhone() was called twice).
        Phone phone = getPhone();
        Call call = phone.getRingingCall();

        // The ringing call is always not-null, check if it is truly ringing by checking its state.
        if (call.getState().isRinging()) {
            Connection connection = call.getEarliestConnection();

            if (CallRegistrar.isConnectionRegistered(connection)) {
                Log.e(TAG, "Cannot set incoming call ID, ringing connection already registered.");
            } else {
                // Create and register a new call connection.
                TelephonyCallConnection callConnection =
                        new TelephonyCallConnection(mCallServiceAdapter, callId, connection);
                CallRegistrar.register(callId, callConnection);

                // Address can be null for blocked calls.
                String address = connection.getAddress();
                if (address == null) {
                    address = "";
                }

                // Notify Telecomm of the incoming call.
                Uri handle = Uri.fromParts(Constants.SCHEME_TEL, address, null);
                CallInfo callInfo = new CallInfo(callId, CallState.RINGING, handle);
                mCallServiceAdapter.notifyIncomingCall(callInfo);
            }
        } else {
            Log.e(TAG, "Found no ringing call, call state: " + call.getState());
        }
    }

    /** Accepts the ringing call associated with {@code callId}, if any. {@inheritDoc} */
    @Override
    public void answer(String callId) {
        // TODO(santoscordon): Tons of hairy logic is missing here around multiple active calls on
        // CDMA devices. See {@link CallManager.acceptCall}.

        Log.i(TAG, "answer: " + callId);

        if (isValidRingingCall(callId)) {
            try {
                getPhone().acceptCall();
            } catch (CallStateException e) {
                // Answering can race with the call ending; log and move on.
                Log.e(TAG, "Failed to accept call " + callId, e);
            }
        }
    }

    /** Rejects the ringing call associated with {@code callId}, if any. {@inheritDoc} */
    @Override
    public void reject(String callId) {
        Log.i(TAG, "reject: " + callId);

        if (isValidRingingCall(callId)) {
            try {
                getPhone().rejectCall();
            } catch (CallStateException e) {
                // Rejecting can race with the call ending; log and move on.
                Log.e(TAG, "Failed to reject call " + callId, e);
            }
        }
    }

    /**
     * @return The current phone object behind this call service.
     */
    protected abstract Phone getPhone();

    /**
     * Checks to see if the specified call ID corresponds to an active incoming call. Returns false
     * if there is no association between the specified call ID and an actual call, or if the
     * associated call is not incoming (See {@link Call.State#isRinging}).
     *
     * @param callId The ID of the call.
     */
    private boolean isValidRingingCall(String callId) {
        TelephonyCallConnection callConnection = CallRegistrar.get(callId);

        if (callConnection == null) {
            if (DBG) {
                Log.d(TAG, "Unknown call ID while testing for a ringing call.");
            }
        } else {
            Phone phone = getPhone();
            Call ringingCall = phone.getRingingCall();

            // The ringingCall object is always not-null so we have to check its current state.
            if (ringingCall.getState().isRinging()) {
                Connection connection = callConnection.getOriginalConnection();
                if (ringingCall.getEarliestConnection() == connection) {
                    // The ringing connection is the same one for this call. We have a match!
                    return true;
                } else {
                    Log.w(TAG, "A ringing connection exists, but it is not the same connection.");
                }
            } else {
                Log.i(TAG, "There is no longer a ringing call.");
            }
        }

        return false;
    }
}
| src/com/android/services/telephony/PstnCallService.java | /*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.services.telephony;
import android.net.Uri;
import android.os.Bundle;
import android.telecomm.CallInfo;
import android.telecomm.CallState;
import android.util.Log;
import com.android.internal.telephony.Call;
import com.android.internal.telephony.CallStateException;
import com.android.internal.telephony.Connection;
import com.android.internal.telephony.Phone;
import com.android.phone.Constants;
/**
 * The parent class for PSTN-based call services. Handles shared functionality between all PSTN
 * call services.
 */
public abstract class PstnCallService extends BaseTelephonyCallService {
    private static final String TAG = PstnCallService.class.getSimpleName();
    private static final boolean DBG = Log.isLoggable(TAG, Log.DEBUG);

    /** {@inheritDoc} */
    @Override
    public final void call(CallInfo callInfo) {
        startCallWithPhone(getPhone(), callInfo);
    }

    /**
     * Looks for a new incoming call and if one is found, tells Telecomm to associate the incoming
     * call with the specified call ID.
     *
     * {@inheritDoc}
     */
    @Override
    public final void setIncomingCallId(String callId, Bundle extras) {
        if (DBG) {
            Log.d(TAG, "setIncomingCallId: " + callId);
        }
        Call call = getPhone().getRingingCall();

        // The ringing call is always not-null, check if it is truly ringing by checking its state.
        if (call.getState().isRinging()) {
            Connection connection = call.getEarliestConnection();

            if (CallRegistrar.isConnectionRegistered(connection)) {
                Log.e(TAG, "Cannot set incoming call ID, ringing connection already registered.");
            } else {
                // Create and register a new call connection.
                TelephonyCallConnection callConnection =
                        new TelephonyCallConnection(mCallServiceAdapter, callId, connection);
                CallRegistrar.register(callId, callConnection);

                // The address can be null for blocked calls. Uri.fromParts() cannot take a null
                // scheme-specific part, so substitute an empty string to avoid crashing here.
                String address = connection.getAddress();
                if (address == null) {
                    address = "";
                }

                // Notify Telecomm of the incoming call.
                Uri handle = Uri.fromParts(Constants.SCHEME_TEL, address, null);
                CallInfo callInfo = new CallInfo(callId, CallState.RINGING, handle);
                mCallServiceAdapter.notifyIncomingCall(callInfo);
            }
        } else {
            Log.e(TAG, "Found no ringing call, call state: " + call.getState());
        }
    }

    /** {@inheritDoc} */
    @Override
    public void answer(String callId) {
        // TODO(santoscordon): Tons of hairy logic is missing here around multiple active calls on
        // CDMA devices. See {@link CallManager.acceptCall}.
        Log.i(TAG, "answer: " + callId);

        if (isValidRingingCall(callId)) {
            try {
                getPhone().acceptCall();
            } catch (CallStateException e) {
                Log.e(TAG, "Failed to accept call " + callId, e);
            }
        }
    }

    /** {@inheritDoc} */
    @Override
    public void reject(String callId) {
        Log.i(TAG, "reject: " + callId);

        if (isValidRingingCall(callId)) {
            try {
                getPhone().rejectCall();
            } catch (CallStateException e) {
                Log.e(TAG, "Failed to reject call " + callId, e);
            }
        }
    }

    /**
     * @return The current phone object behind this call service.
     */
    protected abstract Phone getPhone();

    /**
     * Checks to see if the specified call ID corresponds to an active incoming call. Returns false
     * if there is no association between the specified call ID and an actual call, or if the
     * associated call is not incoming (See {@link Call.State#isRinging}).
     *
     * @param callId The ID of the call.
     */
    private boolean isValidRingingCall(String callId) {
        TelephonyCallConnection callConnection = CallRegistrar.get(callId);
        if (callConnection == null) {
            if (DBG) {
                Log.d(TAG, "Unknown call ID while testing for a ringing call.");
            }
        } else {
            Call ringingCall = getPhone().getRingingCall();

            // The ringingCall object is always not-null so we have to check its current state.
            if (ringingCall.getState().isRinging()) {
                Connection connection = callConnection.getOriginalConnection();
                if (ringingCall.getEarliestConnection() == connection) {
                    // The ringing connection is the same one for this call. We have a match!
                    return true;
                } else {
                    Log.w(TAG, "A ringing connection exists, but it is not the same connection.");
                }
            } else {
                Log.i(TAG, "There is no longer a ringing call.");
            }
        }
        return false;
    }
}
| Fix crash on blocked calls
Bug: 13630786
Change-Id: Id49405d376e62cb5e87deebb193c4988a4773d8e
| src/com/android/services/telephony/PstnCallService.java | Fix crash on blocked calls |
|
Java | apache-2.0 | bceb3e86436a45e273067566d5c92413924173e9 | 0 | Nanoware/Terasology,MovingBlocks/Terasology,Nanoware/Terasology,Nanoware/Terasology,MovingBlocks/Terasology,MovingBlocks/Terasology,Malanius/Terasology,Malanius/Terasology | /*
* Copyright 2017 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.rendering.world;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.config.Config;
import org.terasology.config.RenderingConfig;
import org.terasology.context.Context;
import org.terasology.engine.GameEngine;
import org.terasology.engine.modes.StateMainMenu;
import org.terasology.engine.module.ModuleManager;
import org.terasology.engine.module.rendering.RenderingModuleManager;
import org.terasology.engine.subsystem.DisplayDevice;
import org.terasology.engine.subsystem.lwjgl.GLBufferPool;
import org.terasology.engine.subsystem.lwjgl.LwjglGraphics;
import org.terasology.logic.console.Console;
import org.terasology.logic.console.commandSystem.MethodCommand;
import org.terasology.logic.console.commandSystem.annotations.Command;
import org.terasology.logic.console.commandSystem.annotations.CommandParam;
import org.terasology.logic.permission.PermissionManager;
import org.terasology.logic.players.LocalPlayerSystem;
import org.terasology.math.JomlUtil;
import org.terasology.math.TeraMath;
import org.terasology.math.geom.Vector3f;
import org.terasology.math.geom.Vector3i;
import org.terasology.rendering.ShaderManager;
import org.terasology.rendering.assets.material.Material;
import org.terasology.rendering.backdrop.BackdropProvider;
import org.terasology.rendering.cameras.OpenVRStereoCamera;
import org.terasology.rendering.cameras.PerspectiveCamera;
import org.terasology.rendering.cameras.SubmersibleCamera;
import org.terasology.engine.module.rendering.RenderingModuleRegistry;
import org.terasology.rendering.dag.Node;
import org.terasology.rendering.dag.ModuleRendering;
import org.terasology.rendering.dag.RenderGraph;
import org.terasology.rendering.dag.RenderPipelineTask;
import org.terasology.rendering.dag.RenderTaskListGenerator;
import org.terasology.rendering.dag.stateChanges.SetViewportToSizeOf;
import org.terasology.rendering.opengl.FBO;
import org.terasology.rendering.opengl.ScreenGrabber;
import org.terasology.rendering.opengl.fbms.DisplayResolutionDependentFbo;
import org.terasology.rendering.openvrprovider.OpenVRProvider;
import org.terasology.rendering.world.viewDistance.ViewDistance;
import org.terasology.utilities.Assets;
import org.terasology.world.WorldProvider;
import org.terasology.world.chunks.ChunkProvider;
import static org.lwjgl.opengl.GL11.GL_CULL_FACE;
import static org.lwjgl.opengl.GL11.glDisable;
import static org.lwjgl.opengl.GL11.glViewport;
/**
 * Renders the 3D world, including background, overlays and first person/in hand objects. 2D UI elements are dealt with elsewhere.
 * <p>
 * This implementation includes support for OpenVR, through which HTC Vive and Oculus Rift is supported.
 * <p>
 * This implementation works closely with a number of support objects, in particular:
 * <p>
 * TODO: update this section to include new, relevant objects
 * - a RenderableWorld instance, providing acceleration structures caching blocks requiring different rendering treatments<br/>
 */
public final class WorldRendererImpl implements WorldRenderer {
    /*
     * Presumably, the eye height should be context.get(Config.class).getPlayer().getEyeHeight() above the ground plane.
     * It's not, so for now, we use this factor to adjust for the disparity.
     */
    private static final Logger logger = LoggerFactory.getLogger(WorldRendererImpl.class);
    private static final float GROUND_PLANE_HEIGHT_DISPARITY = -0.7f;

    private RenderGraph renderGraph;
    private RenderingModuleRegistry renderingModuleRegistry;

    private boolean isFirstRenderingStageForCurrentFrame;
    private final RenderQueuesHelper renderQueues;
    private final Context context;
    private final BackdropProvider backdropProvider;
    private final WorldProvider worldProvider;
    private final RenderableWorld renderableWorld;
    private final ShaderManager shaderManager;
    private final SubmersibleCamera playerCamera;

    private final OpenVRProvider vrProvider;

    private float timeSmoothedMainLightIntensity;
    private RenderingStage currentRenderingStage;

    // Total wall-clock rendering time; shader animations are driven by this value.
    private float millisecondsSinceRenderingStart;
    private float secondsSinceLastFrame;
    private int statChunkMeshEmpty;
    private int statChunkNotReady;
    private int statRenderedTriangles;

    private final RenderingConfig renderingConfig;
    private final Console console;

    private RenderTaskListGenerator renderTaskListGenerator;
    private boolean requestedTaskListRefresh;
    private List<RenderPipelineTask> renderPipelineTaskList;

    private DisplayResolutionDependentFbo displayResolutionDependentFbo;

    /**
     * Instantiates a WorldRenderer implementation.
     * <p>
     * This particular implementation works as deferred shader. The scene is rendered multiple times per frame
     * in a number of separate passes (each stored in GPU buffers) and the passes are combined throughout the
     * rendering pipeline to calculate per-pixel lighting and other effects.
     * <p>
     * Transparencies are handled through alpha rejection (i.e. ground plants) and alpha-based blending.
     * An exception to this is water, which is handled separately to allow for reflections and refractions, if enabled.
     * <p>
     * By the time it is fully instantiated this implementation is already connected to all the support objects
     * it requires and is ready to render via the render(RenderingStage) method.
     *
     * @param context a context object, to obtain instances of classes such as the rendering config.
     * @param bufferPool a GLBufferPool, to be passed to the RenderableWorld instance used by this implementation.
     */
    public WorldRendererImpl(Context context, GLBufferPool bufferPool) {
        this.context = context;
        renderGraph = new RenderGraph(context);

        this.worldProvider = context.get(WorldProvider.class);
        this.backdropProvider = context.get(BackdropProvider.class);
        this.renderingConfig = context.get(Config.class).getRendering();
        this.shaderManager = context.get(ShaderManager.class);
        // TODO: Instantiate the VR provider at a more reasonable location, and just obtain it via context here.
        vrProvider = OpenVRProvider.getInstance();
        if (renderingConfig.isVrSupport()) {
            context.put(OpenVRProvider.class, vrProvider);
            // If vrProvider.init() returns false, this means that we are unable to initialize VR hardware for some
            // reason (for example, no HMD is connected). In that case, even though the configuration requests
            // vrSupport, we fall back on rendering to the main display. The reason for init failure can be read from
            // the log.
            if (vrProvider.init()) {
                playerCamera = new OpenVRStereoCamera(vrProvider, worldProvider, renderingConfig);
                /*
                 * The origin of OpenVR's coordinate system lies on the ground of the user. We have to move this origin
                 * such that the ground plane of the rendering system and the ground plane of the room the VR user is
                 * in match.
                 */
                vrProvider.getState().setGroundPlaneYOffset(
                        GROUND_PLANE_HEIGHT_DISPARITY - context.get(Config.class).getPlayer().getEyeHeight());
                currentRenderingStage = RenderingStage.LEFT_EYE;
            } else {
                playerCamera = new PerspectiveCamera(worldProvider, renderingConfig, context.get(DisplayDevice.class));
                currentRenderingStage = RenderingStage.MONO;
            }
        } else {
            playerCamera = new PerspectiveCamera(worldProvider, renderingConfig, context.get(DisplayDevice.class));
            currentRenderingStage = RenderingStage.MONO;
        }

        // TODO: won't need localPlayerSystem here once camera is in the ES proper
        LocalPlayerSystem localPlayerSystem = context.get(LocalPlayerSystem.class);
        localPlayerSystem.setPlayerCamera(playerCamera);

        renderableWorld = new RenderableWorldImpl(worldProvider, context.get(ChunkProvider.class), bufferPool, playerCamera);
        renderQueues = renderableWorld.getRenderQueues();

        initRenderingSupport();
        initRenderGraph();
        initRenderingModules();

        console = context.get(Console.class);
        MethodCommand.registerAvailable(this, console, context);
    }

    /** Creates the shared rendering helpers (screen grabber, FBO manager, shaders) and publishes them to the context. */
    private void initRenderingSupport() {
        ScreenGrabber screenGrabber = new ScreenGrabber(context);
        context.put(ScreenGrabber.class, screenGrabber);

        displayResolutionDependentFbo = new DisplayResolutionDependentFbo(context.get(Config.class).getRendering(), screenGrabber, context.get(DisplayDevice.class));
        context.put(DisplayResolutionDependentFbo.class, displayResolutionDependentFbo);

        shaderManager.initShaders();

        context.put(WorldRenderer.class, this);
        context.put(RenderQueuesHelper.class, renderQueues);
        context.put(RenderableWorld.class, renderableWorld);
    }

    /** Publishes the render graph and task-list generator to the context. */
    private void initRenderGraph() {
        context.put(RenderGraph.class, renderGraph);

        renderTaskListGenerator = new RenderTaskListGenerator();
        context.put(RenderTaskListGenerator.class, renderTaskListGenerator);

        addDummyNodes();
    }

    /**
     * Initialises every enabled rendering module registered for this game. If no module is
     * available at all, the game cannot render and is sent back to the main menu.
     */
    private void initRenderingModules() {
        renderingModuleRegistry = context.get(RenderingModuleManager.class).getRegistry();

        // registry not populated by new ModuleRendering instances in UI, populate now
        if (renderingModuleRegistry.getOrderedRenderingModules().isEmpty()) {
            renderingModuleRegistry.updateRenderingModulesOrder(context.get(ModuleManager.class).getEnvironment(), context);
            if (renderingModuleRegistry.getOrderedRenderingModules().isEmpty()) {
                GameEngine gameEngine = context.get(GameEngine.class);
                gameEngine.changeState(new StateMainMenu("No rendering module loaded, unable to render. Try enabling BasicRendering."));
            }
        } else { // registry populated by new ModuleRendering instances in UI
            // Switch module's context from gamecreation subcontext to gamerunning context
            renderingModuleRegistry.updateModulesContext(context);
        }

        /*
        TODO: work out where to put this.
        renderGraph.connect(opaqueObjectsNode, overlaysNode);
        renderGraph.connect(opaqueBlocksNode, overlaysNode);
        renderGraph.connect(alphaRejectBlocksNode, overlaysNode);
        */

        for (ModuleRendering moduleRenderingInstance : renderingModuleRegistry.getOrderedRenderingModules()) {
            if (moduleRenderingInstance.isEnabled()) {
                logger.info(String.format("\nInitialising rendering class %s from %s module.\n",
                        moduleRenderingInstance.getClass().getSimpleName(),
                        moduleRenderingInstance.getProvidingModule()));
                moduleRenderingInstance.initialise();
            }
        }

        requestTaskListRefresh();
    }

    /** Placeholder hook for wiring debug/dummy nodes into the render graph; currently a no-op. */
    public void addDummyNodes() {
        /*
        Node blurredAmbientOcclusionNode = new DummyNode ("blurredAmbientOcclusionNode", context);
        blurredAmbientOcclusionNode.addOutputFboConnection(1);
        renderGraph.addNode(blurredAmbientOcclusionNode);

        Node prePostCompositeNode = new DummyNode("prePostCompositeNode", context);
        renderGraph.addNode(prePostCompositeNode);
        */
    }

    @Override
    public float getSecondsSinceLastFrame() {
        return secondsSinceLastFrame;
    }

    @Override
    public Material getMaterial(String assetId) {
        return Assets.getMaterial(assetId).orElseThrow(() ->
                new RuntimeException("Failed to resolve required asset: '" + assetId + "'"));
    }

    @Override
    public void onChunkLoaded(Vector3i pos) {
        renderableWorld.onChunkLoaded(pos);
    }

    @Override
    public void onChunkUnloaded(Vector3i pos) {
        renderableWorld.onChunkUnloaded(pos);
    }

    @Override
    public boolean pregenerateChunks() {
        return renderableWorld.pregenerateChunks();
    }

    @Override
    public void update(float deltaInSeconds) {
        secondsSinceLastFrame += deltaInSeconds;
    }

    /** Zeroes the per-frame statistics counters. */
    private void resetStats() {
        statChunkMeshEmpty = 0;
        statChunkNotReady = 0;
        statRenderedTriangles = 0;
    }

    @Override
    public void increaseTrianglesCount(int increase) {
        statRenderedTriangles += increase;
    }

    @Override
    public void increaseNotReadyChunkCount(int increase) {
        statChunkNotReady += increase;
    }

    /**
     * Per-stage bookkeeping executed before rendering: updates the camera, the renderable world
     * and the task list. Frame-wide work runs only once per frame, on the first stage.
     */
    private void preRenderUpdate(RenderingStage renderingStage) {
        resetStats();

        currentRenderingStage = renderingStage;
        // MONO and LEFT_EYE are the first (or only) stage rendered in a frame.
        isFirstRenderingStageForCurrentFrame =
                currentRenderingStage == RenderingStage.MONO || currentRenderingStage == RenderingStage.LEFT_EYE;

        // this is done to execute this code block only once per frame
        // instead of once per eye in a stereo setup.
        if (isFirstRenderingStageForCurrentFrame) {
            timeSmoothedMainLightIntensity = TeraMath.lerp(timeSmoothedMainLightIntensity, getMainLightIntensityAt(JomlUtil.from(playerCamera.getPosition())), secondsSinceLastFrame);

            playerCamera.update(secondsSinceLastFrame);

            renderableWorld.update();
            renderableWorld.generateVBOs();

            // BUG FIX: advance the animation clock BEFORE zeroing the per-frame timer.
            // The previous order added (0 * 1000) every frame, so millisecondsSinceRenderingStart
            // never advanced and shader animations based on it were frozen.
            millisecondsSinceRenderingStart += secondsSinceLastFrame * 1000; // updates the variable animations are based on.
            secondsSinceLastFrame = 0;

            displayResolutionDependentFbo.update();
        }

        if (currentRenderingStage != RenderingStage.MONO) {
            playerCamera.updateFrustum();
        }

        // this line needs to be here as deep down it relies on the camera's frustrum, updated just above.
        renderableWorld.queueVisibleChunks(isFirstRenderingStageForCurrentFrame);

        if (requestedTaskListRefresh) {
            List<Node> orderedNodes = renderGraph.getNodesInTopologicalOrder();
            renderPipelineTaskList = renderTaskListGenerator.generateFrom(orderedNodes);
            requestedTaskListRefresh = false;
        }
    }

    /**
     * TODO: update javadocs
     * This method triggers the execution of the rendering pipeline and, eventually, sends the output to the display
     * or to a file, when grabbing a screenshot.
     * <p>
     * In this particular implementation this method can be called once per frame, when rendering to a standard display,
     * or twice, each time with a different rendering stage, when rendering to the head mounted display.
     * <p>
     * PerformanceMonitor.startActivity/endActivity statements are used in this method and in those it executes,
     * to provide statistics regarding the ongoing rendering and its individual steps (i.e. rendering shadows,
     * reflections, 2D filters...).
     *
     * @param renderingStage "MONO" for standard rendering and "LEFT_EYE" or "RIGHT_EYE" for stereoscopic displays.
     */
    @Override
    public void render(RenderingStage renderingStage) {

        // If no rendering module populated renderGraph, throw an exception.
        /* if (renderGraph.getNodeMapSize() < 1) {
            throw new RuntimeException("Render graph is not ready to render. Did you use a rendering module?");
        } */

        preRenderUpdate(renderingStage);

        // TODO: Add a method here to check wireframe configuration and regenerate "renderPipelineTask" accordingly.

        // The following line re-establish OpenGL defaults, so that the nodes/tasks can rely on them.
        // A place where Terasology overrides the defaults is LwjglGraphics.initOpenGLParams(), but
        // there could be potentially other places, i.e. in the UI code. In the rendering engine we'd like
        // to eventually rely on a default OpenGL state.
        glDisable(GL_CULL_FACE);
        FBO lastUpdatedGBuffer = displayResolutionDependentFbo.getGBufferPair().getLastUpdatedFbo();
        glViewport(0, 0, lastUpdatedGBuffer.width(), lastUpdatedGBuffer.height());
        // glDisable(GL_DEPTH_TEST);
        // glDisable(GL_NORMALIZE); // currently keeping these as they are, until we find where they are used.
        // glDepthFunc(GL_LESS);

        renderPipelineTaskList.forEach(RenderPipelineTask::process);

        // this line re-establish Terasology defaults, so that the rest of the application can rely on them.
        LwjglGraphics.initOpenGLParams();

        playerCamera.updatePrevViewProjectionMatrix();
    }

    @Override
    public void requestTaskListRefresh() {
        requestedTaskListRefresh = true;
    }

    @Override
    public boolean isFirstRenderingStageForCurrentFrame() {
        return isFirstRenderingStageForCurrentFrame;
    }

    /**
     * Disposes of support objects used by this implementation.
     */
    @Override
    public void dispose() {
        renderableWorld.dispose();
        worldProvider.dispose();
        renderGraph.dispose();
        // TODO: Shift this to a better place, after a RenderGraph class has been implemented.
        SetViewportToSizeOf.disposeDefaultInstance();
    }

    @Override
    public void setViewDistance(ViewDistance viewDistance) {
        renderableWorld.updateChunksInProximity(viewDistance);
    }

    @Override
    public float getTimeSmoothedMainLightIntensity() {
        return timeSmoothedMainLightIntensity;
    }

    @Override
    public float getRenderingLightIntensityAt(Vector3f pos) {
        float rawLightValueSun = worldProvider.getSunlight(pos) / 15.0f;
        float rawLightValueBlock = worldProvider.getLight(pos) / 15.0f;

        float lightValueSun = (float) Math.pow(BLOCK_LIGHT_SUN_POW, (1.0f - rawLightValueSun) * 16.0) * rawLightValueSun;
        lightValueSun *= backdropProvider.getDaylight();
        // TODO: Hardcoded factor and value to compensate for daylight tint and night brightness
        lightValueSun *= 0.9f;
        lightValueSun += 0.05f;

        float lightValueBlock = (float) Math.pow(BLOCK_LIGHT_POW, (1.0f - (double) rawLightValueBlock) * 16.0f) * rawLightValueBlock * BLOCK_INTENSITY_FACTOR;

        return Math.max(lightValueBlock, lightValueSun);
    }

    @Override
    public float getMainLightIntensityAt(Vector3f position) {
        return backdropProvider.getDaylight() * worldProvider.getSunlight(position) / 15.0f;
    }

    @Override
    public float getBlockLightIntensityAt(Vector3f position) {
        return worldProvider.getLight(position) / 15.0f;
    }

    @Override
    public String getMetrics() {
        // StringBuilder avoids the chain of intermediate String allocations the
        // previous '+='-based implementation produced; output is unchanged.
        StringBuilder metrics = new StringBuilder();
        metrics.append(renderableWorld.getMetrics());
        metrics.append("Empty Mesh Chunks: ").append(statChunkMeshEmpty).append("\n");
        metrics.append("Unready Chunks: ").append(statChunkNotReady).append("\n");
        metrics.append("Rendered Triangles: ").append(statRenderedTriangles).append("\n");
        return metrics.toString();
    }

    @Override
    public float getMillisecondsSinceRenderingStart() {
        return millisecondsSinceRenderingStart;
    }

    @Override
    public SubmersibleCamera getActiveCamera() {
        return playerCamera;
    }

    @Override
    public RenderingStage getCurrentRenderStage() {
        return currentRenderingStage;
    }

    @Override
    public RenderGraph getRenderGraph() {
        return renderGraph;
    }

    /**
     * Forces a recompilation of all shaders. This command, backed by Gestalt's monitoring feature,
     * allows developers to hot-swap shaders for easy development.
     *
     * To run the command simply type "recompileShaders" and then press Enter in the console.
     */
    @Command(shortDescription = "Forces a recompilation of shaders.", requiredPermission = PermissionManager.NO_PERMISSION)
    public void recompileShaders() {
        console.addMessage("Recompiling shaders... ", false);
        shaderManager.recompileAllShaders();
        console.addMessage("done!");
    }

    /**
     * Acts as an interface between the console and the Nodes. All parameters passed to command are redirected to the
     * concerned Nodes, which in turn take care of executing them.
     *
     * Usage:
     *      dagNodeCommand <nodeUri> <command> <parameters>
     *
     * Example:
     *      dagNodeCommand engine:outputToScreenNode setFbo engine:fbo.ssao
     */
    @Command(shortDescription = "Debugging command for DAG.", requiredPermission = PermissionManager.NO_PERMISSION)
    public void dagNodeCommand(@CommandParam("nodeUri") final String nodeUri, @CommandParam("command") final String command,
                               @CommandParam(value = "arguments") final String... arguments) {
        Node node = renderGraph.findNode(nodeUri);
        if (node == null) {
            node = renderGraph.findAka(nodeUri);
            if (node == null) {
                throw new RuntimeException(("No node is associated with URI '" + nodeUri + "'"));
            }
        }
        node.handleCommand(command, arguments);
    }

    /**
     * Redirect output FBO from one node to another's input
     *
     * Usage:
     *      dagRedirect <connectionTypeString> <fromNodeUri> <outputFboId> <toNodeUri> <inputFboId>
     *
     * Example:
     * dagRedirect fbo blurredAmbientOcclusion 1 BasicRendering:outputToScreenNode 1
     * dagRedirect bufferpair backdrop 1 AdvancedRendering:intermediateHazeNode 1
     */
    @Command(shortDescription = "Debugging command for DAG.", requiredPermission = PermissionManager.NO_PERMISSION)
    public void dagRedirect(@CommandParam("fromNodeUri") final String connectionTypeString, @CommandParam("fromNodeUri") final String fromNodeUri, @CommandParam("outputFboId") final int outputFboId,
                            @CommandParam("toNodeUri") final String toNodeUri, @CommandParam(value = "inputFboId") final int inputFboId) {
        RenderGraph.ConnectionType connectionType;
        if (connectionTypeString.equalsIgnoreCase("fbo")) {
            connectionType = RenderGraph.ConnectionType.FBO;
        } else if (connectionTypeString.equalsIgnoreCase("bufferpair")) {
            connectionType = RenderGraph.ConnectionType.BUFFER_PAIR;
        } else {
            throw new RuntimeException(("Unsupported connection type: '" + connectionTypeString + "'. Expected 'fbo' or 'bufferpair'.\n"));
        }

        Node toNode = renderGraph.findNode(toNodeUri);
        if (toNode == null) {
            toNode = renderGraph.findAka(toNodeUri);
            if (toNode == null) {
                throw new RuntimeException(("No node is associated with URI '" + toNodeUri + "'"));
            }
        }

        Node fromNode = renderGraph.findNode(fromNodeUri);
        if (fromNode == null) {
            fromNode = renderGraph.findAka(fromNodeUri);
            if (fromNode == null) {
                throw new RuntimeException(("No node is associated with URI '" + fromNodeUri + "'"));
            }
        }
        renderGraph.reconnectInputToOutput(fromNode, outputFboId, toNode, inputFboId, connectionType, true);
        toNode.clearDesiredStateChanges();
        requestTaskListRefresh();
    }
}
| engine/src/main/java/org/terasology/rendering/world/WorldRendererImpl.java | /*
* Copyright 2017 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.rendering.world;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.config.Config;
import org.terasology.config.RenderingConfig;
import org.terasology.context.Context;
import org.terasology.engine.module.ModuleManager;
import org.terasology.engine.module.rendering.RenderingModuleManager;
import org.terasology.engine.subsystem.DisplayDevice;
import org.terasology.engine.subsystem.lwjgl.GLBufferPool;
import org.terasology.engine.subsystem.lwjgl.LwjglGraphics;
import org.terasology.logic.console.Console;
import org.terasology.logic.console.commandSystem.MethodCommand;
import org.terasology.logic.console.commandSystem.annotations.Command;
import org.terasology.logic.console.commandSystem.annotations.CommandParam;
import org.terasology.logic.permission.PermissionManager;
import org.terasology.logic.players.LocalPlayerSystem;
import org.terasology.math.JomlUtil;
import org.terasology.math.TeraMath;
import org.terasology.math.geom.Vector3f;
import org.terasology.math.geom.Vector3i;
import org.terasology.rendering.ShaderManager;
import org.terasology.rendering.assets.material.Material;
import org.terasology.rendering.backdrop.BackdropProvider;
import org.terasology.rendering.cameras.OpenVRStereoCamera;
import org.terasology.rendering.cameras.PerspectiveCamera;
import org.terasology.rendering.cameras.SubmersibleCamera;
import org.terasology.engine.module.rendering.RenderingModuleRegistry;
import org.terasology.rendering.dag.Node;
import org.terasology.rendering.dag.ModuleRendering;
import org.terasology.rendering.dag.RenderGraph;
import org.terasology.rendering.dag.RenderPipelineTask;
import org.terasology.rendering.dag.RenderTaskListGenerator;
import org.terasology.rendering.dag.stateChanges.SetViewportToSizeOf;
import org.terasology.rendering.opengl.FBO;
import org.terasology.rendering.opengl.ScreenGrabber;
import org.terasology.rendering.opengl.fbms.DisplayResolutionDependentFbo;
import org.terasology.rendering.openvrprovider.OpenVRProvider;
import org.terasology.rendering.world.viewDistance.ViewDistance;
import org.terasology.utilities.Assets;
import org.terasology.world.WorldProvider;
import org.terasology.world.chunks.ChunkProvider;
import static org.lwjgl.opengl.GL11.GL_CULL_FACE;
import static org.lwjgl.opengl.GL11.glDisable;
import static org.lwjgl.opengl.GL11.glViewport;
/**
* Renders the 3D world, including background, overlays and first person/in hand objects. 2D UI elements are dealt with elsewhere.
* <p>
* This implementation includes support for OpenVR, through which HTC Vive and Oculus Rift is supported.
* <p>
* This implementation works closely with a number of support objects, in particular:
* <p>
* TODO: update this section to include new, relevant objects
* - a RenderableWorld instance, providing acceleration structures caching blocks requiring different rendering treatments<br/>
*/
public final class WorldRendererImpl implements WorldRenderer {
    /*
     * Presumably, the eye height should be context.get(Config.class).getPlayer().getEyeHeight() above the ground plane.
     * It's not, so for now, we use this factor to adjust for the disparity.
     */
    private static final Logger logger = LoggerFactory.getLogger(WorldRendererImpl.class);
    private static final float GROUND_PLANE_HEIGHT_DISPARITY = -0.7f;

    private RenderGraph renderGraph;
    private RenderingModuleRegistry renderingModuleRegistry;

    // True while rendering MONO or LEFT_EYE, i.e. the first pass of the current frame.
    // Used so once-per-frame work is not repeated for the second eye in stereo setups.
    private boolean isFirstRenderingStageForCurrentFrame;

    private final RenderQueuesHelper renderQueues;
    private final Context context;
    private final BackdropProvider backdropProvider;
    private final WorldProvider worldProvider;
    private final RenderableWorld renderableWorld;
    private final ShaderManager shaderManager;
    private final SubmersibleCamera playerCamera;
    private final OpenVRProvider vrProvider;

    private float timeSmoothedMainLightIntensity;
    private RenderingStage currentRenderingStage;

    // Clock the shader/animation effects are based on, accumulated from per-frame deltas.
    private float millisecondsSinceRenderingStart;
    private float secondsSinceLastFrame;

    // Per-frame rendering statistics, reset at the start of every frame.
    private int statChunkMeshEmpty;
    private int statChunkNotReady;
    private int statRenderedTriangles;

    private final RenderingConfig renderingConfig;
    private final Console console;

    private RenderTaskListGenerator renderTaskListGenerator;
    private boolean requestedTaskListRefresh;
    private List<RenderPipelineTask> renderPipelineTaskList;
    private DisplayResolutionDependentFbo displayResolutionDependentFbo;

    /**
     * Instantiates a WorldRenderer implementation.
     * <p>
     * This particular implementation works as deferred shader. The scene is rendered multiple times per frame
     * in a number of separate passes (each stored in GPU buffers) and the passes are combined throughout the
     * rendering pipeline to calculate per-pixel lighting and other effects.
     * <p>
     * Transparencies are handled through alpha rejection (i.e. ground plants) and alpha-based blending.
     * An exception to this is water, which is handled separately to allow for reflections and refractions, if enabled.
     * <p>
     * By the time it is fully instantiated this implementation is already connected to all the support objects
     * it requires and is ready to render via the render(RenderingStage) method.
     *
     * @param context a context object, to obtain instances of classes such as the rendering config.
     * @param bufferPool a GLBufferPool, to be passed to the RenderableWorld instance used by this implementation.
     */
    public WorldRendererImpl(Context context, GLBufferPool bufferPool) {
        this.context = context;
        renderGraph = new RenderGraph(context);
        this.worldProvider = context.get(WorldProvider.class);
        this.backdropProvider = context.get(BackdropProvider.class);
        this.renderingConfig = context.get(Config.class).getRendering();
        this.shaderManager = context.get(ShaderManager.class);
        // TODO: Instantiate the VR provider at a more reasonable location, and just obtain it via context here.
        vrProvider = OpenVRProvider.getInstance();
        if (renderingConfig.isVrSupport()) {
            context.put(OpenVRProvider.class, vrProvider);
            // If vrProvider.init() returns false, this means that we are unable to initialize VR hardware for some
            // reason (for example, no HMD is connected). In that case, even though the configuration requests
            // vrSupport, we fall back on rendering to the main display. The reason for init failure can be read from
            // the log.
            if (vrProvider.init()) {
                playerCamera = new OpenVRStereoCamera(vrProvider, worldProvider, renderingConfig);
                /*
                 * The origin of OpenVR's coordinate system lies on the ground of the user. We have to move this origin
                 * such that the ground plane of the rendering system and the ground plane of the room the VR user is
                 * in match.
                 */
                vrProvider.getState().setGroundPlaneYOffset(
                        GROUND_PLANE_HEIGHT_DISPARITY - context.get(Config.class).getPlayer().getEyeHeight());
                currentRenderingStage = RenderingStage.LEFT_EYE;
            } else {
                playerCamera = new PerspectiveCamera(worldProvider, renderingConfig, context.get(DisplayDevice.class));
                currentRenderingStage = RenderingStage.MONO;
            }
        } else {
            playerCamera = new PerspectiveCamera(worldProvider, renderingConfig, context.get(DisplayDevice.class));
            currentRenderingStage = RenderingStage.MONO;
        }
        // TODO: won't need localPlayerSystem here once camera is in the ES proper
        LocalPlayerSystem localPlayerSystem = context.get(LocalPlayerSystem.class);
        localPlayerSystem.setPlayerCamera(playerCamera);
        renderableWorld = new RenderableWorldImpl(worldProvider, context.get(ChunkProvider.class), bufferPool, playerCamera);
        renderQueues = renderableWorld.getRenderQueues();
        initRenderingSupport();
        initRenderGraph();
        initRenderingModules();
        console = context.get(Console.class);
        MethodCommand.registerAvailable(this, console, context);
    }

    /** Wires up FBOs, shaders and registers this renderer's support objects in the context. */
    private void initRenderingSupport() {
        ScreenGrabber screenGrabber = new ScreenGrabber(context);
        context.put(ScreenGrabber.class, screenGrabber);
        displayResolutionDependentFbo = new DisplayResolutionDependentFbo(context.get(Config.class).getRendering(), screenGrabber, context.get(DisplayDevice.class));
        context.put(DisplayResolutionDependentFbo.class, displayResolutionDependentFbo);
        shaderManager.initShaders();
        context.put(WorldRenderer.class, this);
        context.put(RenderQueuesHelper.class, renderQueues);
        context.put(RenderableWorld.class, renderableWorld);
    }

    /** Registers the render graph and task-list generator in the context. */
    private void initRenderGraph() {
        context.put(RenderGraph.class, renderGraph);
        renderTaskListGenerator = new RenderTaskListGenerator();
        context.put(RenderTaskListGenerator.class, renderTaskListGenerator);
        addDummyNodes();
    }

    /** Initialises every enabled rendering module and requests a task-list refresh. */
    private void initRenderingModules() {
        renderingModuleRegistry = context.get(RenderingModuleManager.class).getRegistry();
        // registry not populated by new ModuleRendering instances in UI, populate now
        if (renderingModuleRegistry.getOrderedRenderingModules().isEmpty()) {
            renderingModuleRegistry.updateRenderingModulesOrder(context.get(ModuleManager.class).getEnvironment(), context);
        } else { // registry populated by new ModuleRendering instances in UI
            // Switch module's context from gamecreation subcontext to gamerunning context
            renderingModuleRegistry.updateModulesContext(context);
        }
        /*
        TODO: work out where to put this.
        renderGraph.connect(opaqueObjectsNode, overlaysNode);
        renderGraph.connect(opaqueBlocksNode, overlaysNode);
        renderGraph.connect(alphaRejectBlocksNode, overlaysNode);
        */
        for (ModuleRendering moduleRenderingInstance : renderingModuleRegistry.getOrderedRenderingModules()) {
            if (moduleRenderingInstance.isEnabled()) {
                // Parameterized logging avoids the eager String.format call on every iteration.
                logger.info("\nInitialising rendering class {} from {} module.\n",
                        moduleRenderingInstance.getClass().getSimpleName(),
                        moduleRenderingInstance.getProvidingModule());
                moduleRenderingInstance.initialise();
            }
        }
        requestTaskListRefresh();
    }

    public void addDummyNodes() {
        /*
        Node blurredAmbientOcclusionNode = new DummyNode ("blurredAmbientOcclusionNode", context);
        blurredAmbientOcclusionNode.addOutputFboConnection(1);
        renderGraph.addNode(blurredAmbientOcclusionNode);
        Node prePostCompositeNode = new DummyNode("prePostCompositeNode", context);
        renderGraph.addNode(prePostCompositeNode);
        */
    }

    @Override
    public float getSecondsSinceLastFrame() {
        return secondsSinceLastFrame;
    }

    @Override
    public Material getMaterial(String assetId) {
        return Assets.getMaterial(assetId).orElseThrow(() ->
                new RuntimeException("Failed to resolve required asset: '" + assetId + "'"));
    }

    @Override
    public void onChunkLoaded(Vector3i pos) {
        renderableWorld.onChunkLoaded(pos);
    }

    @Override
    public void onChunkUnloaded(Vector3i pos) {
        renderableWorld.onChunkUnloaded(pos);
    }

    @Override
    public boolean pregenerateChunks() {
        return renderableWorld.pregenerateChunks();
    }

    @Override
    public void update(float deltaInSeconds) {
        secondsSinceLastFrame += deltaInSeconds;
    }

    /** Zeroes the per-frame statistics; called at the start of every frame. */
    private void resetStats() {
        statChunkMeshEmpty = 0;
        statChunkNotReady = 0;
        statRenderedTriangles = 0;
    }

    @Override
    public void increaseTrianglesCount(int increase) {
        statRenderedTriangles += increase;
    }

    @Override
    public void increaseNotReadyChunkCount(int increase) {
        statChunkNotReady += increase;
    }

    /**
     * Per-stage housekeeping executed before the pipeline tasks run: updates cameras,
     * world state and (once per frame) time-based values, and regenerates the task
     * list if a refresh was requested.
     */
    private void preRenderUpdate(RenderingStage renderingStage) {
        resetStats();
        currentRenderingStage = renderingStage;
        // MONO and LEFT_EYE mark the beginning of a new frame; RIGHT_EYE is the second pass of a stereo frame.
        isFirstRenderingStageForCurrentFrame =
                currentRenderingStage == RenderingStage.MONO || currentRenderingStage == RenderingStage.LEFT_EYE;
        // this is done to execute this code block only once per frame
        // instead of once per eye in a stereo setup.
        if (isFirstRenderingStageForCurrentFrame) {
            timeSmoothedMainLightIntensity = TeraMath.lerp(timeSmoothedMainLightIntensity, getMainLightIntensityAt(JomlUtil.from(playerCamera.getPosition())), secondsSinceLastFrame);
            playerCamera.update(secondsSinceLastFrame);
            renderableWorld.update();
            renderableWorld.generateVBOs();
            // Updates the variable animations are based on. This must happen *before*
            // secondsSinceLastFrame is zeroed, otherwise the accumulated delta is lost
            // and the animation clock never advances.
            millisecondsSinceRenderingStart += secondsSinceLastFrame * 1000;
            secondsSinceLastFrame = 0;
            displayResolutionDependentFbo.update();
        }
        if (currentRenderingStage != RenderingStage.MONO) {
            playerCamera.updateFrustum();
        }
        // this line needs to be here as deep down it relies on the camera's frustrum, updated just above.
        renderableWorld.queueVisibleChunks(isFirstRenderingStageForCurrentFrame);
        if (requestedTaskListRefresh) {
            List<Node> orderedNodes = renderGraph.getNodesInTopologicalOrder();
            renderPipelineTaskList = renderTaskListGenerator.generateFrom(orderedNodes);
            requestedTaskListRefresh = false;
        }
    }

    /**
     * TODO: update javadocs
     * This method triggers the execution of the rendering pipeline and, eventually, sends the output to the display
     * or to a file, when grabbing a screenshot.
     * <p>
     * In this particular implementation this method can be called once per frame, when rendering to a standard display,
     * or twice, each time with a different rendering stage, when rendering to the head mounted display.
     * <p>
     * PerformanceMonitor.startActivity/endActivity statements are used in this method and in those it executes,
     * to provide statistics regarding the ongoing rendering and its individual steps (i.e. rendering shadows,
     * reflections, 2D filters...).
     *
     * @param renderingStage "MONO" for standard rendering and "LEFT_EYE" or "RIGHT_EYE" for stereoscopic displays.
     */
    @Override
    public void render(RenderingStage renderingStage) {
        // If no rendering module populated renderGraph, throw an exception.
        /* if (renderGraph.getNodeMapSize() < 1) {
            throw new RuntimeException("Render graph is not ready to render. Did you use a rendering module?");
        } */
        preRenderUpdate(renderingStage);
        // TODO: Add a method here to check wireframe configuration and regenerate "renderPipelineTask" accordingly.
        // The following line re-establish OpenGL defaults, so that the nodes/tasks can rely on them.
        // A place where Terasology overrides the defaults is LwjglGraphics.initOpenGLParams(), but
        // there could be potentially other places, i.e. in the UI code. In the rendering engine we'd like
        // to eventually rely on a default OpenGL state.
        glDisable(GL_CULL_FACE);
        FBO lastUpdatedGBuffer = displayResolutionDependentFbo.getGBufferPair().getLastUpdatedFbo();
        glViewport(0, 0, lastUpdatedGBuffer.width(), lastUpdatedGBuffer.height());
        // glDisable(GL_DEPTH_TEST);
        // glDisable(GL_NORMALIZE); // currently keeping these as they are, until we find where they are used.
        // glDepthFunc(GL_LESS);
        renderPipelineTaskList.forEach(RenderPipelineTask::process);
        // this line re-establish Terasology defaults, so that the rest of the application can rely on them.
        LwjglGraphics.initOpenGLParams();
        playerCamera.updatePrevViewProjectionMatrix();
    }

    @Override
    public void requestTaskListRefresh() {
        requestedTaskListRefresh = true;
    }

    @Override
    public boolean isFirstRenderingStageForCurrentFrame() {
        return isFirstRenderingStageForCurrentFrame;
    }

    /**
     * Disposes of support objects used by this implementation.
     */
    @Override
    public void dispose() {
        renderableWorld.dispose();
        worldProvider.dispose();
        renderGraph.dispose();
        // TODO: Shift this to a better place, after a RenderGraph class has been implemented.
        SetViewportToSizeOf.disposeDefaultInstance();
    }

    @Override
    public void setViewDistance(ViewDistance viewDistance) {
        renderableWorld.updateChunksInProximity(viewDistance);
    }

    @Override
    public float getTimeSmoothedMainLightIntensity() {
        return timeSmoothedMainLightIntensity;
    }

    @Override
    public float getRenderingLightIntensityAt(Vector3f pos) {
        float rawLightValueSun = worldProvider.getSunlight(pos) / 15.0f;
        float rawLightValueBlock = worldProvider.getLight(pos) / 15.0f;
        float lightValueSun = (float) Math.pow(BLOCK_LIGHT_SUN_POW, (1.0f - rawLightValueSun) * 16.0) * rawLightValueSun;
        lightValueSun *= backdropProvider.getDaylight();
        // TODO: Hardcoded factor and value to compensate for daylight tint and night brightness
        lightValueSun *= 0.9f;
        lightValueSun += 0.05f;
        float lightValueBlock = (float) Math.pow(BLOCK_LIGHT_POW, (1.0f - (double) rawLightValueBlock) * 16.0f) * rawLightValueBlock * BLOCK_INTENSITY_FACTOR;
        return Math.max(lightValueBlock, lightValueSun);
    }

    @Override
    public float getMainLightIntensityAt(Vector3f position) {
        return backdropProvider.getDaylight() * worldProvider.getSunlight(position) / 15.0f;
    }

    @Override
    public float getBlockLightIntensityAt(Vector3f position) {
        return worldProvider.getLight(position) / 15.0f;
    }

    @Override
    public String getMetrics() {
        // A StringBuilder avoids the quadratic cost of repeated String concatenation.
        final StringBuilder metrics = new StringBuilder(renderableWorld.getMetrics());
        metrics.append("Empty Mesh Chunks: ").append(statChunkMeshEmpty).append("\n");
        metrics.append("Unready Chunks: ").append(statChunkNotReady).append("\n");
        metrics.append("Rendered Triangles: ").append(statRenderedTriangles).append("\n");
        return metrics.toString();
    }

    @Override
    public float getMillisecondsSinceRenderingStart() {
        return millisecondsSinceRenderingStart;
    }

    @Override
    public SubmersibleCamera getActiveCamera() {
        return playerCamera;
    }

    @Override
    public RenderingStage getCurrentRenderStage() {
        return currentRenderingStage;
    }

    @Override
    public RenderGraph getRenderGraph() {
        return renderGraph;
    }

    /**
     * Forces a recompilation of all shaders. This command, backed by Gestalt's monitoring feature,
     * allows developers to hot-swap shaders for easy development.
     *
     * To run the command simply type "recompileShaders" and then press Enter in the console.
     */
    @Command(shortDescription = "Forces a recompilation of shaders.", requiredPermission = PermissionManager.NO_PERMISSION)
    public void recompileShaders() {
        console.addMessage("Recompiling shaders... ", false);
        shaderManager.recompileAllShaders();
        console.addMessage("done!");
    }

    /**
     * Acts as an interface between the console and the Nodes. All parameters passed to command are redirected to the
     * concerned Nodes, which in turn take care of executing them.
     *
     * Usage:
     * dagNodeCommand <nodeUri> <command> <parameters>
     *
     * Example:
     * dagNodeCommand engine:outputToScreenNode setFbo engine:fbo.ssao
     */
    @Command(shortDescription = "Debugging command for DAG.", requiredPermission = PermissionManager.NO_PERMISSION)
    public void dagNodeCommand(@CommandParam("nodeUri") final String nodeUri, @CommandParam("command") final String command,
                               @CommandParam(value = "arguments") final String... arguments) {
        Node node = renderGraph.findNode(nodeUri);
        if (node == null) {
            node = renderGraph.findAka(nodeUri);
            if (node == null) {
                throw new RuntimeException("No node is associated with URI '" + nodeUri + "'");
            }
        }
        node.handleCommand(command, arguments);
    }

    /**
     * Redirect output FBO from one node to another's input
     *
     * Usage:
     * dagRedirect <connectionTypeString> <fromNodeUri> <outputFboId> <toNodeUri> <inputFboId>
     *
     * Example:
     * dagRedirect fbo blurredAmbientOcclusion 1 BasicRendering:outputToScreenNode 1
     * dagRedirect bufferpair backdrop 1 AdvancedRendering:intermediateHazeNode 1
     */
    @Command(shortDescription = "Debugging command for DAG.", requiredPermission = PermissionManager.NO_PERMISSION)
    // Fixed: the first parameter was previously annotated @CommandParam("fromNodeUri"), which
    // mislabelled the connection-type argument in the console's help/usage output.
    public void dagRedirect(@CommandParam("connectionTypeString") final String connectionTypeString, @CommandParam("fromNodeUri") final String fromNodeUri, @CommandParam("outputFboId") final int outputFboId,
                            @CommandParam("toNodeUri") final String toNodeUri, @CommandParam(value = "inputFboId") final int inputFboId) {
        RenderGraph.ConnectionType connectionType;
        if (connectionTypeString.equalsIgnoreCase("fbo")) {
            connectionType = RenderGraph.ConnectionType.FBO;
        } else if (connectionTypeString.equalsIgnoreCase("bufferpair")) {
            connectionType = RenderGraph.ConnectionType.BUFFER_PAIR;
        } else {
            throw new RuntimeException("Unsupported connection type: '" + connectionTypeString + "'. Expected 'fbo' or 'bufferpair'.\n");
        }
        Node toNode = renderGraph.findNode(toNodeUri);
        if (toNode == null) {
            toNode = renderGraph.findAka(toNodeUri);
            if (toNode == null) {
                throw new RuntimeException("No node is associated with URI '" + toNodeUri + "'");
            }
        }
        Node fromNode = renderGraph.findNode(fromNodeUri);
        if (fromNode == null) {
            fromNode = renderGraph.findAka(fromNodeUri);
            if (fromNode == null) {
                throw new RuntimeException("No node is associated with URI '" + fromNodeUri + "'");
            }
        }
        renderGraph.reconnectInputToOutput(fromNode, outputFboId, toNode, inputFboId, connectionType, true);
        toNode.clearDesiredStateChanges();
        requestTaskListRefresh();
    }
}
| Abort load and show an error when there's no active rendering module.
| engine/src/main/java/org/terasology/rendering/world/WorldRendererImpl.java | Abort load and show an error when there's no active rendering module. |
|
Java | apache-2.0 | 53e4cf2176e4d83efeca27ed556d9cd04a5b8dcf | 0 | lucilecoutouly/heroic,OdenTech/heroic,dbrounst/heroic,spotify/heroic,dimaslv/heroic,zfrank/heroic,lucilecoutouly/heroic,spotify/heroic,spotify/heroic,lucilecoutouly/heroic,dimaslv/heroic,spotify/heroic | /*
* Copyright (c) 2015 Spotify AB.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.heroic.http.write;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.container.AsyncResponse;
import javax.ws.rs.container.Suspended;
import javax.ws.rs.core.MediaType;
import com.google.inject.Inject;
import com.spotify.heroic.common.JavaxRestFramework;
import com.spotify.heroic.ingestion.IngestionManager;
@Path("/write")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public class WriteResource {
    // Manager used to resolve the ingestion group the metric is written through.
    @Inject
    private IngestionManager ingestion;

    // Bridges heroic's async futures onto the suspended JAX-RS response.
    @Inject
    private JavaxRestFramework httpAsync;

    /**
     * Writes the posted metric through the ingestion group selected by the
     * {@code group} query parameter, resuming the suspended response once the
     * asynchronous write completes.
     *
     * @param response asynchronous JAX-RS response, resumed when the write finishes
     * @param group name of the ingestion group to write to
     * @param write request body describing the metric to write
     * @throws Exception if binding the asynchronous operation fails
     */
    @POST
    public void metrics(@Suspended final AsyncResponse response, @QueryParam("group") String group,
            WriteMetricRequest write) throws Exception {
        httpAsync.bind(response, ingestion.useGroup(group).write(write.toWriteMetric()), r -> r);
    }
}
| heroic-core/src/main/java/com/spotify/heroic/http/write/WriteResource.java | /*
* Copyright (c) 2015 Spotify AB.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.heroic.http.write;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.container.AsyncResponse;
import javax.ws.rs.container.Suspended;
import javax.ws.rs.core.MediaType;
import com.google.inject.Inject;
import com.spotify.heroic.common.JavaxRestFramework;
import com.spotify.heroic.ingestion.IngestionManager;
import lombok.Data;
@Path("/write")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public class WriteResource {
    // Manager used to resolve the ingestion group the metric is written through.
    @Inject
    private IngestionManager ingestion;

    // Bridges heroic's async futures onto the suspended JAX-RS response.
    @Inject
    private JavaxRestFramework httpAsync;

    // NOTE(review): Message is not referenced anywhere in this file — presumably a
    // leftover; verify against external callers before removing.
    @Data
    public static final class Message {
        private final String message;
    }

    /**
     * Writes the posted metric through the ingestion group selected by the
     * {@code backend} query parameter, resuming the suspended response once the
     * asynchronous write completes.
     *
     * @param response asynchronous JAX-RS response, resumed when the write finishes
     * @param backendGroup name of the ingestion (backend) group to write to
     * @param write request body describing the metric to write
     * @throws Exception if binding the asynchronous operation fails
     */
    @POST
    public void metrics(@Suspended final AsyncResponse response,
            @QueryParam("backend") String backendGroup, WriteMetricRequest write) throws Exception {
        httpAsync.bind(response, ingestion.useGroup(backendGroup).write(write.toWriteMetric()),
                r -> r);
    }
}
| [core] clean-up in /write resource
| heroic-core/src/main/java/com/spotify/heroic/http/write/WriteResource.java | [core] clean-up in /write resource |
|
Java | apache-2.0 | a168bf8983068b7e243bd8237da63b58d2249037 | 0 | consulo/consulo-ui-designer,consulo/consulo-ui-designer | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.uiDesigner.editor;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorPolicy;
import com.intellij.openapi.fileEditor.FileEditorProvider;
import com.intellij.openapi.fileEditor.FileEditorState;
import com.intellij.openapi.module.ModuleUtil;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.uiDesigner.GuiFormFileType;
import com.intellij.util.ArrayUtil;
public final class UIFormEditorProvider implements FileEditorProvider, DumbAware {
    private static final Logger LOG = Logger.getInstance("#com.intellij.uiDesigner.editor.UIFormEditorProvider");

    /**
     * Accepts a file when it is a non-binary GUI form file that either belongs to
     * some module of the project or is an in-memory (light) virtual file.
     */
    @Override
    public boolean accept(@NotNull final Project project, @NotNull final VirtualFile file) {
        if (file.getFileType() != GuiFormFileType.INSTANCE || GuiFormFileType.INSTANCE.isBinary()) {
            return false;
        }
        return ModuleUtil.findModuleForFile(file, project) != null || file instanceof LightVirtualFile;
    }

    /** Creates the form designer editor; the file must already pass {@link #accept}. */
    @Override
    @NotNull
    public FileEditor createEditor(@NotNull final Project project, @NotNull final VirtualFile file) {
        LOG.assertTrue(accept(project, file));
        return new UIFormEditor(project, file);
    }

    @Override
    public void disposeEditor(@NotNull final FileEditor editor) {
        Disposer.dispose(editor);
    }

    /** State persistence is not implemented yet; returns a placeholder state. */
    @Override
    @NotNull
    public FileEditorState readState(@NotNull final Element element, @NotNull final Project project, @NotNull final VirtualFile file) {
        //TODO[anton,vova] implement
        return new MyEditorState(-1, ArrayUtil.EMPTY_STRING_ARRAY);
    }

    /** State persistence is not implemented yet; intentionally a no-op. */
    @Override
    public void writeState(@NotNull final FileEditorState state, @NotNull final Project project, @NotNull final Element element) {
        //TODO[anton,vova] implement
    }

    @Override
    @NotNull
    public String getEditorTypeId() {
        return "ui-designer";
    }

    /** The designer tab is shown before the default (text) editor. */
    @Override
    @NotNull
    public FileEditorPolicy getPolicy() {
        return FileEditorPolicy.PLACE_BEFORE_DEFAULT_EDITOR;
    }
}
| src/com/intellij/uiDesigner/editor/UIFormEditorProvider.java | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.uiDesigner.editor;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorPolicy;
import com.intellij.openapi.fileEditor.FileEditorProvider;
import com.intellij.openapi.fileEditor.FileEditorState;
import com.intellij.openapi.module.ModuleUtil;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.uiDesigner.GuiFormFileType;
import com.intellij.util.ArrayUtil;
public final class UIFormEditorProvider implements FileEditorProvider, DumbAware {
    private static final Logger LOG = Logger.getInstance("#com.intellij.uiDesigner.editor.UIFormEditorProvider");

    /**
     * Accepts a file when it is a non-binary GUI form file that either belongs to
     * some module of the project or is an in-memory (light) virtual file.
     */
    @Override
    public boolean accept(@NotNull final Project project, @NotNull final VirtualFile file) {
        if (file.getFileType() != GuiFormFileType.INSTANCE || GuiFormFileType.INSTANCE.isBinary()) {
            return false;
        }
        return ModuleUtil.findModuleForFile(file, project) != null || file instanceof LightVirtualFile;
    }

    /** Creates the form designer editor; the file must already pass {@link #accept}. */
    @Override
    @NotNull
    public FileEditor createEditor(@NotNull final Project project, @NotNull final VirtualFile file) {
        LOG.assertTrue(accept(project, file));
        return new UIFormEditor(project, file);
    }

    @Override
    public void disposeEditor(@NotNull final FileEditor editor) {
        Disposer.dispose(editor);
    }

    /** State persistence is not implemented yet; returns a placeholder state. */
    @Override
    @NotNull
    public FileEditorState readState(@NotNull final Element element, @NotNull final Project project, @NotNull final VirtualFile file) {
        //TODO[anton,vova] implement
        return new MyEditorState(-1, ArrayUtil.EMPTY_STRING_ARRAY);
    }

    /** State persistence is not implemented yet; intentionally a no-op. */
    @Override
    public void writeState(@NotNull final FileEditorState state, @NotNull final Project project, @NotNull final Element element) {
        //TODO[anton,vova] implement
    }

    @Override
    @NotNull
    public String getEditorTypeId() {
        return "ui-designer";
    }

    /**
     * In internal (development) mode the designer is shown before the default text
     * editor; otherwise the default editor is hidden entirely.
     */
    @Override
    @NotNull
    public FileEditorPolicy getPolicy() {
        if (ApplicationManagerEx.getApplicationEx().isInternal()) {
            return FileEditorPolicy.PLACE_BEFORE_DEFAULT_EDITOR;
        }
        return FileEditorPolicy.HIDE_DEFAULT_EDITOR;
    }
}
| changed policy
| src/com/intellij/uiDesigner/editor/UIFormEditorProvider.java | changed policy |
|
Java | artistic-2.0 | 3b55abae19e32b6802ba76f6bd86ccb58fbac1ee | 0 | sorinAche23/Psafe,gpmidi/pwsafe,Sp1l/pwsafe,sorinAche23/Psafe,Sp1l/pwsafe,gpmidi/pwsafe,gpmidi/pwsafe,ronys/pwsafe-test,sorinAche23/Psafe,Sp1l/pwsafe,sorinAche23/Psafe,ronys/pwsafe-test,ronys/pwsafe-test,gpmidi/pwsafe,sorinAche23/Psafe,ronys/pwsafe-test,Sp1l/pwsafe,Sp1l/pwsafe,Sp1l/pwsafe,sorinAche23/Psafe,gpmidi/pwsafe,Sp1l/pwsafe,ronys/pwsafe-test,sorinAche23/Psafe,sorinAche23/Psafe,ronys/pwsafe-test,ronys/pwsafe-test,gpmidi/pwsafe,gpmidi/pwsafe,Sp1l/pwsafe,gpmidi/pwsafe,ronys/pwsafe-test | package org.pwsafe.lib.crypto;
import java.security.MessageDigest;
import java.security.Provider;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
/**
* SHA256 implementation. Currently uses BouncyCastle provider underneath.
*
* @author Glen Smith
*/
public class SHA256Pws {

    private static final Provider provider = new BouncyCastleProvider();

    /**
     * Computes the SHA-256 digest of the given bytes.
     * <p>
     * A fresh {@link MessageDigest} is created on every call: MessageDigest
     * instances are stateful and not thread-safe, so caching a single shared
     * instance in a static field (as this class previously did) corrupts digests
     * under concurrent use, and the unsynchronized lazy initialization was itself
     * a data race.
     *
     * @param incoming the bytes to hash
     * @return the 32-byte SHA-256 digest of {@code incoming}
     * @throws RuntimeException if the SHA256 algorithm is unavailable from the provider
     */
    public static byte[] digest(byte[] incoming) {
        try {
            return MessageDigest.getInstance("SHA256", provider).digest(incoming);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
| pwsafe/Java/PasswordSafeLib/src/org/pwsafe/lib/crypto/SHA256Pws.java | package org.pwsafe.lib.crypto;
import java.security.MessageDigest;
import java.security.Provider;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
/**
* SHA256 implementation. Currently uses BouncyCastle provider underneath.
*
* @author Glen Smith
*/
public class SHA256Pws {

    // Provider supplying the SHA256 implementation used by this class.
    private static Provider provider = new BouncyCastleProvider();

    /**
     * Computes the SHA-256 digest of the given bytes.
     *
     * @param incoming the bytes to hash
     * @return the 32-byte SHA-256 digest of {@code incoming}
     * @throws RuntimeException if the digest cannot be obtained from the provider
     */
    public static byte[] digest(byte[] incoming) {
        try {
            final MessageDigest sha256 = MessageDigest.getInstance("SHA256", provider);
            return sha256.digest(incoming);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
| Cache handle to instance
git-svn-id: 7e36d3665aeca4d4e1f6df8911a80efc6ef565e7@964 1f79f812-37fb-46fe-a122-30589dd2bf55
| pwsafe/Java/PasswordSafeLib/src/org/pwsafe/lib/crypto/SHA256Pws.java | Cache handle to instance |
|
Java | bsd-2-clause | 7bf010fd9bfd0d6ea0d7dfdb28a1471ee83714be | 0 | RealTimeGenomics/rtg-tools,RealTimeGenomics/rtg-tools,RealTimeGenomics/rtg-tools,RealTimeGenomics/rtg-tools | /*
* Copyright (c) 2014. Real Time Genomics Limited.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.rtg.reader;
import static com.rtg.util.StringUtils.LS;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.CRC32;
import com.rtg.mode.SequenceType;
import com.rtg.util.StringUtils;
import com.rtg.util.array.ArrayUtils;
import com.rtg.util.array.ExtensibleIndex;
import com.rtg.util.array.longindex.LongChunks;
import com.rtg.util.bytecompression.BitwiseByteArray;
import com.rtg.util.bytecompression.ByteArray;
import com.rtg.util.bytecompression.ByteBaseCompression;
import com.rtg.util.bytecompression.ByteCompression;
import com.rtg.util.bytecompression.CompressedByteArray;
import com.rtg.util.diagnostic.Diagnostic;
import com.rtg.util.integrity.Exam;
import com.rtg.util.integrity.Integrity;
import com.rtg.util.intervals.LongRange;
/**
 * SequencesReader which caches the entire SDF in memory.
 * This can handle DNA or protein, and the quality data (if any) is compressed as well.
 * See <code>com.rtg.reader.SdfSpeed</code> for speed benchmarking of this class.
 */
public class CompressedMemorySequencesReader extends AbstractSequencesReader implements Integrity {

  /** Maximum quality value possible in an SDF */
  public static final int MAX_QUAL_VALUE = 64;

  private static final boolean DIRECT_SDF_LOAD = true; //Boolean.valueOf(System.getProperty("direct.sdf.load", "true"));

  /**
   * Creates a sequences reader from specified dir.
   * @param dir directory containing sequence data
   * @param loadNames whether to load names from disk or not
   * @param loadFullNames whether to load full names from disk or not
   * @param region subset of the SDF to load
   * @return Sequence reader for data
   * @throws IOException if an I/O error occurs
   */
  public static SequencesReader createSequencesReader(final File dir, final boolean loadNames, boolean loadFullNames, LongRange region) throws IOException {
    return createSequencesReader(dir, loadNames, loadFullNames, region, DIRECT_SDF_LOAD);
  }

  /**
   * Creates a sequences reader from the specified dir, selecting the implementation
   * based on the on-disk encoding and the {@code directLoad} flag.
   * @param dir directory containing sequence data
   * @param loadNames whether to load names from disk or not
   * @param loadFullNames whether to load full names from disk or not
   * @param region subset of the SDF to load
   * @param directLoad if true and the SDF is stored compressed, load the compressed form directly
   * @return Sequence reader for data
   * @throws IOException if an I/O error occurs
   */
  static SequencesReader createSequencesReader(final File dir, final boolean loadNames, boolean loadFullNames, LongRange region, boolean directLoad) throws IOException {
    final IndexFile index = new IndexFile(dir);
    // Valid ordinals are 0 .. values().length - 1; a value equal to values().length is
    // also corrupt, and without ">=" here it would escape this check only to throw a raw
    // ArrayIndexOutOfBoundsException on the values() lookup below.
    if (index.getSequenceType() < 0 || index.getSequenceType() >= SequenceType.values().length) {
      throw new CorruptSdfException(dir);
    }
    final SequenceType type = SequenceType.values()[index.getSequenceType()];
    final int range = type.numberKnownCodes() + type.firstValid();
    if (directLoad && index.getSequenceEncoding() == IndexFile.SEQUENCE_ENCODING_COMPRESSED) {
      return new CompressedMemorySequencesReader2(dir, index, loadNames, loadFullNames, region);
    } else {
      return new CompressedMemorySequencesReader(dir, index, range, loadNames, loadFullNames, region);
    }
  }

  /**
   * Creates a sequences reader from the specified source.
   * Not optimised, intended only for testing
   * @param source the data source
   * @return Sequence reader for data
   * @throws IOException if an I/O error occurs
   */
  public static SequencesReader createSequencesReader(final SequenceDataSource source) throws IOException {
    final List<byte[]> data = new ArrayList<>();
    final List<String> labels = new ArrayList<>();
    final List<Long> counts = new ArrayList<>();
    int min = Integer.MAX_VALUE;
    int max = Integer.MIN_VALUE;
    while (source.nextSequence()) {
      // Copy out only the current sequence; the source's buffer may be longer/reused.
      final byte[] b = new byte[source.currentLength()];
      System.arraycopy(source.sequenceData(), 0, b, 0, source.currentLength());
      data.add(b);
      labels.add(source.name());
      counts.add((long) source.currentLength());
      min = Math.min(min, source.currentLength());
      max = Math.max(max, source.currentLength());
    }
    // NOTE(review): for an empty source min/max remain MAX_VALUE/MIN_VALUE and are passed
    // through to the in-memory constructor unchanged — confirm callers never hit this.
    final byte[][] dataArray = data.toArray(new byte[data.size()][]);
    final String[] labelsArray = labels.toArray(new String[labels.size()]);
    return new CompressedMemorySequencesReader(dataArray, labelsArray, ArrayUtils.asLongArray(counts), min, max, source.type());
  }

  private final File mDirectory;
  private final File mCanonicalDirectory;
  private final IndexFile mIndex;
  private final ExtensibleIndex mPositions;   // logical start position of each sequence, plus one trailing end position
  private final BitwiseByteArray mSeqData;
  private final ByteArray mChecksums;         // one CRC32 (low byte) per sequence
  private final QualityLoader mQualityLoader; // lazily loads quality data; null when constructed from prepared components
  private final LongRange mRegion; // Section of the sdf to load
  protected final long mStart; // first sequence to load
  protected final long mEnd; // last sequence to load
  private final boolean mFullNamesRequested;

  // Delayed initialization
  private ByteCompression mQualityData;
  private ByteArray mQualityChecksums;
  private NamesInterface mNames;
  private NamesInterface mNameSuffixes;

  private int mReadCount = 0; // Number of reads read out (to indicate when to perform checksum check)
  private int mQualityCount = 0; // Number of qualities read out (to indicate when to perform checksum check)

  /**
   * Shallow copy constructor.
   * @param cmsr CompressedMemorySequencesReader to copy data from
   */
  public CompressedMemorySequencesReader(final CompressedMemorySequencesReader cmsr) {
    mDirectory = cmsr.mDirectory;
    mCanonicalDirectory = cmsr.mCanonicalDirectory;
    mIndex = cmsr.mIndex;
    mPositions = cmsr.mPositions;
    mChecksums = cmsr.mChecksums;
    mSeqData = cmsr.mSeqData;
    mNames = cmsr.mNames;
    mNameSuffixes = cmsr.mNameSuffixes;
    mQualityLoader = cmsr.mQualityLoader;
    mQualityData = cmsr.mQualityData;
    mQualityChecksums = cmsr.mQualityChecksums;
    mStart = cmsr.mStart;
    mEnd = cmsr.mEnd;
    mRegion = cmsr.mRegion;
    mFullNamesRequested = cmsr.mFullNamesRequested;
  }

  /**
   * Construct directly from already prepared components
   * @param originPath path to the original source of this data
   * @param indexFile index information
   * @param seqData compressed sequence data
   * @param qualityData compressed quality data
   * @param seqChecksums sequence data checksums
   * @param qualityChecksums quality data checksums
   * @param positions logical start position of each sequence
   * @param names names of sequences
   * @param nameSuffixes suffixes of names of sequences
   * @param region region restriction
   */
  public CompressedMemorySequencesReader(File originPath, IndexFile indexFile, BitwiseByteArray seqData, ByteCompression qualityData, ByteArray seqChecksums, ByteArray qualityChecksums, ExtensibleIndex positions, NamesInterface names, NamesInterface nameSuffixes, LongRange region) {
    mDirectory = originPath;
    mCanonicalDirectory = null;
    mIndex = indexFile;
    mSeqData = seqData;
    mQualityData = qualityData;
    mChecksums = seqChecksums;
    mQualityChecksums = qualityChecksums;
    mQualityLoader = null; // quality is already supplied, nothing to load lazily
    mPositions = positions;
    mNames = names;
    mNameSuffixes = nameSuffixes;
    mFullNamesRequested = mNameSuffixes != null;
    mRegion = SequencesReaderFactory.resolveRange(indexFile, region);
    mStart = mRegion.getStart();
    mEnd = mRegion.getEnd();
    assert mEnd >= mStart;
    if (mEnd > indexFile.getNumberSequences()) {
      throw new IllegalArgumentException("End sequence is greater than number of sequences in SDF");
    }
    final StringBuilder sb = new StringBuilder("CompressedMemorySequencesReader from non SDF source");
    this.infoString(sb);
    Diagnostic.developerLog(sb.toString());
  }

  /**
   * Constructor for use in tests. Has no quality data.
   * @param data the sequence data
   * @param labels the names
   * @param counts lengths of the sequence data
   * @param min minimum length
   * @param max maximum length
   * @param type the sequence type
   */
  public CompressedMemorySequencesReader(final byte[][] data, final String[] labels, final long[] counts, final int min, final int max, final SequenceType type) {
    assert data.length == counts.length;
    assert data.length == labels.length;
    final long totalLength = ArrayUtils.sum(counts);
    mIndex = new IndexFile(Long.MAX_VALUE, type.ordinal(), totalLength, max, min, counts.length);
    mDirectory = null;
    mCanonicalDirectory = null;
    mNames = new ArrayNames(labels);
    final int range = type.numberKnownCodes() + type.firstValid();
    mSeqData = new BitwiseByteArray(totalLength, CompressedByteArray.minBits(range));
    mQualityData = null;
    mQualityChecksums = null;
    mQualityLoader = null;
    mPositions = new LongChunks(data.length + 1);
    mChecksums = ByteArray.allocate(data.length);
    mStart = 0;
    mEnd = mIndex.getNumberSequences();
    mRegion = new LongRange(mStart, mEnd);
    // Pack each sequence into the bitwise array, recording its start offset and a
    // per-sequence CRC32 (low byte only) for later corruption checks.
    long pos = 0;
    final CRC32 checksum = new CRC32();
    for (int i = 0; i < counts.length; ++i) {
      mPositions.set(i, pos);
      mSeqData.set(pos, data[i], (int) counts[i]);
      pos += counts[i];
      checksum.update(data[i], 0, (int) counts[i]);
      mChecksums.set(i, (byte) checksum.getValue());
      checksum.reset();
    }
    mPositions.set(counts.length, pos); // trailing sentinel makes length(i) a simple difference
    mFullNamesRequested = false;
    final StringBuilder sb = new StringBuilder(LS).append("CompressedMemorySequencesReader-tests");
    this.infoString(sb);
    Diagnostic.userLog(sb.toString());
  }

  /**
   * Loads an SDF from disk into memory.
   * @param dir SDF directory
   * @param index parsed index information for the SDF
   * @param range number of valid codes (determines bits per base)
   * @param loadNames whether to load names
   * @param loadFullNames whether to also load name suffixes
   * @param region subset of the SDF to load
   * @throws IOException if an I/O error occurs, including {@link CorruptSdfException} on checksum failure
   */
  protected CompressedMemorySequencesReader(File dir, IndexFile index, int range, boolean loadNames, boolean loadFullNames, LongRange region) throws IOException {
    try {
      try {
        final long starttime = System.nanoTime();
        mFullNamesRequested = loadFullNames;
        mDirectory = dir;
        mCanonicalDirectory = dir.getCanonicalFile();
        mIndex = index;
        mRegion = SequencesReaderFactory.resolveRange(index, region);
        mStart = mRegion.getStart();
        mEnd = mRegion.getEnd();
        assert mEnd >= mStart;
        if (mEnd > index.getNumberSequences()) {
          throw new IllegalArgumentException("End sequence is greater than number of sequences in SDF");
        }
        final PointerFileHandler handler = PointerFileHandler.getHandler(index, PointerFileHandler.SEQUENCE_POINTER);
        final DataFileIndex seqIndex = DataFileIndex.loadSequenceDataFileIndex(index.dataIndexVersion(), dir);
        mPositions = new LongChunks(mEnd - mStart + 1);
        mChecksums = ByteArray.allocate(mPositions.length() - 1);
        final DataFileOpenerFactory openerFactory = new DataFileOpenerFactory(mIndex.getSequenceEncoding(), mIndex.getQualityEncoding(), type());
        if (mIndex.hasQuality() && mIndex.hasPerSequenceChecksums()) {
          mQualityChecksums = initQualityChecksumArray();
        }
        mQualityLoader = new QualityLoader(openerFactory, handler, mQualityChecksums);
        long dataSize = 0;
        if (mEnd - mStart > 0) {
          dataSize = SequenceDataLoader.loadPositions(mPositions, seqIndex, mStart, mEnd, dir, handler, mChecksums, mQualityChecksums);
        }
        mSeqData = new BitwiseByteArray(dataSize, CompressedByteArray.minBits(range));
        if (mEnd - mStart > 0) {
          final long hash = SequenceDataLoader.loadData(mSeqData, seqIndex, mStart, mEnd, dir, mPositions, mChecksums, openerFactory, handler, mIndex.hasPerSequenceChecksums());
          // The global checksum is only comparable when the entire SDF was loaded.
          if (mIndex.getVersion() >= IndexFile.SEPARATE_CHECKSUM_VERSION && mStart == 0 && mEnd == index.getNumberSequences()) {
            if (hash != mIndex.getDataChecksum()) {
              throw new CorruptSdfException("Sequence data failed checksum - SDF may be corrupt: \"" + mDirectory + "\"");
            } else {
              Diagnostic.developerLog("Sequence data passed checksum");
            }
          }
        }
        final long stoptime = System.nanoTime();
        final double timetaken = (stoptime - starttime) / 1000000000.0;
        final int speedMB = (int) (dataSize * 1000.0 / (stoptime - starttime));
        if (loadNames && mIndex.hasNames()) {
          loadNames();
          loadNameSuffixes(loadFullNames, mIndex.hasSequenceNameSuffixes());
        }
        final StringBuilder sb = new StringBuilder("CompressedMemorySequencesReader ").append(speedMB).append(" MB/sec, time ").append(timetaken).append(" sec").append(LS);
        infoString(sb);
        Diagnostic.developerLog(sb.toString());
      } catch (final ArrayIndexOutOfBoundsException | IllegalArgumentException e) {
        // Malformed on-disk data manifests as out-of-range indices/arguments during load.
        // NOTE(review): the cause is dropped here — if CorruptSdfException has a cause-taking
        // constructor, chaining `e` would aid debugging; confirm before changing.
        throw new CorruptSdfException();
      }
    } catch (final NegativeArraySizeException e) {
      throw new CorruptSdfException();
    }
  }

  @Override
  public boolean integrity() {
    Exam.assertTrue(checkChecksums());
    return true;
  }

  @Override
  public boolean globalIntegrity() {
    return integrity();
  }

  @Override
  public long numberSequences() {
    return mEnd - mStart;
  }

  @Override
  public File path() {
    return mDirectory;
  }

  @Override
  public void close() { } // no need to do anything

  @Override
  public NamesInterface names() {
    if (mNames == null) {
      throw new IllegalStateException("Names have not been loaded or are not present in the SDF");
    }
    return mNames;
  }

  /**
   * Load the names if they haven't already been loaded.
   * @throws IOException if an I/O related error occurs
   */
  private void loadNames() throws IOException {
    mNames = new Names(mDirectory, mRegion, false);
    // Name checksum is only comparable when the full SDF region was loaded.
    if (mIndex.getVersion() >= IndexFile.SEPARATE_CHECKSUM_VERSION && mRegion.getStart() == 0 && mRegion.getEnd() == mIndex.getNumberSequences()) {
      if (mNames.calcChecksum() != mIndex.getNameChecksum()) {
        throw new CorruptSdfException("Sequence names failed checksum - SDF may be corrupt: \"" + mDirectory + "\"");
      } else {
        Diagnostic.developerLog("Sequence names passed checksum");
      }
    }
  }

  /**
   * Loads name suffixes, or an empty placeholder when not requested/present.
   * @param attemptLoad whether full names were requested
   * @param suffixExists whether the SDF contains suffix data
   * @throws IOException if an I/O related error occurs
   */
  private void loadNameSuffixes(boolean attemptLoad, boolean suffixExists) throws IOException {
    mNameSuffixes = attemptLoad && suffixExists ? new Names(mDirectory, mRegion, true) : new EmptyStringNames(mRegion.getLength());
    if (attemptLoad && suffixExists) {
      if (mRegion.getStart() == 0 && mRegion.getEnd() == mIndex.getNumberSequences()) {
        if (mNameSuffixes.calcChecksum() != mIndex.getNameSuffixChecksum()) {
          throw new CorruptSdfException("Sequence name suffixes failed checksum - SDF may be corrupt: \"" + mDirectory + "\"");
        } else {
          Diagnostic.developerLog("Sequence name suffixes passed checksum");
        }
      }
    }
  }

  @Override
  public long lengthBetween(final long start, final long end) {
    return mPositions.get(end) - mPositions.get(start);
  }

  @Override
  public int[] sequenceLengths(final long start, final long end) {
    final int[] a = new int[(int) (end - start)];
    for (long i = start; i < end; ++i) {
      a[(int) (i - start)] = (int) (mPositions.get(i + 1) - mPositions.get(i));
    }
    return a;
  }

  @Override
  public SequencesReader copy() {
    return new CompressedMemorySequencesReader(this);
  }

  // Direct access methods
  @Override
  public final String name(long sequenceIndex) {
    if (!mIndex.hasNames()) {
      throw new IllegalStateException("SDF contains no name data");
    }
    return names().name(sequenceIndex);
  }

  @Override
  public final String nameSuffix(long sequenceIndex) {
    if (!mIndex.hasNames()) {
      throw new IllegalStateException("SDF contains no name data");
    }
    if (!mFullNamesRequested) {
      throw new IllegalStateException("Full names were not loaded");
    }
    return mNameSuffixes.name(sequenceIndex);
  }

  @Override
  public final int length(final long sequenceIndex) {
    // Length is the difference between adjacent start positions (trailing sentinel covers the last sequence).
    return (int) (mPositions.get(sequenceIndex + 1) - mPositions.get(sequenceIndex));
  }

  @Override
  public byte sequenceDataChecksum(long sequenceIndex) throws IOException {
    return mChecksums.get(sequenceIndex);
  }

  @Override
  public int read(final long sequenceIndex, final byte[] dataOut) {
    final int length = read(sequenceIndex, dataOut, 0, length(sequenceIndex));
    ++mReadCount;
    // check every Nth (256) read for CRC corruption w.r.t. the original file.
    if ((mReadCount & 0xff) == 0) {
      checkChecksum(sequenceIndex, dataOut, length, mChecksums.get(sequenceIndex));
    }
    return length;
  }

  @Override
  public int read(final long sequenceIndex, final byte[] dataOut, final int start, final int length) {
    final int fullLength = length(sequenceIndex);
    if ((start + length) > fullLength) {
      throw new IllegalArgumentException("Requested data not a subset of sequence data.");
    }
    if (length > dataOut.length) {
      throw new IllegalArgumentException("Array too small got: " + dataOut.length + " required: " + length);
    }
    mSeqData.get(dataOut, mPositions.get(sequenceIndex) + start, length);
    return length;
  }

  @Override
  public int readQuality(final long sequenceIndex, final byte[] dest) throws IOException {
    return readQuality(sequenceIndex, dest, 0, length(sequenceIndex));
  }

  @Override
  public int readQuality(long sequenceIndex, byte[] dest, int start, int length) throws IOException {
    pullInQuality(); // lazily load quality data on first access
    if (mQualityData == null) {
      return 0; // SDF has no quality data
    }
    final int fullLength = length(sequenceIndex);
    if ((start + length) > fullLength) {
      throw new IllegalArgumentException("Requested data not a subset of sequence data.");
    } else if (length > dest.length) {
      throw new IllegalArgumentException("Array too small got: " + dest.length + " required: " + length);
    }
    mQualityData.get(dest, sequenceIndex, start, length);
    // check every Nth (256) read for CRC corruption w.r.t. the original file.
    ++mQualityCount;
    if ((mQualityCount & 0xff) == 0 && length > 0) {
      checkChecksum(sequenceIndex, dest, length, mQualityChecksums.get(sequenceIndex));
    }
    return length;
  }

  /**
   * Loads quality data (and its checksums) into this reader on first use.
   * No-op when already loaded or when the SDF has no quality data.
   * @throws IOException if an I/O error occurs
   */
  void pullInQuality() throws IOException {
    if (mQualityData == null && mIndex.hasQuality()) {
      mQualityData = mQualityLoader.getQuality();
      mQualityChecksums = mQualityLoader.getQualityChecksums();
      final StringBuilder sb = new StringBuilder();
      sb.append(LS);
      sb.append("Memory Usage\tbytes\tlength").append(LS);
      infoQuality(sb);
      Diagnostic.developerLog(sb.toString());
    }
  }

  /** @return a checksum array with one slot per loaded sequence */
  ByteArray initQualityChecksumArray() {
    return ByteArray.allocate(mPositions.length() - 1);
  }

  /**
   * Appends a memory-usage summary table to the given builder.
   * @param sb destination for the summary
   */
  void infoString(final StringBuilder sb) {
    sb.append("Memory Usage\tbytes\tlength").append(LS);
    long totalBytes = 0;
    sb.append("\t\t").append(StringUtils.commas(mSeqData.bytes())).append("\t").append(StringUtils.commas(mSeqData.length())).append("\tSeqData").append(LS);
    totalBytes += mSeqData.bytes();
    sb.append("\t\t").append(StringUtils.commas(mChecksums.bytes())).append("\t").append(StringUtils.commas(mChecksums.length())).append("\tSeqChecksums").append(LS);
    // Accumulate the checksum array just reported; previously this added mPositions.bytes()
    // here, omitting the checksum bytes and double-counting positions (added again below).
    totalBytes += mChecksums.bytes();
    totalBytes += infoQuality(sb);
    if (mNames != null) {
      sb.append("\t\t").append(StringUtils.commas(mNames.bytes())).append("\t").append(StringUtils.commas(mNames.length())).append("\tNames").append(LS);
      totalBytes += mNames.bytes();
    }
    if (mNameSuffixes != null) {
      sb.append("\t\t").append(StringUtils.commas(mNameSuffixes.bytes())).append("\t").append(StringUtils.commas(mNameSuffixes.length())).append("\tSuffixes").append(LS);
      totalBytes += mNameSuffixes.bytes();
    }
    sb.append("\t\t").append(StringUtils.commas(mPositions.bytes())).append("\t").append(StringUtils.commas(mPositions.length())).append("\tPositions").append(LS);
    totalBytes += mPositions.bytes();
    sb.append("\t\t").append(StringUtils.commas(totalBytes)).append("\t\tTotal bytes").append(LS);
  }

  /**
   * Appends quality memory-usage rows (if quality is loaded).
   * @param sb destination for the rows
   * @return number of bytes used by quality structures
   */
  private long infoQuality(StringBuilder sb) {
    long qualBytes = 0;
    if (mQualityData != null) {
      // NOTE(review): both rows report the total data length (last position) as "length";
      // for the checksum row mQualityChecksums.length() may have been intended — log-only.
      sb.append("\t\t").append(StringUtils.commas(mQualityData.bytes())).append("\t").append(StringUtils.commas(mPositions.get(mPositions.length() - 1))).append("\tQualityData").append(LS);
      qualBytes += mQualityData.bytes();
      sb.append("\t\t").append(StringUtils.commas(mQualityChecksums.bytes())).append("\t").append(StringUtils.commas(mPositions.get(mPositions.length() - 1))).append("\tQualityChecksums").append(LS);
      qualBytes += mQualityChecksums.bytes();
    }
    return qualBytes;
  }

  /**
   * Recomputes per-sequence CRCs for all sequence (and quality, if loaded) data.
   * @return true if every stored checksum matches
   */
  boolean checkChecksums() {
    final CRC32 checksum = new CRC32();
    final byte[] buffer = new byte[(int) maxLength()];
    for (long i = 0; i < numberSequences(); ++i) {
      mSeqData.get(buffer, mPositions.get(i), length(i));
      checksum.reset();
      checksum.update(buffer, 0, length(i));
      if ((byte) checksum.getValue() != mChecksums.get(i)) {
        return false;
      }
      if (mQualityData != null) {
        // now check quality checksums
        mQualityData.get(buffer, i, 0, length(i));
        checksum.reset();
        checksum.update(buffer, 0, length(i));
        if ((byte) checksum.getValue() != mQualityChecksums.get(i)) {
          return false;
        }
      }
    }
    return true;
  }

  /**
   * Verifies the low byte of the CRC32 of {@code data[0..length)} against an expected value,
   * logging a diagnostic on mismatch.
   * @param seqId sequence id (for the log message only)
   * @param data data to checksum
   * @param length number of bytes of {@code data} to include
   * @param sum expected checksum byte
   * @return true if the checksum matches
   */
  static boolean checkChecksum(final long seqId, final byte[] data, final int length, final int sum) {
    final CRC32 checksum = new CRC32();
    checksum.update(data, 0, length);
    if ((byte) checksum.getValue() == sum) {
      return true;
    } else {
      Diagnostic.userLog(String.format("CHECKSUM FAILED FOR SEQUENCE %d%n" + "EXPECTED %04X BUT WAS %04X", seqId, sum, (byte) checksum.getValue()));
      return false;
    }
  }

  int getChecksum(final long seqId) {
    return mChecksums.get(seqId);
  }

  /**
   * Lazily loads quality data and its checksums from disk, thread-safely.
   * Uses the volatile-flag / synchronized-init pattern:
   * http://www.cs.umd.edu/~pugh/java/memoryModel/DoubleCheckedLocking.html
   */
  private class QualityLoader {
    private final DataFileOpenerFactory mOpenerFactory;
    private final PointerFileHandler mHandler;

    QualityLoader(DataFileOpenerFactory fact, PointerFileHandler handler, ByteArray qualityChecksums) {
      mOpenerFactory = fact;
      mHandler = handler;
      mQualityChecksums = qualityChecksums;
    }

    volatile boolean mInit = false;
    // These intentionally shadow the outer class's fields of the same name; the outer
    // reader copies them out in pullInQuality() once loading completes.
    private volatile ByteArray mQualityChecksums;
    private volatile ByteCompression mQualityData;

    private synchronized void init() throws IOException {
      if (!mInit) {
        Diagnostic.developerLog("loading quality data...");
        final boolean preloaded;
        if (mQualityChecksums == null) {
          mQualityChecksums = initQualityChecksumArray();
          preloaded = false;
        } else {
          preloaded = true;
        }
        final long dataSize = mPositions.get(mPositions.length() - 1);
        final ByteArray qualData = new CompressedByteArray(dataSize, MAX_QUAL_VALUE, false);
        final DataFileIndex seqIndex = DataFileIndex.loadSequenceDataFileIndex(mIndex.dataIndexVersion(), mDirectory);
        final long hash = SequenceDataLoader.loadQuality(qualData, seqIndex, mStart, mEnd, mDirectory, mPositions, mQualityChecksums, mOpenerFactory, mHandler, preloaded);
        // Global quality checksum is only comparable when the whole SDF was loaded.
        if (mStart == 0 && mEnd == mIndex.getNumberSequences() && mIndex.getVersion() >= IndexFile.SEPARATE_CHECKSUM_VERSION) {
          if (hash != mIndex.getQualityChecksum()) {
            throw new CorruptSdfException("Sequence qualities failed checksum - SDF may be corrupt: \"" + mDirectory + "\"");
          } else {
            Diagnostic.developerLog("Sequence qualities passed checksum");
          }
        }
        mQualityData = new ByteBaseCompression(qualData, mPositions);
        mQualityData.freeze();
        Diagnostic.developerLog("Loaded qualities for CompressedMemorySequencesReader");
        mInit = true; // publish: set last so readers of mInit see fully-initialized data
        Diagnostic.developerLog("finished loading quality data");
      }
    }

    ByteCompression getQuality() throws IOException {
      if (!mInit) {
        init();
      }
      return mQualityData;
    }

    ByteArray getQualityChecksums() throws IOException {
      if (!mInit) {
        init();
      }
      return mQualityChecksums;
    }
  }

  @Override
  public boolean compressed() {
    return mIndex.getSequenceEncoding() == IndexFile.SEQUENCE_ENCODING_COMPRESSED;
  }

  @Override
  public IndexFile index() {
    return mIndex;
  }
}
| src/com/rtg/reader/CompressedMemorySequencesReader.java | /*
* Copyright (c) 2014. Real Time Genomics Limited.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.rtg.reader;
import static com.rtg.util.StringUtils.LS;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.CRC32;
import com.rtg.mode.SequenceType;
import com.rtg.util.StringUtils;
import com.rtg.util.array.ArrayUtils;
import com.rtg.util.array.ExtensibleIndex;
import com.rtg.util.array.longindex.LongChunks;
import com.rtg.util.bytecompression.BitwiseByteArray;
import com.rtg.util.bytecompression.ByteArray;
import com.rtg.util.bytecompression.ByteBaseCompression;
import com.rtg.util.bytecompression.ByteCompression;
import com.rtg.util.bytecompression.CompressedByteArray;
import com.rtg.util.diagnostic.Diagnostic;
import com.rtg.util.integrity.Exam;
import com.rtg.util.integrity.Integrity;
import com.rtg.util.intervals.LongRange;
/**
* SequencesReader which caches the entire SDF in memory.
* This can handle DNA or protein, and the quality data (if any) is compressed as well.
* See <code>com.rtg.reader.SdfSpeed</code> for speed benchmarking of this class.
*/
public class CompressedMemorySequencesReader extends AbstractSequencesReader implements Integrity {
/** Maximum quality value possible in an SDF */
public static final int MAX_QUAL_VALUE = 64;
private static final boolean DIRECT_SDF_LOAD = true; //Boolean.valueOf(System.getProperty("direct.sdf.load", "true"));
/**
* Creates a sequences reader from specified dir.
* @param dir directory containing sequence data
* @param loadNames whether to load names from disk or not
* @param loadFullNames whether to load full names from disk or not
* @param region subset of the SDF to load
* @return Sequence reader for data
* @throws IOException if an I/O error occurs
*/
public static SequencesReader createSequencesReader(final File dir, final boolean loadNames, boolean loadFullNames, LongRange region) throws IOException {
return createSequencesReader(dir, loadNames, loadFullNames, region, DIRECT_SDF_LOAD);
}
static SequencesReader createSequencesReader(final File dir, final boolean loadNames, boolean loadFullNames, LongRange region, boolean directLoad) throws IOException {
final IndexFile index = new IndexFile(dir);
if (index.getSequenceType() < 0 || index.getSequenceType() > SequenceType.values().length) {
throw new CorruptSdfException(dir);
}
final SequenceType type = SequenceType.values()[index.getSequenceType()];
final int range = type.numberKnownCodes() + type.firstValid();
if (directLoad && index.getSequenceEncoding() == IndexFile.SEQUENCE_ENCODING_COMPRESSED) {
return new CompressedMemorySequencesReader2(dir, index, loadNames, loadFullNames, region);
} else {
return new CompressedMemorySequencesReader(dir, index, range, loadNames, loadFullNames, region);
}
}
/**
* Creates a sequences reader from the specified source.
* Not optimised, intended only for testing
* @param source the data source
* @return Sequence reader for data
* @throws IOException if an I/O error occurs
*/
public static SequencesReader createSequencesReader(final SequenceDataSource source) throws IOException {
final List<byte[]> data = new ArrayList<>();
final List<String> labels = new ArrayList<>();
final List<Long> counts = new ArrayList<>();
int min = Integer.MAX_VALUE;
int max = Integer.MIN_VALUE;
while (source.nextSequence()) {
final byte[] b = new byte[source.currentLength()];
System.arraycopy(source.sequenceData(), 0, b, 0, source.currentLength());
data.add(b);
labels.add(source.name());
counts.add((long) source.currentLength());
min = Math.min(min, source.currentLength());
max = Math.max(max, source.currentLength());
}
final byte[][] dataArray = data.toArray(new byte[data.size()][]);
final String[] labelsArray = labels.toArray(new String[labels.size()]);
return new CompressedMemorySequencesReader(dataArray, labelsArray, ArrayUtils.asLongArray(counts), min, max, source.type());
}
private final File mDirectory;
private final File mCanonicalDirectory;
private final IndexFile mIndex;
private final ExtensibleIndex mPositions;
private final BitwiseByteArray mSeqData;
private final ByteArray mChecksums;
private final QualityLoader mQualityLoader;
private final LongRange mRegion; // Section of the sdf to load
protected final long mStart; // first sequence to load
protected final long mEnd; // last sequence to load
private final boolean mFullNamesRequested;
// Delayed initialization
private ByteCompression mQualityData;
private ByteArray mQualityChecksums;
private NamesInterface mNames;
private NamesInterface mNameSuffixes;
private int mReadCount = 0; // Number of reads read out (to indicate when to perform checksum check)
private int mQualityCount = 0; // Number of qualities read out (to indicate when to perform checksum check)
/**
* Shallow copy constructor.
* @param cmsr CompressedMemorySequencesReader to copy data from
*/
public CompressedMemorySequencesReader(final CompressedMemorySequencesReader cmsr) {
mDirectory = cmsr.mDirectory;
mCanonicalDirectory = cmsr.mCanonicalDirectory;
mIndex = cmsr.mIndex;
mPositions = cmsr.mPositions;
mChecksums = cmsr.mChecksums;
mSeqData = cmsr.mSeqData;
mNames = cmsr.mNames;
mNameSuffixes = cmsr.mNameSuffixes;
mQualityLoader = cmsr.mQualityLoader;
mQualityData = cmsr.mQualityData;
mQualityChecksums = cmsr.mQualityChecksums;
mStart = cmsr.mStart;
mEnd = cmsr.mEnd;
mRegion = cmsr.mRegion;
mFullNamesRequested = cmsr.mFullNamesRequested;
}
/**
* Construct directly from already prepared components
* @param originPath path to the original source of this data
* @param indexFile index information
* @param seqData compressed sequence data
* @param qualityData compressed quality data
* @param seqChecksums sequence data checksums
* @param qualityChecksums quality data checksums
* @param positions logical start position of each sequence
* @param names names of sequences
* @param nameSuffixes suffixes of names of sequences
* @param region region restriction
*/
public CompressedMemorySequencesReader(File originPath, IndexFile indexFile, BitwiseByteArray seqData, ByteCompression qualityData, ByteArray seqChecksums, ByteArray qualityChecksums, ExtensibleIndex positions, NamesInterface names, NamesInterface nameSuffixes, LongRange region) {
mDirectory = originPath;
mCanonicalDirectory = null;
mIndex = indexFile;
mSeqData = seqData;
mQualityData = qualityData;
mChecksums = seqChecksums;
mQualityChecksums = qualityChecksums;
mQualityLoader = null;
mPositions = positions;
mNames = names;
mNameSuffixes = nameSuffixes;
mFullNamesRequested = mNameSuffixes != null;
mRegion = SequencesReaderFactory.resolveRange(indexFile, region);
mStart = mRegion.getStart();
mEnd = mRegion.getEnd();
assert mEnd >= mStart;
if (mEnd > indexFile.getNumberSequences()) {
throw new IllegalArgumentException("End sequence is greater than number of sequences in SDF");
}
final StringBuilder sb = new StringBuilder("CompressedMemorySequencesReader from non SDF source");
this.infoString(sb);
Diagnostic.developerLog(sb.toString());
}
/**
* Constructor for use in tests. Has no quality data.
* @param data the sequence data
* @param labels the names
* @param counts lengths of the sequence data
* @param min minimum length
* @param max maximum length
* @param type the sequence type
*/
public CompressedMemorySequencesReader(final byte[][] data, final String[] labels, final long[] counts, final int min, final int max, final SequenceType type) {
assert data.length == counts.length;
assert data.length == labels.length;
final long totalLength = ArrayUtils.sum(counts);
mIndex = new IndexFile(Long.MAX_VALUE, type.ordinal(), totalLength, max, min, counts.length);
mDirectory = null;
mCanonicalDirectory = null;
mNames = new ArrayNames(labels);
final int range = type.numberKnownCodes() + type.firstValid();
mSeqData = new BitwiseByteArray(totalLength, CompressedByteArray.minBits(range));
mQualityData = null;
mQualityChecksums = null;
mQualityLoader = null;
mPositions = new LongChunks(data.length + 1);
mChecksums = ByteArray.allocate(data.length);
mStart = 0;
mEnd = mIndex.getNumberSequences();
mRegion = new LongRange(mStart, mEnd);
long pos = 0;
final CRC32 checksum = new CRC32();
for (int i = 0; i < counts.length; ++i) {
mPositions.set(i, pos);
mSeqData.set(pos, data[i], (int) counts[i]);
pos += counts[i];
checksum.update(data[i], 0, (int) counts[i]);
mChecksums.set(i, (byte) checksum.getValue());
checksum.reset();
}
mPositions.set(counts.length, pos);
mFullNamesRequested = false;
final StringBuilder sb = new StringBuilder(LS + "CompressedMemorySequencesReader-tests");
this.infoString(sb);
Diagnostic.userLog(sb.toString());
}
protected CompressedMemorySequencesReader(File dir, IndexFile index, int range, boolean loadNames, boolean loadFullNames, LongRange region) throws IOException {
try {
try {
final long starttime = System.nanoTime();
mFullNamesRequested = loadFullNames;
mDirectory = dir;
mCanonicalDirectory = dir.getCanonicalFile();
mIndex = index;
mRegion = SequencesReaderFactory.resolveRange(index, region);
mStart = mRegion.getStart();
mEnd = mRegion.getEnd();
assert mEnd >= mStart;
if (mEnd > index.getNumberSequences()) {
throw new IllegalArgumentException("End sequence is greater than number of sequences in SDF");
}
final PointerFileHandler handler = PointerFileHandler.getHandler(index, PointerFileHandler.SEQUENCE_POINTER);
final DataFileIndex seqIndex = DataFileIndex.loadSequenceDataFileIndex(index.dataIndexVersion(), dir);
mPositions = new LongChunks(mEnd - mStart + 1);
mChecksums = ByteArray.allocate(mPositions.length() - 1);
final DataFileOpenerFactory openerFactory = new DataFileOpenerFactory(mIndex.getSequenceEncoding(), mIndex.getQualityEncoding(), type());
if (mIndex.hasQuality() && mIndex.hasPerSequenceChecksums()) {
mQualityChecksums = initQualityChecksumArray();
}
mQualityLoader = new QualityLoader(openerFactory, handler, mQualityChecksums);
long dataSize = 0;
if (mEnd - mStart > 0) {
dataSize = SequenceDataLoader.loadPositions(mPositions, seqIndex, mStart, mEnd, dir, handler, mChecksums, mQualityChecksums);
}
mSeqData = new BitwiseByteArray(dataSize, CompressedByteArray.minBits(range));
if (mEnd - mStart > 0) {
final long hash = SequenceDataLoader.loadData(mSeqData, seqIndex, mStart, mEnd, dir, mPositions, mChecksums, openerFactory, handler, mIndex.hasPerSequenceChecksums());
if (mIndex.getVersion() >= IndexFile.SEPARATE_CHECKSUM_VERSION && mStart == 0 && mEnd == index.getNumberSequences()) {
if (hash != mIndex.getDataChecksum()) {
throw new CorruptSdfException("Sequence data failed checksum - SDF may be corrupt: \"" + mDirectory + "\"");
} else {
Diagnostic.developerLog("Sequence data passed checksum");
}
}
}
final long stoptime = System.nanoTime();
final double timetaken = (stoptime - starttime) / 1000000000.0;
final int speedMB = (int) (dataSize * 1000.0 / (stoptime - starttime));
if (loadNames && mIndex.hasNames()) {
loadNames();
loadNameSuffixes(loadFullNames, mIndex.hasSequenceNameSuffixes());
}
final StringBuilder sb = new StringBuilder("CompressedMemorySequencesReader " + speedMB + " MB/sec, time " + timetaken + " sec" + LS);
this.infoString(sb);
Diagnostic.developerLog(sb.toString());
} catch (final ArrayIndexOutOfBoundsException | IllegalArgumentException e) {
throw new CorruptSdfException();
}
} catch (final NegativeArraySizeException e) {
throw new CorruptSdfException();
}
}
@Override
public boolean integrity() {
Exam.assertTrue(checkChecksums());
return true;
}
  @Override
  public boolean globalIntegrity() {
    // No additional cross-object state to verify beyond integrity().
    return integrity();
  }
  @Override
  public long numberSequences() {
    // This reader exposes only the half-open slice [mStart, mEnd) of the SDF.
    return mEnd - mStart;
  }
  @Override
  public File path() {
    // Directory of the SDF this reader was loaded from.
    return mDirectory;
  }
  @Override
  // All data lives in in-memory arrays once construction completes; no handles to release.
  public void close() { } // no need to do anything
@Override
public NamesInterface names() {
if (mNames == null) {
throw new IllegalStateException("Names have not been loaded or are not present in the SDF");
}
return mNames;
}
/**
* Load the names if they haven't already been loaded.
* @throws IOException if an I/O related error occurs
*/
private void loadNames() throws IOException {
mNames = new Names(mDirectory, mRegion, false);
if (mIndex.getVersion() >= IndexFile.SEPARATE_CHECKSUM_VERSION && mRegion.getStart() == 0 && mRegion.getEnd() == mIndex.getNumberSequences()) {
if (mNames.calcChecksum() != mIndex.getNameChecksum()) {
throw new CorruptSdfException("Sequence names failed checksum - SDF may be corrupt: \"" + mDirectory + "\"");
} else {
Diagnostic.developerLog("Sequence names passed checksum");
}
}
}
private void loadNameSuffixes(boolean attemptLoad, boolean suffixExists) throws IOException {
mNameSuffixes = attemptLoad && suffixExists ? new Names(mDirectory, mRegion, true) : new EmptyStringNames(mRegion.getLength());
if (attemptLoad && suffixExists) {
if (mRegion.getStart() == 0 && mRegion.getEnd() == mIndex.getNumberSequences()) {
if (mNameSuffixes.calcChecksum() != mIndex.getNameSuffixChecksum()) {
throw new CorruptSdfException("Sequence name suffixes failed checksum - SDF may be corrupt: \"" + mDirectory + "\"");
} else {
Diagnostic.developerLog("Sequence name suffixes passed checksum");
}
}
}
}
  @Override
  public long lengthBetween(final long start, final long end) {
    // Total bases in sequences [start, end), from the cumulative position index.
    return mPositions.get(end) - mPositions.get(start);
  }
@Override
public int[] sequenceLengths(final long start, final long end) {
final int[] a = new int[(int) (end - start)];
for (long i = start; i < end; ++i) {
a[(int) (i - start)] = (int) (mPositions.get(i + 1) - mPositions.get(i));
}
return a;
}
  @Override
  public SequencesReader copy() {
    // Delegates to the copy constructor (defined elsewhere in this class).
    return new CompressedMemorySequencesReader(this);
  }
// Direct access methods
@Override
public final String name(long sequenceIndex) {
if (!mIndex.hasNames()) {
throw new IllegalStateException("SDF contains no name data");
}
return names().name(sequenceIndex);
}
  @Override
  public final String nameSuffix(long sequenceIndex) {
    // Suffixes require both name data in the SDF and full names requested at load time.
    if (!mIndex.hasNames()) {
      throw new IllegalStateException("SDF contains no name data");
    }
    if (!mFullNamesRequested) {
      throw new IllegalStateException("Full names were not loaded");
    }
    return mNameSuffixes.name(sequenceIndex);
  }
  @Override
  public final int length(final long sequenceIndex) {
    // Length is the difference of adjacent cumulative start positions.
    return (int) (mPositions.get(sequenceIndex + 1) - mPositions.get(sequenceIndex));
  }
  @Override
  public byte sequenceDataChecksum(long sequenceIndex) throws IOException {
    // Stored per-sequence checksum byte (low byte of the CRC-32, see checkChecksums()).
    return mChecksums.get(sequenceIndex);
  }
@Override
public int read(final long sequenceIndex, final byte[] dataOut) {
final int length = read(sequenceIndex, dataOut, 0, length(sequenceIndex));
++mReadCount;
// check every Nth (256) read for CRC corruption w.r.t. the original file.
if ((mReadCount & 0xff) == 0) {
checkChecksum(sequenceIndex, dataOut, length, mChecksums.get(sequenceIndex));
}
return length;
}
@Override
public int read(final long sequenceIndex, final byte[] dataOut, final int start, final int length) {
final int fullLength = length(sequenceIndex);
if ((start + length) > fullLength) {
throw new IllegalArgumentException("Requested data not a subset of sequence data.");
}
if (length > dataOut.length) {
throw new IllegalArgumentException("Array too small got: " + dataOut.length + " required: " + length);
}
mSeqData.get(dataOut, mPositions.get(sequenceIndex) + start, length);
return length;
}
  @Override
  public int readQuality(final long sequenceIndex, final byte[] dest) throws IOException {
    // Read the full quality vector; the 4-arg overload returns 0 when no quality data exists.
    return readQuality(sequenceIndex, dest, 0, length(sequenceIndex));
  }
@Override
public int readQuality(long sequenceIndex, byte[] dest, int start, int length) throws IOException {
pullInQuality();
if (mQualityData == null) {
return 0;
}
final int fullLength = length(sequenceIndex);
if ((start + length) > fullLength) {
throw new IllegalArgumentException("Requested data not a subset of sequence data.");
} else if (length > dest.length) {
throw new IllegalArgumentException("Array too small got: " + dest.length + " required: " + length);
}
mQualityData.get(dest, sequenceIndex, start, length);
// check every Nth (256) read for CRC corruption w.r.t. the original file.
++mQualityCount;
if ((mQualityCount & 0xff) == 0 && length > 0) {
checkChecksum(sequenceIndex, dest, length, mQualityChecksums.get(sequenceIndex));
}
return length;
}
  // Lazily materialize quality data on first access; no-op when already loaded
  // or when the SDF stores no quality information.
  void pullInQuality() throws IOException {
    if (mQualityData == null && mIndex.hasQuality()) {
      mQualityData = mQualityLoader.getQuality();
      // The loader may have created and filled the checksum array itself; adopt its copy.
      mQualityChecksums = mQualityLoader.getQualityChecksums();
      final StringBuilder sb = new StringBuilder();
      sb.append(LS);
      sb.append("Memory Usage\tbytes\tlength").append(LS);
      infoQuality(sb);
      Diagnostic.developerLog(sb.toString());
    }
  }
  // One checksum byte per sequence; mPositions holds numberSequences + 1 entries.
  ByteArray initQualityChecksumArray() {
    return ByteArray.allocate(mPositions.length() - 1);
  }
void infoString(final StringBuilder sb) {
sb.append("Memory Usage\tbytes\tlength").append(LS);
long totalBytes = 0;
sb.append("\t\t").append(StringUtils.commas(mSeqData.bytes())).append("\t").append(StringUtils.commas(mSeqData.length())).append("\tSeqData").append(LS);
totalBytes += mSeqData.bytes();
sb.append("\t\t").append(StringUtils.commas(mChecksums.bytes())).append("\t").append(StringUtils.commas(mChecksums.length())).append("\tSeqChecksums").append(LS);
totalBytes += mPositions.bytes();
totalBytes += infoQuality(sb);
if (mNames != null) {
sb.append("\t\t").append(StringUtils.commas(mNames.bytes())).append("\t").append(StringUtils.commas(mNames.length())).append("\tNames").append(LS);
totalBytes += mNames.bytes();
}
if (mNameSuffixes != null) {
sb.append("\t\t").append(StringUtils.commas(mNameSuffixes.bytes())).append("\t").append(StringUtils.commas(mNameSuffixes.length())).append("\tSuffixes").append(LS);
totalBytes += mNameSuffixes.bytes();
}
sb.append("\t\t").append(StringUtils.commas(mPositions.bytes())).append("\t").append(StringUtils.commas(mPositions.length())).append("\tPositions").append(LS);
totalBytes += mPositions.bytes();
sb.append("\t\t").append(StringUtils.commas(totalBytes)).append("\t\tTotal bytes").append(LS);
}
  // Appends memory-usage rows for quality data (when loaded); returns bytes it consumes.
  // NOTE: the "length" column reports the total base count (last cumulative position)
  // for both rows.
  private long infoQuality(StringBuilder sb) {
    long qualBytes = 0;
    if (mQualityData != null) {
      sb.append("\t\t").append(StringUtils.commas(mQualityData.bytes())).append("\t").append(StringUtils.commas(mPositions.get(mPositions.length() - 1))).append("\tQualityData").append(LS);
      qualBytes += mQualityData.bytes();
      sb.append("\t\t").append(StringUtils.commas(mQualityChecksums.bytes())).append("\t").append(StringUtils.commas(mPositions.get(mPositions.length() - 1))).append("\tQualityChecksums").append(LS);
      qualBytes += mQualityChecksums.bytes();
    }
    return qualBytes;
  }
boolean checkChecksums() {
final CRC32 checksum = new CRC32();
final byte[] buffer = new byte[(int) maxLength()];
for (long i = 0; i < numberSequences(); ++i) {
mSeqData.get(buffer, mPositions.get(i), length(i));
checksum.reset();
checksum.update(buffer, 0, length(i));
if ((byte) checksum.getValue() != mChecksums.get(i)) {
return false;
}
if (mQualityData != null) {
// now check quality checksums
mQualityData.get(buffer, i, 0, length(i));
checksum.reset();
checksum.update(buffer, 0, length(i));
if ((byte) checksum.getValue() != mQualityChecksums.get(i)) {
return false;
}
}
}
return true;
}
static boolean checkChecksum(final long seqId, final byte[] data, final int length, final int sum) {
final CRC32 checksum = new CRC32();
checksum.update(data, 0, length);
if ((byte) checksum.getValue() == sum) {
return true;
} else {
Diagnostic.userLog(String.format("CHECKSUM FAILED FOR SEQUENCE %d%n" + "EXPECTED %04X BUT WAS %04X", seqId, sum, (byte) checksum.getValue()));
return false;
}
}
  // Stored checksum byte for the given sequence (widened to int by the return type).
  int getChecksum(final long seqId) {
    return mChecksums.get(seqId);
  }
  // Lazily loads the quality data (and its per-sequence checksums) on first request.
  // Thread-safety follows the double-checked locking idiom referenced below:
  // mInit/mQualityData/mQualityChecksums are volatile and init() is synchronized.
  private class QualityLoader {
    private final DataFileOpenerFactory mOpenerFactory;
    private final PointerFileHandler mHandler;
    QualityLoader(DataFileOpenerFactory fact, PointerFileHandler handler, ByteArray qualityChecksums) {
      mOpenerFactory = fact;
      mHandler = handler;
      // May be null; init() then allocates and fills the array itself.
      mQualityChecksums = qualityChecksums;
    }
    // http://www.cs.umd.edu/~pugh/java/memoryModel/DoubleCheckedLocking.html
    volatile boolean mInit = false;
    private volatile ByteArray mQualityChecksums;
    private volatile ByteCompression mQualityData;
    private synchronized void init() throws IOException {
      if (!mInit) {
        Diagnostic.developerLog("loading quality data...");
        // "preloaded" tells the loader whether checksums were already computed.
        final boolean preloaded;
        if (mQualityChecksums == null) {
          mQualityChecksums = initQualityChecksumArray();
          preloaded = false;
        } else {
          preloaded = true;
        }
        // Total quality bytes equals the last cumulative sequence position.
        final long dataSize = mPositions.get(mPositions.length() - 1);
        final ByteArray qualData = new CompressedByteArray(dataSize, MAX_QUAL_VALUE, false);
        final DataFileIndex seqIndex = DataFileIndex.loadSequenceDataFileIndex(mIndex.dataIndexVersion(), mDirectory);
        final long hash = SequenceDataLoader.loadQuality(qualData, seqIndex, mStart, mEnd, mDirectory, mPositions, mQualityChecksums, mOpenerFactory, mHandler, preloaded);
        // A whole-SDF checksum comparison is only valid when the full range is open
        // and the index version records a separate quality checksum.
        if (mStart == 0 && mEnd == mIndex.getNumberSequences() && mIndex.getVersion() >= IndexFile.SEPARATE_CHECKSUM_VERSION) {
          if (hash != mIndex.getQualityChecksum()) {
            throw new CorruptSdfException("Sequence qualities failed checksum - SDF may be corrupt: \"" + mDirectory + "\"");
          } else {
            Diagnostic.developerLog("Sequence qualities passed checksum");
          }
        }
        mQualityData = new ByteBaseCompression(qualData, mPositions);
        mQualityData.freeze();
        Diagnostic.developerLog("Loaded qualities for CompressedMemorySequencesReader");
        // Publish only after all state above is fully constructed.
        mInit = true;
        Diagnostic.developerLog("finished loading quality data");
      }
    }
    ByteCompression getQuality() throws IOException {
      if (!mInit) {
        init();
      }
      return mQualityData;
    }
    ByteArray getQualityChecksums() throws IOException {
      if (!mInit) {
        init();
      }
      return mQualityChecksums;
    }
  }
  @Override
  public boolean compressed() {
    // Reports whether the underlying SDF stored its sequence data compressed.
    return mIndex.getSequenceEncoding() == IndexFile.SEQUENCE_ENCODING_COMPRESSED;
  }
  @Override
  public IndexFile index() {
    // Expose the SDF index metadata backing this reader.
    return mIndex;
  }
}
| fb-contrib: ISB
| src/com/rtg/reader/CompressedMemorySequencesReader.java | fb-contrib: ISB |
|
Java | bsd-3-clause | 5f6f6f9ef94e32ebf55b13ae4ea3f1a6cdf9d832 | 0 | xuncl/cocos2d-android,littledou/cocos2d-android,iliyas705/cocos2d-android,weimingtom/cocos2d-android,Michaelangel007/cocos2d-android,tectronics/cocos2d-android,kerocode/cocos2d-android,vilmarbfilho/cocos2d-android | package org.cocos2d.opengl;
import org.cocos2d.nodes.TextureManager;
import org.cocos2d.types.CCColor4B;
import org.cocos2d.types.CCQuad2;
import org.cocos2d.types.CCQuad3;
import org.cocos2d.utils.CCFormatter;
import javax.microedition.khronos.opengles.GL10;
import static javax.microedition.khronos.opengles.GL10.*;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
/**
* A class that implements a Texture Atlas.
* Supported features:
* The atlas file can be a PNG or any other format supported by Texture2D
* Quads can be udpated in runtime
* Quads can be added in runtime
* Quads can be removed in runtime
* Quads can be re-ordered in runtime
* The TextureAtlas capacity can be increased or decreased in runtime
* Color array created on demand
* The quads are rendered using an OpenGL ES vertex array list
*/
public class TextureAtlas {
private static final String TAG = TextureAtlas.class.getSimpleName();
private int totalQuads_;
private int capacity_;
private Texture2D texture_;
private FloatBuffer textureCoordinates;
private FloatBuffer vertexCoordinates;
private ByteBuffer colors;
private ShortBuffer indices;
private boolean withColorArray_;
public int getTotalQuads() {
return totalQuads_;
}
public int capacity() {
return capacity_;
}
public Texture2D getTexture() {
return texture_;
}
public void setTexture(Texture2D tex) {
texture_ = tex;
}
public boolean withColorArray() {
return withColorArray_;
}
public TextureAtlas(String file, int n) {
this(TextureManager.sharedTextureManager().addImage(file), n);
}
public TextureAtlas(Texture2D tex, int n) {
capacity_ = n;
texture_ = tex;
withColorArray_ = false;
ByteBuffer tbb = ByteBuffer.allocateDirect(CCQuad2.size * capacity_ * 4);
tbb.order(ByteOrder.nativeOrder());
textureCoordinates = tbb.asFloatBuffer();
ByteBuffer vbb = ByteBuffer.allocateDirect(CCQuad3.size * capacity_ * 4);
vbb.order(ByteOrder.nativeOrder());
vertexCoordinates = vbb.asFloatBuffer();
ByteBuffer isb = ByteBuffer.allocateDirect(6 * capacity_ * 2);
isb.order(ByteOrder.nativeOrder());
indices = isb.asShortBuffer();
initIndices();
}
public String toString() {
return new CCFormatter().format("<%s = %08X | getTotalQuads = %i>", TextureAtlas.class, this, totalQuads_);
}
public void initColorArray() {
if (!withColorArray_) {
// default color: 255,255,255,255
ByteBuffer cbb = ByteBuffer.allocateDirect(4 * capacity_ * 1);
for (int i = 0; i < CCColor4B.size * capacity_ * 1; i++) {
cbb.put(i, (byte) 0xff);
}
colors = cbb;
withColorArray_ = true;
}
}
public void initIndices() {
for (int i = 0; i < capacity_; i++) {
indices.put((short) (i * 6 + 0), (short) (i * 4 + 0));
indices.put((short) (i * 6 + 1), (short) (i * 4 + 1));
indices.put((short) (i * 6 + 2), (short) (i * 4 + 2));
// inverted index.
indices.put((short) (i * 6 + 5), (short) (i * 4 + 1));
indices.put((short) (i * 6 + 4), (short) (i * 4 + 2));
indices.put((short) (i * 6 + 3), (short) (i * 4 + 3));
}
}
public void updateQuad(CCQuad2 quadT, CCQuad3 quadV, int index) {
assert (index >= 0 && index < capacity_) : "update quad with texture_: Invalid index";
totalQuads_ = Math.max(index + 1, totalQuads_);
putTexCoords(textureCoordinates, quadT.ccQuad2(), index);
putVertex(vertexCoordinates, quadV.ccQuad3(), index);
}
public void updateColor(CCColor4B color, int index) {
assert (index >= 0 && index < capacity_) : "update color with quad color: Invalid index";
totalQuads_ = Math.max(index + 1, totalQuads_);
if (!withColorArray_)
initColorArray();
if (withColorArray_)
putColor(colors, color.ccColor4B(), index);
}
public void insertQuad(CCQuad2 texCoords, CCQuad3 vertexCoords, int index) {
assert (index >= 0 && index < capacity_) : "insert quad with texture_: Invalid index";
totalQuads_++;
int remaining = (totalQuads_ - 1) - index;
// last object doesn't need to be moved
if (remaining > 0) {
// tex coordinates
arraycopyTexture(textureCoordinates, index, textureCoordinates, index + 1, remaining);
// vertexCoordinates_
arraycopyVertex(vertexCoordinates, index, vertexCoordinates, index + 1, remaining);
// colors_
if (withColorArray_) {
arraycopyColor(colors, index, colors, index + 1, remaining);
}
}
putTexCoords(textureCoordinates, texCoords.ccQuad2(), index);
putVertex(vertexCoordinates, vertexCoords.ccQuad3(), index);
}
public void insertQuad(int from, int to) {
assert (to >= 0 && to < totalQuads_) : "insertQuadFromIndex:atIndex: Invalid index";
assert (from >= 0 && from < totalQuads_) : "insertQuadFromIndex:atIndex: Invalid index";
if (from == to)
return;
int size = Math.abs(from - to);
int dst = from;
int src = from + 1;
if (from > to) {
dst = to + 1;
src = to;
}
// tex coordinates
float[] texCoordsBackup = getTexCoords(textureCoordinates, from);
arraycopyTexture(textureCoordinates, src, textureCoordinates, dst, size);
putTexCoords(textureCoordinates, texCoordsBackup, to);
// vertexCoordinates_ coordinates
float[] vertexQuadBackup = getVertex(vertexCoordinates, from);
arraycopyVertex(vertexCoordinates, src, vertexCoordinates, dst, size);
putVertex(vertexCoordinates, vertexQuadBackup, to);
// colors_
if (withColorArray_) {
byte[] colorsBackup = getColor(colors, from);
arraycopyColor(colors, src, colors, dst, size);
putColor(colors, colorsBackup, to);
}
}
public void removeQuad(int index) {
assert (index >= 0 && index < totalQuads_) : "removeQuadAtIndex: Invalid index";
int remaining = (totalQuads_ - 1) - index;
// last object doesn't need to be moved
if (remaining > 0) {
// tex coordinates
arraycopyTexture(textureCoordinates, index + 1, textureCoordinates, index, remaining);
// vertexCoordinates_
arraycopyVertex(vertexCoordinates, index + 1, vertexCoordinates, index, remaining);
// colors_
if (withColorArray_) {
arraycopyColor(colors, index + 1, colors, index, remaining);
}
}
totalQuads_--;
}
public void removeAllQuads() {
totalQuads_ = 0;
}
public void resizeCapacity(int newCapacity) {
if (newCapacity == capacity_)
return;
// update capacity and getTotalQuads
totalQuads_ = Math.min(totalQuads_, newCapacity);
capacity_ = newCapacity;
ByteBuffer tbb = ByteBuffer.allocateDirect(CCQuad2.size * newCapacity * 4);
tbb.order(ByteOrder.nativeOrder());
FloatBuffer tmpTexCoords = tbb.asFloatBuffer();
tmpTexCoords.put(textureCoordinates);
textureCoordinates = tmpTexCoords;
ByteBuffer vbb = ByteBuffer.allocateDirect(CCQuad3.size * newCapacity * 4);
vbb.order(ByteOrder.nativeOrder());
FloatBuffer tmpVertexCoords = vbb.asFloatBuffer();
tmpVertexCoords.put(vertexCoordinates);
vertexCoordinates = tmpVertexCoords;
ByteBuffer isb = ByteBuffer.allocateDirect(6 * newCapacity * 2);
isb.order(ByteOrder.nativeOrder());
ShortBuffer tmpIndices = isb.asShortBuffer();
// tmpIndices.put(indices);
indices = tmpIndices;
initIndices();
if (withColorArray_) {
ByteBuffer cbb = ByteBuffer.allocateDirect(CCColor4B.size * newCapacity * 1);
ByteBuffer tmpColors = cbb;
tmpColors.put(colors);
colors = tmpColors;
}
}
public void drawQuads(GL10 gl) {
draw(gl, totalQuads_);
}
public void draw(GL10 gl, int n) {
texture_.loadTexture(gl);
gl.glBindTexture(GL10.GL_TEXTURE_2D, texture_.name());
gl.glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
gl.glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexCoordinates);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureCoordinates);
if (withColorArray_)
gl.glColorPointer(4, GL10.GL_UNSIGNED_BYTE, 0, colors);
gl.glDrawElements(GL10.GL_TRIANGLES, n * 6, GL10.GL_UNSIGNED_SHORT, indices);
}
private float[] getTexCoords(FloatBuffer src, int index) {
float[] quadT = new float[CCQuad2.size];
for (int i = 0; i < CCQuad2.size; i++) {
quadT[i] = src.get(index * CCQuad2.size + i);
}
return quadT;
}
private void putTexCoords(FloatBuffer dst, float[] quadT, int index) {
for (int i = 0; i < CCQuad2.size; i++) {
dst.put(index * CCQuad2.size + i, quadT[i]);
}
}
private float[] getVertex(FloatBuffer src, int index) {
float[] quadV = new float[CCQuad3.size];
for (int i = 0; i < CCQuad3.size; i++) {
quadV[i] = src.get(index * CCQuad3.size + i);
}
return quadV;
}
private void putVertex(FloatBuffer dst, float[] quadV, int index) {
for (int i = 0; i < CCQuad3.size; i++) {
dst.put(index * CCQuad3.size + i, quadV[i]);
}
}
private byte[] getColor(ByteBuffer src, int index) {
byte[] color = new byte[CCColor4B.size];
for (int i = 0; i < CCColor4B.size; i++) {
color[i] = src.get(index * CCColor4B.size + i);
}
return color;
}
private void putColor(ByteBuffer dst, byte[] color, int index) {
for (int i = 0; i < CCColor4B.size; i++) {
dst.put(index * CCColor4B.size + i, color[i]);
}
}
private void arraycopyTexture(FloatBuffer src, int srcPos, FloatBuffer dst, int dstPos, int length) {
if (src == dst) {
memmoveFloat(src, srcPos * CCQuad2.size, dst, dstPos * CCQuad2.size, length * CCQuad2.size);
} else {
memcopyFloat(src, srcPos * CCQuad2.size, dst, dstPos * CCQuad2.size, length * CCQuad2.size);
}
}
private void arraycopyVertex(FloatBuffer src, int srcPos, FloatBuffer dst, int dstPos, int length) {
if (src == dst) {
memmoveFloat(src, srcPos * CCQuad3.size, dst, dstPos * CCQuad3.size, length * CCQuad3.size);
} else {
memcopyFloat(src, srcPos * CCQuad3.size, dst, dstPos * CCQuad3.size, length * CCQuad3.size);
}
}
private void arraycopyColor(ByteBuffer src, int srcPos, ByteBuffer dst, int dstPos, int length) {
if (src == dst) {
memmoveByte(src, srcPos * CCColor4B.size, dst, dstPos * CCColor4B.size, length * CCColor4B.size);
} else {
memcopyByte(src, srcPos * CCColor4B.size, dst, dstPos * CCColor4B.size, length * CCColor4B.size);
}
}
private void memmoveFloat(FloatBuffer src, int from, FloatBuffer dst, int to, int size) {
if (to < from) {
memcopyFloat(src, from, dst, to, size);
} else {
for (int i = size - 1; i >= 0; i--) {
dst.put(i + to, src.get(i + from));
}
}
}
private void memcopyFloat(FloatBuffer src, int from, FloatBuffer dst, int to, int size) {
for (int i = 0; i < size; i++) {
dst.put(i + to, src.get(i + from));
}
}
private void memmoveByte(ByteBuffer src, int from, ByteBuffer dst, int to, int size) {
if (to < from) {
memcopyByte(src, from, dst, to, size);
} else {
for (int i = size - 1; i >= 0; i--) {
dst.put(i + to, src.get(i + from));
}
}
}
private void memcopyByte(ByteBuffer src, int from, ByteBuffer dst, int to, int size) {
for (int i = 0; i < size; i++) {
dst.put(i + to, src.get(i + from));
}
}
}
| src/org/cocos2d/opengl/TextureAtlas.java | package org.cocos2d.opengl;
import org.cocos2d.nodes.TextureManager;
import org.cocos2d.types.CCColor4B;
import org.cocos2d.types.CCQuad2;
import org.cocos2d.types.CCQuad3;
import org.cocos2d.utils.CCFormatter;
import javax.microedition.khronos.opengles.GL10;
import static javax.microedition.khronos.opengles.GL10.*;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
/**
* A class that implements a Texture Atlas.
* Supported features:
* The atlas file can be a PNG or any other format supported by Texture2D
* Quads can be udpated in runtime
* Quads can be added in runtime
* Quads can be removed in runtime
* Quads can be re-ordered in runtime
* The TextureAtlas capacity can be increased or decreased in runtime
* Color array created on demand
* The quads are rendered using an OpenGL ES vertex array list
*/
public class TextureAtlas {
private static final String TAG = TextureAtlas.class.getSimpleName();
private int totalQuads_;
private int capacity_;
private Texture2D texture_;
private FloatBuffer textureCoordinates;
private FloatBuffer vertexCoordinates;
private ByteBuffer colors;
private ShortBuffer indices;
private boolean withColorArray_;
public int totalQuads() {
return totalQuads_;
}
public int capacity() {
return capacity_;
}
public Texture2D getTexture() {
return texture_;
}
public void setTexture(Texture2D tex) {
texture_ = tex;
}
public boolean withColorArray() {
return withColorArray_;
}
public TextureAtlas(String file, int n) {
this(TextureManager.sharedTextureManager().addImage(file), n);
}
public TextureAtlas(Texture2D tex, int n) {
capacity_ = n;
texture_ = tex;
withColorArray_ = false;
ByteBuffer tbb = ByteBuffer.allocateDirect(CCQuad2.size * capacity_ * 4);
tbb.order(ByteOrder.nativeOrder());
textureCoordinates = tbb.asFloatBuffer();
ByteBuffer vbb = ByteBuffer.allocateDirect(CCQuad3.size * capacity_ * 4);
vbb.order(ByteOrder.nativeOrder());
vertexCoordinates = vbb.asFloatBuffer();
ByteBuffer isb = ByteBuffer.allocateDirect(6 * capacity_ * 2);
isb.order(ByteOrder.nativeOrder());
indices = isb.asShortBuffer();
initIndices();
}
public String toString() {
return new CCFormatter().format("<%s = %08X | totalQuads = %i>", TextureAtlas.class, this, totalQuads_);
}
public void initColorArray() {
if (!withColorArray_) {
// default color: 255,255,255,255
ByteBuffer cbb = ByteBuffer.allocateDirect(4 * capacity_ * 1);
for (int i = 0; i < CCColor4B.size * capacity_ * 1; i++) {
cbb.put(i, (byte) 0xff);
}
colors = cbb;
withColorArray_ = true;
}
}
public void initIndices() {
for (int i = 0; i < capacity_; i++) {
indices.put((short) (i * 6 + 0), (short) (i * 4 + 0));
indices.put((short) (i * 6 + 1), (short) (i * 4 + 1));
indices.put((short) (i * 6 + 2), (short) (i * 4 + 2));
// inverted index.
indices.put((short) (i * 6 + 5), (short) (i * 4 + 1));
indices.put((short) (i * 6 + 4), (short) (i * 4 + 2));
indices.put((short) (i * 6 + 3), (short) (i * 4 + 3));
}
}
public void updateQuad(CCQuad2 quadT, CCQuad3 quadV, int index) {
assert (index >= 0 && index < capacity_) : "update quad with texture_: Invalid index";
totalQuads_ = Math.max(index + 1, totalQuads_);
putTexCoords(textureCoordinates, quadT.ccQuad2(), index);
putVertex(vertexCoordinates, quadV.ccQuad3(), index);
}
public void updateColor(CCColor4B color, int index) {
assert (index >= 0 && index < capacity_) : "update color with quad color: Invalid index";
totalQuads_ = Math.max(index + 1, totalQuads_);
if (!withColorArray_)
initColorArray();
if (withColorArray_)
putColor(colors, color.ccColor4B(), index);
}
public void insertQuad(CCQuad2 texCoords, CCQuad3 vertexCoords, int index) {
assert (index >= 0 && index < capacity_) : "insert quad with texture_: Invalid index";
totalQuads_++;
int remaining = (totalQuads_ - 1) - index;
// last object doesn't need to be moved
if (remaining > 0) {
// tex coordinates
arraycopyTexture(textureCoordinates, index, textureCoordinates, index + 1, remaining);
// vertexCoordinates_
arraycopyVertex(vertexCoordinates, index, vertexCoordinates, index + 1, remaining);
// colors_
if (withColorArray_) {
arraycopyColor(colors, index, colors, index + 1, remaining);
}
}
putTexCoords(textureCoordinates, texCoords.ccQuad2(), index);
putVertex(vertexCoordinates, vertexCoords.ccQuad3(), index);
}
public void insertQuad(int from, int to) {
assert (to >= 0 && to < totalQuads_) : "insertQuadFromIndex:atIndex: Invalid index";
assert (from >= 0 && from < totalQuads_) : "insertQuadFromIndex:atIndex: Invalid index";
if (from == to)
return;
int size = Math.abs(from - to);
int dst = from;
int src = from + 1;
if (from > to) {
dst = to + 1;
src = to;
}
// tex coordinates
float[] texCoordsBackup = getTexCoords(textureCoordinates, from);
arraycopyTexture(textureCoordinates, src, textureCoordinates, dst, size);
putTexCoords(textureCoordinates, texCoordsBackup, to);
// vertexCoordinates_ coordinates
float[] vertexQuadBackup = getVertex(vertexCoordinates, from);
arraycopyVertex(vertexCoordinates, src, vertexCoordinates, dst, size);
putVertex(vertexCoordinates, vertexQuadBackup, to);
// colors_
if (withColorArray_) {
byte[] colorsBackup = getColor(colors, from);
arraycopyColor(colors, src, colors, dst, size);
putColor(colors, colorsBackup, to);
}
}
public void removeQuad(int index) {
assert (index >= 0 && index < totalQuads_) : "removeQuadAtIndex: Invalid index";
int remaining = (totalQuads_ - 1) - index;
// last object doesn't need to be moved
if (remaining > 0) {
// tex coordinates
arraycopyTexture(textureCoordinates, index + 1, textureCoordinates, index, remaining);
// vertexCoordinates_
arraycopyVertex(vertexCoordinates, index + 1, vertexCoordinates, index, remaining);
// colors_
if (withColorArray_) {
arraycopyColor(colors, index + 1, colors, index, remaining);
}
}
totalQuads_--;
}
public void removeAllQuads() {
totalQuads_ = 0;
}
public void resizeCapacity(int newCapacity) {
if (newCapacity == capacity_)
return;
// update capacity and totalQuads
totalQuads_ = Math.min(totalQuads_, newCapacity);
capacity_ = newCapacity;
ByteBuffer tbb = ByteBuffer.allocateDirect(CCQuad2.size * newCapacity * 4);
tbb.order(ByteOrder.nativeOrder());
FloatBuffer tmpTexCoords = tbb.asFloatBuffer();
tmpTexCoords.put(textureCoordinates);
textureCoordinates = tmpTexCoords;
ByteBuffer vbb = ByteBuffer.allocateDirect(CCQuad3.size * newCapacity * 4);
vbb.order(ByteOrder.nativeOrder());
FloatBuffer tmpVertexCoords = vbb.asFloatBuffer();
tmpVertexCoords.put(vertexCoordinates);
vertexCoordinates = tmpVertexCoords;
ByteBuffer isb = ByteBuffer.allocateDirect(6 * newCapacity * 2);
isb.order(ByteOrder.nativeOrder());
ShortBuffer tmpIndices = isb.asShortBuffer();
// tmpIndices.put(indices);
indices = tmpIndices;
initIndices();
if (withColorArray_) {
ByteBuffer cbb = ByteBuffer.allocateDirect(CCColor4B.size * newCapacity * 1);
ByteBuffer tmpColors = cbb;
tmpColors.put(colors);
colors = tmpColors;
}
}
public void drawQuads(GL10 gl) {
draw(gl, totalQuads_);
}
public void draw(GL10 gl, int n) {
texture_.loadTexture(gl);
gl.glBindTexture(GL10.GL_TEXTURE_2D, texture_.name());
gl.glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
gl.glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexCoordinates);
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureCoordinates);
if (withColorArray_)
gl.glColorPointer(4, GL10.GL_UNSIGNED_BYTE, 0, colors);
gl.glDrawElements(GL10.GL_TRIANGLES, n * 6, GL10.GL_UNSIGNED_SHORT, indices);
}
private float[] getTexCoords(FloatBuffer src, int index) {
float[] quadT = new float[CCQuad2.size];
for (int i = 0; i < CCQuad2.size; i++) {
quadT[i] = src.get(index * CCQuad2.size + i);
}
return quadT;
}
private void putTexCoords(FloatBuffer dst, float[] quadT, int index) {
for (int i = 0; i < CCQuad2.size; i++) {
dst.put(index * CCQuad2.size + i, quadT[i]);
}
}
private float[] getVertex(FloatBuffer src, int index) {
float[] quadV = new float[CCQuad3.size];
for (int i = 0; i < CCQuad3.size; i++) {
quadV[i] = src.get(index * CCQuad3.size + i);
}
return quadV;
}
private void putVertex(FloatBuffer dst, float[] quadV, int index) {
for (int i = 0; i < CCQuad3.size; i++) {
dst.put(index * CCQuad3.size + i, quadV[i]);
}
}
private byte[] getColor(ByteBuffer src, int index) {
byte[] color = new byte[CCColor4B.size];
for (int i = 0; i < CCColor4B.size; i++) {
color[i] = src.get(index * CCColor4B.size + i);
}
return color;
}
private void putColor(ByteBuffer dst, byte[] color, int index) {
for (int i = 0; i < CCColor4B.size; i++) {
dst.put(index * CCColor4B.size + i, color[i]);
}
}
private void arraycopyTexture(FloatBuffer src, int srcPos, FloatBuffer dst, int dstPos, int length) {
if (src == dst) {
memmoveFloat(src, srcPos * CCQuad2.size, dst, dstPos * CCQuad2.size, length * CCQuad2.size);
} else {
memcopyFloat(src, srcPos * CCQuad2.size, dst, dstPos * CCQuad2.size, length * CCQuad2.size);
}
}
private void arraycopyVertex(FloatBuffer src, int srcPos, FloatBuffer dst, int dstPos, int length) {
if (src == dst) {
memmoveFloat(src, srcPos * CCQuad3.size, dst, dstPos * CCQuad3.size, length * CCQuad3.size);
} else {
memcopyFloat(src, srcPos * CCQuad3.size, dst, dstPos * CCQuad3.size, length * CCQuad3.size);
}
}
private void arraycopyColor(ByteBuffer src, int srcPos, ByteBuffer dst, int dstPos, int length) {
if (src == dst) {
memmoveByte(src, srcPos * CCColor4B.size, dst, dstPos * CCColor4B.size, length * CCColor4B.size);
} else {
memcopyByte(src, srcPos * CCColor4B.size, dst, dstPos * CCColor4B.size, length * CCColor4B.size);
}
}
private void memmoveFloat(FloatBuffer src, int from, FloatBuffer dst, int to, int size) {
if (to < from) {
memcopyFloat(src, from, dst, to, size);
} else {
for (int i = size - 1; i >= 0; i--) {
dst.put(i + to, src.get(i + from));
}
}
}
private void memcopyFloat(FloatBuffer src, int from, FloatBuffer dst, int to, int size) {
for (int i = 0; i < size; i++) {
dst.put(i + to, src.get(i + from));
}
}
private void memmoveByte(ByteBuffer src, int from, ByteBuffer dst, int to, int size) {
if (to < from) {
memcopyByte(src, from, dst, to, size);
} else {
for (int i = size - 1; i >= 0; i--) {
dst.put(i + to, src.get(i + from));
}
}
}
private void memcopyByte(ByteBuffer src, int from, ByteBuffer dst, int to, int size) {
for (int i = 0; i < size; i++) {
dst.put(i + to, src.get(i + from));
}
}
}
| Use standard accessor convention for totalQuads. | src/org/cocos2d/opengl/TextureAtlas.java | Use standard accessor convention for totalQuads. |
|
Java | bsd-3-clause | 8084386288de590ba50866eb281dc9bb433d6994 | 0 | DataBiosphere/jade-data-repo,DataBiosphere/jade-data-repo,DataBiosphere/jade-data-repo,DataBiosphere/jade-data-repo | package bio.terra.service.filedata;
import bio.terra.app.configuration.EcmConfiguration;
import bio.terra.app.controller.exception.TooManyRequestsException;
import bio.terra.app.logging.PerformanceLogger;
import bio.terra.app.model.GoogleRegion;
import bio.terra.common.CloudPlatformWrapper;
import bio.terra.common.exception.FeatureNotImplementedException;
import bio.terra.common.exception.InvalidCloudPlatformException;
import bio.terra.common.exception.UnauthorizedException;
import bio.terra.common.iam.AuthenticatedUserRequest;
import bio.terra.model.BillingProfileModel;
import bio.terra.model.DRSAccessMethod;
import bio.terra.model.DRSAccessURL;
import bio.terra.model.DRSAuthorizations;
import bio.terra.model.DRSChecksum;
import bio.terra.model.DRSContentsObject;
import bio.terra.model.DRSObject;
import bio.terra.model.DRSPassportRequestModel;
import bio.terra.model.SnapshotSummaryModel;
import bio.terra.service.auth.iam.IamAction;
import bio.terra.service.auth.iam.IamResourceType;
import bio.terra.service.auth.iam.IamService;
import bio.terra.service.common.gcs.GcsUriUtils;
import bio.terra.service.configuration.ConfigEnum;
import bio.terra.service.configuration.ConfigurationService;
import bio.terra.service.filedata.azure.blobstore.AzureBlobStorePdao;
import bio.terra.service.filedata.azure.util.BlobSasTokenOptions;
import bio.terra.service.filedata.exception.DrsObjectNotFoundException;
import bio.terra.service.filedata.exception.FileSystemExecutionException;
import bio.terra.service.filedata.exception.InvalidDrsIdException;
import bio.terra.service.filedata.google.gcs.GcsProjectFactory;
import bio.terra.service.job.JobService;
import bio.terra.service.resourcemanagement.ResourceService;
import bio.terra.service.resourcemanagement.azure.AzureStorageAccountResource;
import bio.terra.service.resourcemanagement.azure.AzureStorageAccountResource.ContainerType;
import bio.terra.service.resourcemanagement.google.GoogleBucketResource;
import bio.terra.service.snapshot.Snapshot;
import bio.terra.service.snapshot.SnapshotProject;
import bio.terra.service.snapshot.SnapshotService;
import bio.terra.service.snapshot.SnapshotSummary;
import bio.terra.service.snapshot.exception.SnapshotNotFoundException;
import com.azure.storage.blob.sas.BlobSasPermission;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.Bucket;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.Storage.BucketGetOption;
import com.google.cloud.storage.StorageOptions;
import com.google.common.annotations.VisibleForTesting;
import java.net.URL;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import org.apache.commons.collections4.map.PassiveExpiringMap;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/*
 * WARNING: if making any changes to this class make sure to notify the #dsp-batch channel! Describe the change and
 * any consequences downstream to DRS clients.
 */
@Component
public class DrsService {

  private final Logger logger = LoggerFactory.getLogger(DrsService.class);

  private static final String ACCESS_ID_PREFIX_GCP = "gcp-";
  private static final String ACCESS_ID_PREFIX_AZURE = "az-";
  private static final String ACCESS_ID_PREFIX_PASSPORT = "passport-";
  private static final String DRS_OBJECT_VERSION = "0";
  // Lifetime of signed access URLs handed back to DRS clients.
  private static final Duration URL_TTL = Duration.ofMinutes(15);
  // Query parameters attached to signed GCS URLs: billing project and requesting user (audit).
  private static final String USER_PROJECT_QUERY_PARAM = "userProject";
  private static final String REQUESTED_BY_QUERY_PARAM = "requestedBy";
  // atomic counter that we incr on request arrival and decr on request response
  private final AtomicInteger currentDRSRequests = new AtomicInteger(0);

  private final SnapshotService snapshotService;
  private final FileService fileService;
  private final DrsIdService drsIdService;
  private final IamService samService;
  private final ResourceService resourceService;
  private final ConfigurationService configurationService;
  private final JobService jobService;
  private final PerformanceLogger performanceLogger;
  private final AzureBlobStorePdao azureBlobStorePdao;
  private final GcsProjectFactory gcsProjectFactory;
  private final EcmConfiguration ecmConfiguration;

  // Per-instance caches of snapshot lookups; entries expire 15 minutes after insertion so
  // repeated DRS resolutions against the same snapshot avoid redundant database reads.
  private final Map<UUID, SnapshotProject> snapshotProjectsCache =
      Collections.synchronizedMap(new PassiveExpiringMap<>(15, TimeUnit.MINUTES));
  private final Map<UUID, SnapshotCacheResult> snapshotCache =
      Collections.synchronizedMap(new PassiveExpiringMap<>(15, TimeUnit.MINUTES));
  private final Map<UUID, SnapshotSummaryModel> snapshotSummariesCache =
      Collections.synchronizedMap(new PassiveExpiringMap<>(15, TimeUnit.MINUTES));

  /** Spring-injected constructor; all collaborators are required. */
  @Autowired
  public DrsService(
      SnapshotService snapshotService,
      FileService fileService,
      DrsIdService drsIdService,
      IamService samService,
      ResourceService resourceService,
      ConfigurationService configurationService,
      JobService jobService,
      PerformanceLogger performanceLogger,
      AzureBlobStorePdao azureBlobStorePdao,
      GcsProjectFactory gcsProjectFactory,
      EcmConfiguration ecmConfiguration) {
    this.snapshotService = snapshotService;
    this.fileService = fileService;
    this.drsIdService = drsIdService;
    this.samService = samService;
    this.resourceService = resourceService;
    this.configurationService = configurationService;
    this.jobService = jobService;
    this.performanceLogger = performanceLogger;
    this.azureBlobStorePdao = azureBlobStorePdao;
    this.gcsProjectFactory = gcsProjectFactory;
    this.ecmConfiguration = ecmConfiguration;
  }

  /**
   * Try-with-resources guard that throttles concurrent DRS lookups. Construction increments
   * {@link #currentDRSRequests} after checking it against a per-pod limit; {@link #close()}
   * decrements it when the request completes.
   *
   * <p>NOTE(review): assumes {@code jobService.getActivePodCount()} returns at least 1 —
   * a zero pod count would divide by zero here; confirm upstream guarantee.
   */
  private class DrsRequestResource implements AutoCloseable {

    DrsRequestResource() {
      // make sure not too many requests are being made at once
      int podCount = jobService.getActivePodCount();
      int maxDRSLookups = configurationService.getParameterValue(ConfigEnum.DRS_LOOKUP_MAX);
      int max = maxDRSLookups / podCount;
      logger.info("Max number of DRS lookups allowed : " + max);
      logger.info("Current number of requests being made : " + currentDRSRequests);

      if (currentDRSRequests.get() >= max) {
        throw new TooManyRequestsException(
            "Too many requests are being made at once. Please try again later.");
      }
      currentDRSRequests.incrementAndGet();
    }

    @Override
    public void close() {
      currentDRSRequests.decrementAndGet();
    }
  }

  /**
   * Determine the acceptable means of authentication for a given DRS ID, including the passport
   * issuers when supported.
   *
   * @param drsObjectId the object ID for which to look up authorizations
   * @return the `DrsAuthorizations` for this ID
   * @throws IllegalArgumentException if there is an issue with the object id
   * @throws SnapshotNotFoundException if the snapshot for the DRS object cannot be found
   * @throws TooManyRequestsException if there are too many concurrent DRS lookup requests
   */
  public DRSAuthorizations lookupAuthorizationsByDrsId(String drsObjectId) {
    try (DrsRequestResource r = new DrsRequestResource()) {
      SnapshotCacheResult snapshot = lookupSnapshotForDRSObject(drsObjectId);
      SnapshotSummaryModel snapshotSummary = getSnapshotSummary(snapshot.id);
      return buildDRSAuth(SnapshotSummary.passportAuthorizationAvailable(snapshotSummary));
    }
  }

  /**
   * Builds the supported-auth descriptor: bearer auth is always offered; passport auth (with the
   * configured RAS issuer) is added only when available for the snapshot.
   */
  private DRSAuthorizations buildDRSAuth(boolean passportAuthorizationAvailable) {
    DRSAuthorizations auths = new DRSAuthorizations();
    if (passportAuthorizationAvailable) {
      auths.addSupportedTypesItem(DRSAuthorizations.SupportedTypesEnum.PASSPORTAUTH);
      auths.addPassportAuthIssuersItem(ecmConfiguration.getRasIssuer());
    }
    auths.addSupportedTypesItem(DRSAuthorizations.SupportedTypesEnum.BEARERAUTH);
    return auths;
  }

  /**
   * Look up the DRS object for a DRS object ID.
   *
   * @param drsObjectId the object ID to look up
   * @param drsPassportRequestModel includes RAS passport, used for authorization and 'expand' var -
   *     if expand is false and drsObjectId refers to a bundle, then the returned array contains
   *     only those objects directly contained in the bundle
   * @return the DRS object for this ID
   * @throws IllegalArgumentException if there is an issue with the object id
   * @throws SnapshotNotFoundException if the snapshot for the DRS object cannot be found
   * @throws TooManyRequestsException if there are too many concurrent DRS lookup requests
   */
  public DRSObject lookupObjectByDrsIdPassport(
      String drsObjectId, DRSPassportRequestModel drsPassportRequestModel) {
    try (DrsRequestResource r = new DrsRequestResource()) {
      SnapshotCacheResult snapshot = lookupSnapshotForDRSObject(drsObjectId);
      verifyPassportAuth(snapshot.id, drsPassportRequestModel);
      return lookupDRSObjectAfterAuth(
          drsPassportRequestModel.isExpand(), snapshot, drsObjectId, null, true);
    }
  }

  /**
   * Look up the DRS object for a DRS object ID.
   *
   * @param authUser the user to authenticate this request for
   * @param drsObjectId the object ID to look up
   * @param expand if false and drsObjectId refers to a bundle, then the returned array contains
   *     only those objects directly contained in the bundle
   * @return the DRS object for this ID
   * @throws IllegalArgumentException if there is an issue with the object id
   * @throws SnapshotNotFoundException if the snapshot for the DRS object cannot be found
   * @throws TooManyRequestsException if there are too many concurrent DRS lookup requests
   */
  public DRSObject lookupObjectByDrsId(
      AuthenticatedUserRequest authUser, String drsObjectId, Boolean expand) {
    try (DrsRequestResource r = new DrsRequestResource()) {
      SnapshotCacheResult cachedSnapshot = lookupSnapshotForDRSObject(drsObjectId);
      String samTimer = performanceLogger.timerStart();

      // Bearer-token path: the caller must hold READ_DATA on the snapshot in Sam.
      samService.verifyAuthorization(
          authUser,
          IamResourceType.DATASNAPSHOT,
          cachedSnapshot.id.toString(),
          IamAction.READ_DATA);

      performanceLogger.timerEndAndLog(
          samTimer,
          drsObjectId, // not a flight, so no job id
          this.getClass().getName(),
          "samService.verifyAuthorization");
      return lookupDRSObjectAfterAuth(expand, cachedSnapshot, drsObjectId, authUser, false);
    }
  }

  /**
   * Resolves the snapshot referenced by a DRS object id, going through the expiring cache.
   *
   * @throws InvalidDrsIdException if the embedded snapshot id is not a valid UUID
   * @throws DrsObjectNotFoundException if no snapshot exists for the id
   */
  @VisibleForTesting
  SnapshotCacheResult lookupSnapshotForDRSObject(String drsObjectId) {
    DrsId drsId = drsIdService.fromObjectId(drsObjectId);
    SnapshotCacheResult snapshot;
    try {
      UUID snapshotId = UUID.fromString(drsId.getSnapshotId());
      // We only look up DRS ids for unlocked snapshots.
      String retrieveTimer = performanceLogger.timerStart();

      snapshot = getSnapshot(snapshotId);

      performanceLogger.timerEndAndLog(
          retrieveTimer,
          drsObjectId, // not a flight, so no job id
          this.getClass().getName(),
          "snapshotService.retrieveAvailable");
      return snapshot;
    } catch (IllegalArgumentException ex) {
      throw new InvalidDrsIdException("Invalid object id format '" + drsObjectId + "'", ex);
    } catch (SnapshotNotFoundException ex) {
      throw new DrsObjectNotFoundException(
          "No snapshot found for DRS object id '" + drsObjectId + "'", ex);
    }
  }

  /**
   * Validates the supplied RAS passports against the snapshot's passport criteria.
   *
   * @throws UnauthorizedException if the passports do not grant access
   */
  void verifyPassportAuth(UUID snapshotId, DRSPassportRequestModel drsPassportRequestModel) {
    SnapshotSummaryModel snapshotSummary = getSnapshotSummary(snapshotId);
    List<String> passports = drsPassportRequestModel.getPassports();
    if (!snapshotService.verifyPassportAuth(snapshotSummary, passports).isValid()) {
      throw new UnauthorizedException("User is not authorized to see drs object.");
    }
  }

  /**
   * Post-authorization half of a DRS lookup: fetches the filesystem item behind the DRS id and
   * converts it to a {@link DRSObject} (file or directory).
   *
   * @param expand when true, directories are enumerated to full depth (-1); otherwise depth 1
   * @param passportAuth whether the caller authenticated via passport (affects access methods)
   */
  private DRSObject lookupDRSObjectAfterAuth(
      boolean expand,
      SnapshotCacheResult snapshot,
      String drsObjectId,
      AuthenticatedUserRequest authUser,
      boolean passportAuth) {
    DrsId drsId = drsIdService.fromObjectId(drsObjectId);
    SnapshotProject snapshotProject = getSnapshotProject(snapshot.id);
    int depth = (expand ? -1 : 1);

    FSItem fsObject;
    try {
      String lookupTimer = performanceLogger.timerStart();
      fsObject = fileService.lookupSnapshotFSItem(snapshotProject, drsId.getFsObjectId(), depth);
      performanceLogger.timerEndAndLog(
          lookupTimer,
          drsObjectId, // not a flight, so no job id
          this.getClass().getName(),
          "fileService.lookupSnapshotFSItem");
    } catch (InterruptedException ex) {
      throw new FileSystemExecutionException(
          "Unexpected interruption during file system processing", ex);
    }

    if (fsObject instanceof FSFile) {
      return drsObjectFromFSFile((FSFile) fsObject, snapshot, authUser, passportAuth);
    } else if (fsObject instanceof FSDir) {
      return drsObjectFromFSDir((FSDir) fsObject, drsId.getSnapshotId());
    }

    throw new IllegalArgumentException("Invalid object type");
  }

  /** Passport-authenticated variant of fetching a signed access URL for an access id. */
  public DRSAccessURL postAccessUrlForObjectId(
      String objectId, String accessId, DRSPassportRequestModel passportRequestModel) {
    DRSObject drsObject = lookupObjectByDrsIdPassport(objectId, passportRequestModel);
    return getAccessURL(null, drsObject, objectId, accessId);
  }

  /** Bearer-authenticated variant of fetching a signed access URL for an access id. */
  public DRSAccessURL getAccessUrlForObjectId(
      AuthenticatedUserRequest authUser, String objectId, String accessId) {
    DRSObject drsObject = lookupObjectByDrsId(authUser, objectId, false);
    return getAccessURL(authUser, drsObject, objectId, accessId);
  }

  /**
   * Produces a time-limited signed URL for the file behind {@code objectId}, dispatching on the
   * snapshot's cloud platform. The caller must already be authorized (both public entry points
   * above perform auth before delegating here).
   *
   * @throws IllegalArgumentException if {@code accessId} does not match any access method
   * @throws FeatureNotImplementedException for unsupported cloud platforms
   */
  private DRSAccessURL getAccessURL(
      AuthenticatedUserRequest authUser, DRSObject drsObject, String objectId, String accessId) {

    DrsId drsId = drsIdService.fromObjectId(objectId);
    UUID snapshotId = UUID.fromString(drsId.getSnapshotId());
    SnapshotCacheResult cachedSnapshot = getSnapshot(snapshotId);
    BillingProfileModel billingProfileModel = cachedSnapshot.billingProfileModel;

    assertAccessMethodMatchingAccessId(accessId, drsObject);

    FSFile fsFile;
    try {
      fsFile =
          (FSFile)
              fileService.lookupSnapshotFSItem(
                  snapshotService.retrieveAvailableSnapshotProject(cachedSnapshot.id),
                  drsId.getFsObjectId(),
                  1);
    } catch (InterruptedException e) {
      throw new IllegalArgumentException(e);
    }

    CloudPlatformWrapper platform = CloudPlatformWrapper.of(billingProfileModel.getCloudPlatform());
    if (platform.isGcp()) {
      return signGoogleUrl(cachedSnapshot, fsFile.getCloudPath(), authUser);
    } else if (platform.isAzure()) {
      return signAzureUrl(billingProfileModel, fsFile, authUser);
    } else {
      throw new FeatureNotImplementedException("Cloud platform not implemented");
    }
  }

  /**
   * Finds the access method on {@code object} whose access id equals {@code accessId}.
   *
   * @throws IllegalArgumentException if no access method matches
   */
  private DRSAccessMethod assertAccessMethodMatchingAccessId(String accessId, DRSObject object) {
    Supplier<IllegalArgumentException> illegalArgumentExceptionSupplier =
        () -> new IllegalArgumentException("No matching access ID was found for object");
    return object.getAccessMethods().stream()
        .filter(drsAccessMethod -> drsAccessMethod.getAccessId().equals(accessId))
        .findFirst()
        .orElseThrow(illegalArgumentExceptionSupplier);
  }

  /** Signs an Azure blob URL (read-only SAS, {@link #URL_TTL} lifetime) for the given file. */
  private DRSAccessURL signAzureUrl(
      BillingProfileModel profileModel, FSItem fsItem, AuthenticatedUserRequest authUser) {
    AzureStorageAccountResource storageAccountResource =
        resourceService.lookupStorageAccountMetadata(((FSFile) fsItem).getBucketResourceId());

    return new DRSAccessURL()
        .url(
            azureBlobStorePdao.signFile(
                profileModel,
                storageAccountResource,
                ((FSFile) fsItem).getCloudPath(),
                ContainerType.DATA,
                new BlobSasTokenOptions(
                    URL_TTL,
                    new BlobSasPermission().setReadPermission(true),
                    authUser.getEmail())));
  }

  /**
   * Signs a GCS URL (V4 signature, {@link #URL_TTL} lifetime) for the given gs:// path. Self-hosted
   * datasets sign with the dataset's service account; otherwise the snapshot project's default
   * credentials are used. The snapshot's Google project is always attached as {@code userProject}
   * for requester-pays billing.
   */
  private DRSAccessURL signGoogleUrl(
      SnapshotCacheResult cachedSnapshot, String gsPath, AuthenticatedUserRequest authUser) {
    Storage storage;
    if (cachedSnapshot.isSelfHosted) {
      // In the case of a self-hosted dataset, use the dataset's service account to sign the url
      storage = gcsProjectFactory.getStorage(cachedSnapshot.datasetProjectId);
    } else {
      storage =
          StorageOptions.newBuilder()
              .setProjectId(cachedSnapshot.googleProjectId)
              .build()
              .getService();
    }
    BlobId locator = GcsUriUtils.parseBlobUri(gsPath);

    BlobInfo blobInfo = BlobInfo.newBuilder(locator).build();

    Map<String, String> queryParams = new HashMap<>();
    queryParams.put(USER_PROJECT_QUERY_PARAM, cachedSnapshot.googleProjectId);
    if (authUser != null) {
      queryParams.put(REQUESTED_BY_QUERY_PARAM, authUser.getEmail());
    }

    URL url =
        storage.signUrl(
            blobInfo,
            URL_TTL.toMinutes(),
            TimeUnit.MINUTES,
            Storage.SignUrlOption.withQueryParams(queryParams),
            Storage.SignUrlOption.withV4Signature());

    return new DRSAccessURL().url(url.toString());
  }

  /**
   * Converts a file system file into a DRS object, attaching cloud-appropriate access methods.
   * Passport-authenticated callers get signed-URL-only access ids prefixed with "passport-".
   */
  private DRSObject drsObjectFromFSFile(
      FSFile fsFile,
      SnapshotCacheResult cachedSnapshot,
      AuthenticatedUserRequest authUser,
      boolean passportAuth) {
    DRSObject fileObject = makeCommonDrsObject(fsFile, cachedSnapshot.id.toString());

    List<DRSAccessMethod> accessMethods;
    CloudPlatformWrapper platform = CloudPlatformWrapper.of(fsFile.getCloudPlatform());
    if (platform.isGcp()) {
      String gcpRegion = retrieveGCPSnapshotRegion(cachedSnapshot, fsFile);
      if (passportAuth) {
        accessMethods =
            getDrsSignedURLAccessMethods(
                ACCESS_ID_PREFIX_GCP + ACCESS_ID_PREFIX_PASSPORT, gcpRegion, passportAuth);
      } else {
        accessMethods =
            getDrsAccessMethodsOnGcp(fsFile, authUser, gcpRegion, cachedSnapshot.googleProjectId);
      }
    } else if (platform.isAzure()) {
      String azureRegion = retrieveAzureSnapshotRegion(fsFile);
      if (passportAuth) {
        accessMethods =
            getDrsSignedURLAccessMethods(
                ACCESS_ID_PREFIX_AZURE + ACCESS_ID_PREFIX_PASSPORT, azureRegion, passportAuth);
      } else {
        accessMethods =
            getDrsSignedURLAccessMethods(ACCESS_ID_PREFIX_AZURE, azureRegion, passportAuth);
      }
    } else {
      throw new InvalidCloudPlatformException();
    }

    fileObject
        .mimeType(fsFile.getMimeType())
        .checksums(fileService.makeChecksums(fsFile))
        .selfUri(drsIdService.makeDrsId(fsFile, cachedSnapshot.id.toString()).toDrsUri())
        .accessMethods(accessMethods);

    return fileObject;
  }

  /**
   * Determines the GCS region of the file's bucket. Self-hosted datasets read the bucket location
   * directly (billed to the snapshot project); managed datasets consult bucket metadata.
   */
  private String retrieveGCPSnapshotRegion(SnapshotCacheResult cachedSnapshot, FSFile fsFile) {
    final GoogleRegion region;
    if (cachedSnapshot.isSelfHosted) {
      // Authorize using the dataset's service account...
      Storage storage = gcsProjectFactory.getStorage(cachedSnapshot.datasetProjectId);
      Bucket bucket =
          storage.get(
              GcsUriUtils.parseBlobUri(fsFile.getCloudPath()).getBucket(),
              // ...but bill to the snapshot's project
              BucketGetOption.userProject(cachedSnapshot.googleProjectId));

      region = GoogleRegion.fromValue(bucket.getLocation());
    } else {
      GoogleBucketResource bucketResource =
          resourceService.lookupBucketMetadata(fsFile.getBucketResourceId());

      region = bucketResource.getRegion();
    }
    return region.getValue();
  }

  /** Determines the Azure region of the file's storage account from resource metadata. */
  private String retrieveAzureSnapshotRegion(FSFile fsFile) {
    AzureStorageAccountResource storageAccountResource =
        resourceService.lookupStorageAccountMetadata(fsFile.getBucketResourceId());

    return storageAccountResource.getRegion().getValue();
  }

  /**
   * Builds the two bearer-auth GCP access methods: a gs:// URL (with an access id for signed-URL
   * retrieval) and an https:// URL carrying the caller's bearer token as a header.
   */
  private List<DRSAccessMethod> getDrsAccessMethodsOnGcp(
      FSFile fsFile, AuthenticatedUserRequest authUser, String region, String userProject) {
    DRSAccessURL gsAccessURL = new DRSAccessURL().url(fsFile.getCloudPath());

    DRSAuthorizations authorizationsBearerOnly = buildDRSAuth(false);
    String accessId = ACCESS_ID_PREFIX_GCP + region;
    DRSAccessMethod gsAccessMethod =
        new DRSAccessMethod()
            .type(DRSAccessMethod.TypeEnum.GS)
            .accessUrl(gsAccessURL)
            .accessId(accessId)
            .region(region)
            .authorizations(authorizationsBearerOnly);

    DRSAccessURL httpsAccessURL =
        new DRSAccessURL()
            .url(GcsUriUtils.makeHttpsFromGs(fsFile.getCloudPath(), userProject))
            .headers(makeAuthHeader(authUser));

    DRSAccessMethod httpsAccessMethod =
        new DRSAccessMethod()
            .type(DRSAccessMethod.TypeEnum.HTTPS)
            .accessUrl(httpsAccessURL)
            .region(region)
            .authorizations(authorizationsBearerOnly);

    return List.of(gsAccessMethod, httpsAccessMethod);
  }

  /**
   * Builds a single https access method whose access id ({@code prefix + region}) is later
   * exchanged for a signed URL via the access-URL endpoints.
   */
  private List<DRSAccessMethod> getDrsSignedURLAccessMethods(
      String prefix, String region, boolean passportAuth) {
    String accessId = prefix + region;
    DRSAuthorizations authorizations = buildDRSAuth(passportAuth);

    DRSAccessMethod httpsAccessMethod =
        new DRSAccessMethod()
            .type(DRSAccessMethod.TypeEnum.HTTPS)
            .accessId(accessId)
            .region(region)
            .authorizations(authorizations);

    return List.of(httpsAccessMethod);
  }

  /**
   * Converts a directory into a DRS bundle object. Directories report size 0 and a fixed crc32c
   * checksum of "0", with their children listed as contents.
   */
  private DRSObject drsObjectFromFSDir(FSDir fsDir, String snapshotId) {
    DRSObject dirObject = makeCommonDrsObject(fsDir, snapshotId);

    DRSChecksum drsChecksum = new DRSChecksum().type("crc32c").checksum("0");
    dirObject.size(0L).addChecksumsItem(drsChecksum).contents(makeContentsList(fsDir, snapshotId));

    return dirObject;
  }

  /** Populates the DRS fields shared by files and directories (id, name, times, checksums...). */
  private DRSObject makeCommonDrsObject(FSItem fsObject, String snapshotId) {
    // Compute the time once; used for both created and updated times as per DRS spec for immutable
    // objects
    String theTime = fsObject.getCreatedDate().toString();
    DrsId drsId = drsIdService.makeDrsId(fsObject, snapshotId);

    return new DRSObject()
        .id(drsId.toDrsObjectId())
        .name(getLastNameFromPath(fsObject.getPath()))
        .createdTime(theTime)
        .updatedTime(theTime)
        .version(DRS_OBJECT_VERSION)
        .description(fsObject.getDescription())
        .aliases(Collections.singletonList(fsObject.getPath()))
        .size(fsObject.getSize())
        .checksums(fileService.makeChecksums(fsObject));
  }

  /** Converts each child of {@code fsDir} into a DRS contents entry. */
  private List<DRSContentsObject> makeContentsList(FSDir fsDir, String snapshotId) {
    List<DRSContentsObject> contentsList = new ArrayList<>();

    for (FSItem fsObject : fsDir.getContents()) {
      contentsList.add(makeDrsContentsObject(fsObject, snapshotId));
    }

    return contentsList;
  }

  /**
   * Builds one DRS contents entry for a file system item, recursing into directories that were
   * enumerated during the lookup.
   */
  private DRSContentsObject makeDrsContentsObject(FSItem fsObject, String snapshotId) {
    DrsId drsId = drsIdService.makeDrsId(fsObject, snapshotId);

    List<String> drsUris = new ArrayList<>();
    drsUris.add(drsId.toDrsUri());

    DRSContentsObject contentsObject =
        new DRSContentsObject()
            .name(getLastNameFromPath(fsObject.getPath()))
            .id(drsId.toDrsObjectId())
            .drsUri(drsUris);

    if (fsObject instanceof FSDir) {
      FSDir fsDir = (FSDir) fsObject;
      if (fsDir.isEnumerated()) {
        contentsObject.contents(makeContentsList(fsDir, snapshotId));
      }
    }

    return contentsObject;
  }

  /**
   * Formats the caller's bearer token as an Authorization header for https access URLs; returns an
   * empty list when no authenticated user/token is available.
   */
  private List<String> makeAuthHeader(AuthenticatedUserRequest authUser) {
    // TODO: I added this so that connected tests would work. Seems like we should have a better
    // solution.
    // I don't like putting test-path-only stuff into the production code.
    if (authUser == null || authUser.getToken().isEmpty()) {
      return Collections.emptyList();
    }

    String hdr = String.format("Authorization: Bearer %s", authUser.getToken());
    return Collections.singletonList(hdr);
  }

  /** Returns the final path segment, e.g. "/a/b/c.txt" -> "c.txt". */
  public static String getLastNameFromPath(String path) {
    String[] pathParts = StringUtils.split(path, '/');
    return pathParts[pathParts.length - 1];
  }

  /** Cached lookup of the snapshot's project info (15-minute expiring cache). */
  private SnapshotProject getSnapshotProject(UUID snapshotId) {
    return snapshotProjectsCache.computeIfAbsent(
        snapshotId, snapshotService::retrieveAvailableSnapshotProject);
  }

  /** Cached lookup of the snapshot fields needed by DRS (15-minute expiring cache). */
  private SnapshotCacheResult getSnapshot(UUID snapshotId) {
    return snapshotCache.computeIfAbsent(
        snapshotId, id -> new SnapshotCacheResult(snapshotService.retrieve(id)));
  }

  /** Cached lookup of the snapshot summary (15-minute expiring cache). */
  private SnapshotSummaryModel getSnapshotSummary(UUID snapshotId) {
    return snapshotSummariesCache.computeIfAbsent(
        snapshotId, snapshotService::retrieveSnapshotSummary);
  }

  /**
   * Immutable projection of a {@link Snapshot} holding just the fields DRS resolution needs, kept
   * small so it can sit in the expiring cache without pinning the full snapshot graph.
   */
  @VisibleForTesting
  static class SnapshotCacheResult {
    private final UUID id;
    private final Boolean isSelfHosted;
    private final BillingProfileModel billingProfileModel;
    // Google project of the snapshot itself (null for non-GCP snapshots).
    private final String googleProjectId;
    // Google project of the source dataset; used to sign URLs for self-hosted datasets.
    private final String datasetProjectId;

    public SnapshotCacheResult(Snapshot snapshot) {
      this.id = snapshot.getId();
      this.isSelfHosted = snapshot.isSelfHosted();
      this.billingProfileModel =
          snapshot.getSourceDataset().getDatasetSummary().getDefaultBillingProfile();
      var projectResource = snapshot.getProjectResource();
      if (projectResource != null) {
        this.googleProjectId = projectResource.getGoogleProjectId();
      } else {
        this.googleProjectId = null;
      }
      var datasetProjectResource = snapshot.getSourceDataset().getProjectResource();
      if (datasetProjectResource != null) {
        this.datasetProjectId = datasetProjectResource.getGoogleProjectId();
      } else {
        this.datasetProjectId = null;
      }
    }

    public UUID getId() {
      return this.id;
    }
  }
}
| src/main/java/bio/terra/service/filedata/DrsService.java | package bio.terra.service.filedata;
import bio.terra.app.configuration.EcmConfiguration;
import bio.terra.app.controller.exception.TooManyRequestsException;
import bio.terra.app.logging.PerformanceLogger;
import bio.terra.app.model.GoogleRegion;
import bio.terra.common.CloudPlatformWrapper;
import bio.terra.common.exception.FeatureNotImplementedException;
import bio.terra.common.exception.InvalidCloudPlatformException;
import bio.terra.common.exception.UnauthorizedException;
import bio.terra.common.iam.AuthenticatedUserRequest;
import bio.terra.model.BillingProfileModel;
import bio.terra.model.DRSAccessMethod;
import bio.terra.model.DRSAccessURL;
import bio.terra.model.DRSAuthorizations;
import bio.terra.model.DRSChecksum;
import bio.terra.model.DRSContentsObject;
import bio.terra.model.DRSObject;
import bio.terra.model.DRSPassportRequestModel;
import bio.terra.model.SnapshotSummaryModel;
import bio.terra.service.auth.iam.IamAction;
import bio.terra.service.auth.iam.IamResourceType;
import bio.terra.service.auth.iam.IamService;
import bio.terra.service.common.gcs.GcsUriUtils;
import bio.terra.service.configuration.ConfigEnum;
import bio.terra.service.configuration.ConfigurationService;
import bio.terra.service.filedata.azure.blobstore.AzureBlobStorePdao;
import bio.terra.service.filedata.azure.util.BlobSasTokenOptions;
import bio.terra.service.filedata.exception.DrsObjectNotFoundException;
import bio.terra.service.filedata.exception.FileSystemExecutionException;
import bio.terra.service.filedata.exception.InvalidDrsIdException;
import bio.terra.service.filedata.google.gcs.GcsProjectFactory;
import bio.terra.service.job.JobService;
import bio.terra.service.resourcemanagement.ResourceService;
import bio.terra.service.resourcemanagement.azure.AzureStorageAccountResource;
import bio.terra.service.resourcemanagement.azure.AzureStorageAccountResource.ContainerType;
import bio.terra.service.resourcemanagement.google.GoogleBucketResource;
import bio.terra.service.snapshot.Snapshot;
import bio.terra.service.snapshot.SnapshotProject;
import bio.terra.service.snapshot.SnapshotService;
import bio.terra.service.snapshot.SnapshotSummary;
import bio.terra.service.snapshot.exception.SnapshotNotFoundException;
import com.azure.storage.blob.sas.BlobSasPermission;
import com.google.cloud.storage.BlobId;
import com.google.cloud.storage.BlobInfo;
import com.google.cloud.storage.Bucket;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.Storage.BucketGetOption;
import com.google.cloud.storage.StorageOptions;
import com.google.common.annotations.VisibleForTesting;
import java.net.URL;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import org.apache.commons.collections4.map.PassiveExpiringMap;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/*
* WARNING: if making any changes to this class make sure to notify the #dsp-batch channel! Describe the change and
* any consequences downstream to DRS clients.
*/
@Component
public class DrsService {
private final Logger logger = LoggerFactory.getLogger(DrsService.class);
private static final String ACCESS_ID_PREFIX_GCP = "gcp-";
private static final String ACCESS_ID_PREFIX_AZURE = "az-";
private static final String ACCESS_ID_PREFIX_PASSPORT = "passport-";
private static final String DRS_OBJECT_VERSION = "0";
private static final Duration URL_TTL = Duration.ofMinutes(15);
private static final String USER_PROJECT_QUERY_PARAM = "userProject";
private static final String REQUESTED_BY_QUERY_PARAM = "requestedBy";
// atomic counter that we incr on request arrival and decr on request response
private final AtomicInteger currentDRSRequests = new AtomicInteger(0);
private final SnapshotService snapshotService;
private final FileService fileService;
private final DrsIdService drsIdService;
private final IamService samService;
private final ResourceService resourceService;
private final ConfigurationService configurationService;
private final JobService jobService;
private final PerformanceLogger performanceLogger;
private final AzureBlobStorePdao azureBlobStorePdao;
private final GcsProjectFactory gcsProjectFactory;
private final EcmConfiguration ecmConfiguration;
private final Map<UUID, SnapshotProject> snapshotProjectsCache =
Collections.synchronizedMap(new PassiveExpiringMap<>(15, TimeUnit.MINUTES));
private final Map<UUID, SnapshotCacheResult> snapshotCache =
Collections.synchronizedMap(new PassiveExpiringMap<>(15, TimeUnit.MINUTES));
private final Map<UUID, SnapshotSummaryModel> snapshotSummariesCache =
Collections.synchronizedMap(new PassiveExpiringMap<>(15, TimeUnit.MINUTES));
@Autowired
public DrsService(
SnapshotService snapshotService,
FileService fileService,
DrsIdService drsIdService,
IamService samService,
ResourceService resourceService,
ConfigurationService configurationService,
JobService jobService,
PerformanceLogger performanceLogger,
AzureBlobStorePdao azureBlobStorePdao,
GcsProjectFactory gcsProjectFactory,
EcmConfiguration ecmConfiguration) {
this.snapshotService = snapshotService;
this.fileService = fileService;
this.drsIdService = drsIdService;
this.samService = samService;
this.resourceService = resourceService;
this.configurationService = configurationService;
this.jobService = jobService;
this.performanceLogger = performanceLogger;
this.azureBlobStorePdao = azureBlobStorePdao;
this.gcsProjectFactory = gcsProjectFactory;
this.ecmConfiguration = ecmConfiguration;
}
private class DrsRequestResource implements AutoCloseable {
DrsRequestResource() {
// make sure not too many requests are being made at once
int podCount = jobService.getActivePodCount();
int maxDRSLookups = configurationService.getParameterValue(ConfigEnum.DRS_LOOKUP_MAX);
int max = maxDRSLookups / podCount;
logger.info("Max number of DRS lookups allowed : " + max);
logger.info("Current number of requests being made : " + currentDRSRequests);
if (currentDRSRequests.get() >= max) {
throw new TooManyRequestsException(
"Too many requests are being made at once. Please try again later.");
}
currentDRSRequests.incrementAndGet();
}
@Override
public void close() {
currentDRSRequests.decrementAndGet();
}
}
/**
* Determine the acceptable means of authentication for a given DRS ID, including the passport
* issuers when supported.
*
* @param drsObjectId the object ID for which to look up authorizations
* @return the `DrsAuthorizations` for this ID
* @throws IllegalArgumentException if there is an issue with the object id
* @throws SnapshotNotFoundException if the snapshot for the DRS object cannot be found
* @throws TooManyRequestsException if there are too many concurrent DRS lookup requests
*/
public DRSAuthorizations lookupAuthorizationsByDrsId(String drsObjectId) {
try (DrsRequestResource r = new DrsRequestResource()) {
SnapshotCacheResult snapshot = lookupSnapshotForDRSObject(drsObjectId);
SnapshotSummaryModel snapshotSummary = getSnapshotSummary(snapshot.id);
return buildDRSAuth(SnapshotSummary.passportAuthorizationAvailable(snapshotSummary));
}
}
private DRSAuthorizations buildDRSAuth(boolean passportAuthorizationAvailable) {
DRSAuthorizations auths = new DRSAuthorizations();
if (passportAuthorizationAvailable) {
auths.addSupportedTypesItem(DRSAuthorizations.SupportedTypesEnum.PASSPORTAUTH);
auths.addPassportAuthIssuersItem(ecmConfiguration.getRasIssuer());
}
auths.addSupportedTypesItem(DRSAuthorizations.SupportedTypesEnum.BEARERAUTH);
return auths;
}
/**
 * Look up the DRS object for a DRS object ID, authorizing the request via RAS passport.
 *
 * @param drsObjectId the object ID to look up
 * @param drsPassportRequestModel carries the RAS passports used for authorization, plus the
 *     'expand' flag — when expand is false and the id refers to a bundle, only the bundle's
 *     direct children are returned
 * @return the DRS object for this ID
 * @throws IllegalArgumentException if there is an issue with the object id
 * @throws SnapshotNotFoundException if the snapshot for the DRS object cannot be found
 * @throws TooManyRequestsException if there are too many concurrent DRS lookup requests
 */
public DRSObject lookupObjectByDrsIdPassport(
    String drsObjectId, DRSPassportRequestModel drsPassportRequestModel) {
  try (DrsRequestResource requestSlot = new DrsRequestResource()) {
    SnapshotCacheResult cachedSnapshot = lookupSnapshotForDRSObject(drsObjectId);
    verifyPassportAuth(cachedSnapshot.id, drsPassportRequestModel);
    // No AuthenticatedUserRequest here: the passport itself is the credential.
    boolean expand = drsPassportRequestModel.isExpand();
    return lookupDRSObjectAfterAuth(expand, cachedSnapshot, drsObjectId, null, true);
  }
}
/**
 * Look up the DRS object for a DRS object ID.
 *
 * @param authUser the user to authenticate this request for
 * @param drsObjectId the object ID to look up
 * @param expand if false and drsObjectId refers to a bundle, then the returned array contains
 *     only those objects directly contained in the bundle
 * @return the DRS object for this ID
 * @throws IllegalArgumentException if there is an issue with the object id
 * @throws SnapshotNotFoundException if the snapshot for the DRS object cannot be found
 * @throws TooManyRequestsException if there are too many concurrent DRS lookup requests
 */
public DRSObject lookupObjectByDrsId(
    AuthenticatedUserRequest authUser, String drsObjectId, Boolean expand) {
  try (DrsRequestResource r = new DrsRequestResource()) {
    SnapshotCacheResult cachedSnapshot = lookupSnapshotForDRSObject(drsObjectId);
    // Time the SAM authorization check so slow permission lookups show up in perf logs.
    String samTimer = performanceLogger.timerStart();
    samService.verifyAuthorization(
        authUser,
        IamResourceType.DATASNAPSHOT,
        cachedSnapshot.id.toString(),
        IamAction.READ_DATA);
    performanceLogger.timerEndAndLog(
        samTimer,
        drsObjectId, // not a flight, so no job id
        this.getClass().getName(),
        "samService.verifyAuthorization");
    return lookupDRSObjectAfterAuth(expand, cachedSnapshot, drsObjectId, authUser, false);
  }
}
/**
 * Resolve the snapshot that backs a DRS object ID, using the snapshot cache.
 *
 * @param drsObjectId the DRS object ID to resolve
 * @return the cached snapshot information
 * @throws InvalidDrsIdException if the object id does not parse to a valid DRS id / UUID
 * @throws DrsObjectNotFoundException if no snapshot exists for the DRS object
 */
@VisibleForTesting
SnapshotCacheResult lookupSnapshotForDRSObject(String drsObjectId) {
  DrsId drsId = drsIdService.fromObjectId(drsObjectId);
  SnapshotCacheResult snapshot;
  try {
    UUID snapshotId = UUID.fromString(drsId.getSnapshotId());
    // We only look up DRS ids for unlocked snapshots.
    String retrieveTimer = performanceLogger.timerStart();
    snapshot = getSnapshot(snapshotId);
    performanceLogger.timerEndAndLog(
        retrieveTimer,
        drsObjectId, // not a flight, so no job id
        this.getClass().getName(),
        "snapshotService.retrieveAvailable");
    return snapshot;
  } catch (IllegalArgumentException ex) {
    // Thrown by UUID.fromString (or the id service) for malformed ids.
    throw new InvalidDrsIdException("Invalid object id format '" + drsObjectId + "'", ex);
  } catch (SnapshotNotFoundException ex) {
    throw new DrsObjectNotFoundException(
        "No snapshot found for DRS object id '" + drsObjectId + "'", ex);
  }
}
/**
 * Verify that one of the supplied RAS passports authorizes access to the snapshot.
 *
 * @param snapshotId the snapshot to authorize against
 * @param drsPassportRequestModel the request carrying the caller's passports
 * @throws UnauthorizedException if no passport grants access
 */
void verifyPassportAuth(UUID snapshotId, DRSPassportRequestModel drsPassportRequestModel) {
  SnapshotSummaryModel snapshotSummary = getSnapshotSummary(snapshotId);
  List<String> passports = drsPassportRequestModel.getPassports();
  if (!snapshotService.verifyPassportAuth(snapshotSummary, passports).isValid()) {
    throw new UnauthorizedException("User is not authorized to see drs object.");
  }
}
/**
 * Resolve the file-system item behind a DRS id and convert it to a DRS object. Assumes the
 * caller has already performed authorization (SAM or passport).
 *
 * @param expand if true, recursively enumerate the whole bundle tree
 * @param snapshot the cached snapshot the object belongs to
 * @param drsObjectId the DRS object id being resolved
 * @param authUser the requesting user; null for passport-authorized requests
 * @param passportAuth whether the request was authorized via RAS passport
 * @return the DRS object (file or bundle)
 * @throws IllegalArgumentException if the resolved item is neither a file nor a directory
 * @throws FileSystemExecutionException if the file-system lookup is interrupted
 */
private DRSObject lookupDRSObjectAfterAuth(
    boolean expand,
    SnapshotCacheResult snapshot,
    String drsObjectId,
    AuthenticatedUserRequest authUser,
    boolean passportAuth) {
  DrsId drsId = drsIdService.fromObjectId(drsObjectId);
  SnapshotProject snapshotProject = getSnapshotProject(snapshot.id);
  // Depth -1 walks the whole tree; depth 1 returns only the item (plus direct children).
  int depth = (expand ? -1 : 1);
  FSItem fsObject;
  try {
    String lookupTimer = performanceLogger.timerStart();
    fsObject = fileService.lookupSnapshotFSItem(snapshotProject, drsId.getFsObjectId(), depth);
    performanceLogger.timerEndAndLog(
        lookupTimer,
        drsObjectId, // not a flight, so no job id
        this.getClass().getName(),
        "fileService.lookupSnapshotFSItem");
  } catch (InterruptedException ex) {
    throw new FileSystemExecutionException(
        "Unexpected interruption during file system processing", ex);
  }
  if (fsObject instanceof FSFile) {
    return drsObjectFromFSFile((FSFile) fsObject, snapshot, authUser, passportAuth);
  } else if (fsObject instanceof FSDir) {
    return drsObjectFromFSDir((FSDir) fsObject, drsId.getSnapshotId());
  }
  throw new IllegalArgumentException("Invalid object type");
}
/**
 * Produce a signed access URL for a DRS object, authorizing via RAS passport.
 *
 * @param objectId the DRS object id
 * @param accessId the access method id to sign a URL for
 * @param passportRequestModel the request carrying the caller's passports
 * @return the signed access URL
 */
public DRSAccessURL postAccessUrlForObjectId(
    String objectId, String accessId, DRSPassportRequestModel passportRequestModel) {
  DRSObject drsObject = lookupObjectByDrsIdPassport(objectId, passportRequestModel);
  // Passport flow carries no AuthenticatedUserRequest, hence the null user.
  return getAccessURL(null, drsObject, objectId, accessId);
}
/**
 * Produce a signed access URL for a DRS object, authorizing via the caller's bearer token.
 *
 * @param authUser the authenticated requesting user
 * @param objectId the DRS object id
 * @param accessId the access method id to sign a URL for
 * @return the signed access URL
 */
public DRSAccessURL getAccessUrlForObjectId(
    AuthenticatedUserRequest authUser, String objectId, String accessId) {
  DRSObject drsObject = lookupObjectByDrsId(authUser, objectId, false);
  return getAccessURL(authUser, drsObject, objectId, accessId);
}
/**
 * Build a signed access URL for the file behind a DRS object.
 *
 * @param authUser the requesting user; may be null when passport auth was already verified
 * @param drsObject the resolved DRS object (used to validate the access id)
 * @param objectId the DRS object id
 * @param accessId the access method id chosen by the caller
 * @return a signed {@code DRSAccessURL} for the file
 * @throws IllegalArgumentException if the access id matches none of the object's access methods
 * @throws FileSystemExecutionException if the file-system lookup is interrupted
 * @throws FeatureNotImplementedException if the snapshot's cloud platform is unsupported
 */
private DRSAccessURL getAccessURL(
    AuthenticatedUserRequest authUser, DRSObject drsObject, String objectId, String accessId) {
  DrsId drsId = drsIdService.fromObjectId(objectId);
  UUID snapshotId = UUID.fromString(drsId.getSnapshotId());
  SnapshotCacheResult cachedSnapshot = getSnapshot(snapshotId);
  BillingProfileModel billingProfileModel = cachedSnapshot.billingProfileModel;
  // Fail fast if the caller-supplied access id is not one of the object's access methods.
  assertAccessMethodMatchingAccessId(accessId, drsObject);
  FSFile fsFile;
  try {
    fsFile =
        (FSFile)
            fileService.lookupSnapshotFSItem(
                snapshotService.retrieveAvailableSnapshotProject(cachedSnapshot.id),
                drsId.getFsObjectId(),
                1);
  } catch (InterruptedException e) {
    // An interruption is not a caller error: restore the interrupt flag and wrap the
    // exception consistently with lookupDRSObjectAfterAuth (was IllegalArgumentException).
    Thread.currentThread().interrupt();
    throw new FileSystemExecutionException(
        "Unexpected interruption during file system processing", e);
  }
  CloudPlatformWrapper platform = CloudPlatformWrapper.of(billingProfileModel.getCloudPlatform());
  if (platform.isGcp()) {
    return signGoogleUrl(cachedSnapshot, fsFile.getCloudPath(), authUser);
  } else if (platform.isAzure()) {
    return signAzureUrl(billingProfileModel, fsFile, authUser);
  } else {
    throw new FeatureNotImplementedException("Cloud platform not implemented");
  }
}
/**
 * Verify that the given access id matches one of the object's access methods.
 *
 * @param accessId the access id to look for
 * @param object the DRS object whose access methods are searched
 * @return the matching access method
 * @throws IllegalArgumentException if no access method carries the given id
 */
private DRSAccessMethod assertAccessMethodMatchingAccessId(String accessId, DRSObject object) {
  for (DRSAccessMethod candidate : object.getAccessMethods()) {
    if (candidate.getAccessId().equals(accessId)) {
      return candidate;
    }
  }
  throw new IllegalArgumentException("No matching access ID was found for object");
}
/**
 * Sign an Azure blob URL for the given file, granting read access for the URL TTL.
 *
 * @param profileModel the billing profile that owns the storage account
 * @param fsItem the file to sign; expected to be an {@code FSFile}
 * @param authUser the requesting user, recorded in the SAS token options
 * @return the signed access URL
 */
private DRSAccessURL signAzureUrl(
    BillingProfileModel profileModel, FSItem fsItem, AuthenticatedUserRequest authUser) {
  // Cast once up front instead of repeating it at every use site.
  FSFile fsFile = (FSFile) fsItem;
  AzureStorageAccountResource storageAccount =
      resourceService.lookupStorageAccountMetadata(fsFile.getBucketResourceId());
  BlobSasTokenOptions sasOptions =
      new BlobSasTokenOptions(
          URL_TTL, new BlobSasPermission().setReadPermission(true), authUser.getEmail());
  String signedUrl =
      azureBlobStorePdao.signFile(
          profileModel, storageAccount, fsFile.getCloudPath(), ContainerType.DATA, sasOptions);
  return new DRSAccessURL().url(signedUrl);
}
/**
 * Build a V4-signed GCS URL for the given gs:// path, billed to the snapshot's Google project.
 *
 * @param cachedSnapshot the snapshot whose Google project is used for signing and billing
 * @param gsPath the gs:// URI of the object to sign
 * @param authUser the requesting user; when present, recorded as a query parameter
 * @return the signed access URL
 */
private DRSAccessURL signGoogleUrl(
    SnapshotCacheResult cachedSnapshot, String gsPath, AuthenticatedUserRequest authUser) {
  Storage storage =
      StorageOptions.newBuilder()
          .setProjectId(cachedSnapshot.googleProjectId)
          .build()
          .getService();
  BlobId locator = GcsUriUtils.parseBlobUri(gsPath);
  BlobInfo blobInfo = BlobInfo.newBuilder(locator).build();
  Map<String, String> queryParams = new HashMap<>();
  // Bill URL usage to the snapshot's project (requester-pays style userProject parameter).
  queryParams.put(USER_PROJECT_QUERY_PARAM, cachedSnapshot.googleProjectId);
  if (authUser != null) {
    queryParams.put(REQUESTED_BY_QUERY_PARAM, authUser.getEmail());
  }
  URL url =
      storage.signUrl(
          blobInfo,
          URL_TTL.toMinutes(),
          TimeUnit.MINUTES,
          Storage.SignUrlOption.withQueryParams(queryParams),
          Storage.SignUrlOption.withV4Signature());
  return new DRSAccessURL().url(url.toString());
}
/**
 * Convert a file entry into a DRS object, attaching the access methods appropriate for the
 * file's cloud platform and the authorization style of the request.
 *
 * @param fsFile the file to convert
 * @param cachedSnapshot the snapshot the file belongs to
 * @param authUser the requesting user; null for passport-authorized requests
 * @param passportAuth whether the request was authorized via RAS passport
 * @return the DRS object describing the file
 * @throws InvalidCloudPlatformException if the file's platform is neither GCP nor Azure
 */
private DRSObject drsObjectFromFSFile(
    FSFile fsFile,
    SnapshotCacheResult cachedSnapshot,
    AuthenticatedUserRequest authUser,
    boolean passportAuth) {
  DRSObject fileObject = makeCommonDrsObject(fsFile, cachedSnapshot.id.toString());
  List<DRSAccessMethod> accessMethods;
  CloudPlatformWrapper platform = CloudPlatformWrapper.of(fsFile.getCloudPlatform());
  if (platform.isGcp()) {
    String gcpRegion = retrieveGCPSnapshotRegion(cachedSnapshot, fsFile);
    if (passportAuth) {
      // Passport-authorized requests are only offered signed-URL (HTTPS) access.
      accessMethods =
          getDrsSignedURLAccessMethods(
              ACCESS_ID_PREFIX_GCP + ACCESS_ID_PREFIX_PASSPORT, gcpRegion, passportAuth);
    } else {
      accessMethods =
          getDrsAccessMethodsOnGcp(fsFile, authUser, gcpRegion, cachedSnapshot.googleProjectId);
    }
  } else if (platform.isAzure()) {
    String azureRegion = retrieveAzureSnapshotRegion(fsFile);
    if (passportAuth) {
      accessMethods =
          getDrsSignedURLAccessMethods(
              ACCESS_ID_PREFIX_AZURE + ACCESS_ID_PREFIX_PASSPORT, azureRegion, passportAuth);
    } else {
      accessMethods =
          getDrsSignedURLAccessMethods(ACCESS_ID_PREFIX_AZURE, azureRegion, passportAuth);
    }
  } else {
    throw new InvalidCloudPlatformException();
  }
  fileObject
      .mimeType(fsFile.getMimeType())
      .checksums(fileService.makeChecksums(fsFile))
      .selfUri(drsIdService.makeDrsId(fsFile, cachedSnapshot.id.toString()).toDrsUri())
      .accessMethods(accessMethods);
  return fileObject;
}
/**
 * Determine the GCS region of the bucket holding the given file.
 *
 * @param cachedSnapshot the snapshot owning the file
 * @param fsFile the file whose bucket region is wanted
 * @return the region name
 */
private String retrieveGCPSnapshotRegion(SnapshotCacheResult cachedSnapshot, FSFile fsFile) {
  final GoogleRegion region;
  if (cachedSnapshot.isSelfHosted) {
    // Self-hosted: read the bucket location from GCS itself.
    // Authorize using the dataset's service account...
    Storage storage = gcsProjectFactory.getStorage(cachedSnapshot.datasetProjectId);
    Bucket bucket =
        storage.get(
            GcsUriUtils.parseBlobUri(fsFile.getCloudPath()).getBucket(),
            // ...but bill to the snapshot's project
            BucketGetOption.userProject(cachedSnapshot.googleProjectId));
    region = GoogleRegion.fromValue(bucket.getLocation());
  } else {
    // Repo-managed bucket: region is tracked in our own resource metadata.
    GoogleBucketResource bucketResource =
        resourceService.lookupBucketMetadata(fsFile.getBucketResourceId());
    region = bucketResource.getRegion();
  }
  return region.getValue();
}
/**
 * Look up the Azure region of the storage account holding the given file.
 *
 * @param fsFile the file whose storage account region is wanted
 * @return the region name
 */
private String retrieveAzureSnapshotRegion(FSFile fsFile) {
  return resourceService
      .lookupStorageAccountMetadata(fsFile.getBucketResourceId())
      .getRegion()
      .getValue();
}
/**
 * Build the GCP access methods for a file: a native gs:// method and an HTTPS mirror.
 *
 * @param fsFile the file being exposed
 * @param authUser the requesting user; used to build the Authorization header for HTTPS
 * @param region the bucket region, also used to form the access id
 * @param userProject the project billed for requester-pays access through the HTTPS URL
 * @return the gs and https access methods
 */
private List<DRSAccessMethod> getDrsAccessMethodsOnGcp(
    FSFile fsFile, AuthenticatedUserRequest authUser, String region, String userProject) {
  DRSAccessURL gsAccessURL = new DRSAccessURL().url(fsFile.getCloudPath());
  // Direct access methods never advertise passport auth — bearer only.
  DRSAuthorizations authorizationsBearerOnly = buildDRSAuth(false);
  String accessId = ACCESS_ID_PREFIX_GCP + region;
  DRSAccessMethod gsAccessMethod =
      new DRSAccessMethod()
          .type(DRSAccessMethod.TypeEnum.GS)
          .accessUrl(gsAccessURL)
          .accessId(accessId)
          .region(region)
          .authorizations(authorizationsBearerOnly);
  DRSAccessURL httpsAccessURL =
      new DRSAccessURL()
          .url(GcsUriUtils.makeHttpsFromGs(fsFile.getCloudPath(), userProject))
          .headers(makeAuthHeader(authUser));
  DRSAccessMethod httpsAccessMethod =
      new DRSAccessMethod()
          .type(DRSAccessMethod.TypeEnum.HTTPS)
          .accessUrl(httpsAccessURL)
          .region(region)
          .authorizations(authorizationsBearerOnly);
  return List.of(gsAccessMethod, httpsAccessMethod);
}
/**
 * Build the single HTTPS access method used for signed-URL retrieval.
 *
 * @param prefix the access id prefix (cloud platform, optionally passport-qualified)
 * @param region the region the data lives in; appended to the prefix to form the access id
 * @param passportAuth whether passport authorization should be advertised
 * @return a one-element list holding the HTTPS access method
 */
private List<DRSAccessMethod> getDrsSignedURLAccessMethods(
    String prefix, String region, boolean passportAuth) {
  DRSAccessMethod httpsMethod =
      new DRSAccessMethod()
          .type(DRSAccessMethod.TypeEnum.HTTPS)
          .accessId(prefix + region)
          .region(region)
          .authorizations(buildDRSAuth(passportAuth));
  return List.of(httpsMethod);
}
/**
 * Convert a directory entry into a DRS bundle object.
 *
 * @param fsDir the directory to convert
 * @param snapshotId the snapshot the directory belongs to
 * @return the DRS object describing the directory and its contents
 */
private DRSObject drsObjectFromFSDir(FSDir fsDir, String snapshotId) {
  // Directories carry no bytes of their own: size 0 and a fixed crc32c checksum of "0".
  DRSChecksum zeroChecksum = new DRSChecksum().type("crc32c").checksum("0");
  return makeCommonDrsObject(fsDir, snapshotId)
      .size(0L)
      .addChecksumsItem(zeroChecksum)
      .contents(makeContentsList(fsDir, snapshotId));
}
/**
 * Populate the DRS fields shared by files and directories.
 *
 * @param fsObject the file-system item to describe
 * @param snapshotId the snapshot the item belongs to
 * @return a DRS object with the common fields filled in
 */
private DRSObject makeCommonDrsObject(FSItem fsObject, String snapshotId) {
  // Compute the time once; used for both created and updated times as per DRS spec for immutable
  // objects
  String theTime = fsObject.getCreatedDate().toString();
  DrsId drsId = drsIdService.makeDrsId(fsObject, snapshotId);
  return new DRSObject()
      .id(drsId.toDrsObjectId())
      .name(getLastNameFromPath(fsObject.getPath()))
      .createdTime(theTime)
      .updatedTime(theTime)
      .version(DRS_OBJECT_VERSION)
      .description(fsObject.getDescription())
      .aliases(Collections.singletonList(fsObject.getPath()))
      .size(fsObject.getSize())
      .checksums(fileService.makeChecksums(fsObject));
}
/**
 * Convert every child of a directory into a DRS contents entry.
 *
 * @param fsDir the directory whose children are converted
 * @param snapshotId the snapshot the directory belongs to
 * @return the list of contents entries, in the directory's own order
 */
private List<DRSContentsObject> makeContentsList(FSDir fsDir, String snapshotId) {
  List<DRSContentsObject> contents = new ArrayList<>();
  fsDir.getContents().forEach(child -> contents.add(makeDrsContentsObject(child, snapshotId)));
  return contents;
}
/**
 * Build the DRS contents entry for a single file-system item, recursing into directories
 * whose children have already been enumerated.
 *
 * @param fsObject the item to describe
 * @param snapshotId the snapshot the item belongs to
 * @return the contents entry
 */
private DRSContentsObject makeDrsContentsObject(FSItem fsObject, String snapshotId) {
  DrsId drsId = drsIdService.makeDrsId(fsObject, snapshotId);
  List<String> uris = new ArrayList<>();
  uris.add(drsId.toDrsUri());
  DRSContentsObject entry =
      new DRSContentsObject()
          .name(getLastNameFromPath(fsObject.getPath()))
          .id(drsId.toDrsObjectId())
          .drsUri(uris);
  if (fsObject instanceof FSDir) {
    FSDir dir = (FSDir) fsObject;
    // Only recurse into directories whose children were actually loaded; unenumerated
    // directories remain leaf entries.
    if (dir.isEnumerated()) {
      entry.contents(makeContentsList(dir, snapshotId));
    }
  }
  return entry;
}
/**
 * Build the Authorization header line for an HTTPS access URL.
 *
 * @param authUser the requesting user; may be null or token-less, in which case no header
 *     is emitted
 * @return a single "Authorization: Bearer ..." entry, or an empty list
 */
private List<String> makeAuthHeader(AuthenticatedUserRequest authUser) {
  // TODO: I added this so that connected tests would work. Seems like we should have a better
  // solution.
  // I don't like putting test-path-only stuff into the production code.
  if (authUser == null || authUser.getToken().isEmpty()) {
    return Collections.emptyList();
  }
  String hdr = String.format("Authorization: Bearer %s", authUser.getToken());
  return Collections.singletonList(hdr);
}
/**
 * Return the last name component of a slash-separated path. Trailing and repeated slashes
 * are ignored, matching the previous StringUtils.split-based behavior (e.g. "a/b/" -> "b").
 *
 * @param path the path to take the last component of
 * @return the last non-empty path component
 * @throws IllegalArgumentException if the path contains no name component (empty or only
 *     slashes); previously this surfaced as a bare ArrayIndexOutOfBoundsException
 */
public static String getLastNameFromPath(String path) {
  // Trim trailing separators so "a/b/" resolves to "b".
  int end = path.length();
  while (end > 0 && path.charAt(end - 1) == '/') {
    end--;
  }
  if (end == 0) {
    throw new IllegalArgumentException("Path has no name component: '" + path + "'");
  }
  return path.substring(path.lastIndexOf('/', end - 1) + 1, end);
}
/**
 * Fetch the snapshot project for a snapshot id, memoizing the result in the project cache.
 *
 * @param snapshotId the snapshot id
 * @return the (possibly cached) snapshot project
 */
private SnapshotProject getSnapshotProject(UUID snapshotId) {
  return snapshotProjectsCache.computeIfAbsent(
      snapshotId, id -> snapshotService.retrieveAvailableSnapshotProject(id));
}
/**
 * Fetch the cached snapshot state for a snapshot id, loading and caching it on first use.
 *
 * @param snapshotId the snapshot id
 * @return the (possibly cached) snapshot state
 */
private SnapshotCacheResult getSnapshot(UUID snapshotId) {
  return snapshotCache.computeIfAbsent(
      snapshotId, key -> new SnapshotCacheResult(snapshotService.retrieve(key)));
}
/**
 * Fetch the snapshot summary for a snapshot id, memoizing the result in the summary cache.
 *
 * @param snapshotId the snapshot id
 * @return the (possibly cached) snapshot summary
 */
private SnapshotSummaryModel getSnapshotSummary(UUID snapshotId) {
  return snapshotSummariesCache.computeIfAbsent(
      snapshotId, id -> snapshotService.retrieveSnapshotSummary(id));
}
/**
 * Immutable cache entry holding the subset of snapshot state that DRS lookups need, so the
 * full snapshot does not have to be re-read on every request.
 */
@VisibleForTesting
static class SnapshotCacheResult {
  /** Snapshot id. */
  private final UUID id;
  /** Whether the snapshot's files are self-hosted. Primitive boolean avoids the accidental
   * null-unboxing NPE a boxed Boolean field invites when read in conditionals. */
  private final boolean isSelfHosted;
  /** Billing profile of the snapshot's source dataset. */
  private final BillingProfileModel billingProfileModel;
  /** Google project of the snapshot, or null if it has no project resource. */
  private final String googleProjectId;
  /** Google project of the source dataset, or null if it has no project resource. */
  private final String datasetProjectId;

  public SnapshotCacheResult(Snapshot snapshot) {
    this.id = snapshot.getId();
    this.isSelfHosted = snapshot.isSelfHosted();
    this.billingProfileModel =
        snapshot.getSourceDataset().getDatasetSummary().getDefaultBillingProfile();
    var projectResource = snapshot.getProjectResource();
    this.googleProjectId =
        projectResource == null ? null : projectResource.getGoogleProjectId();
    var datasetProjectResource = snapshot.getSourceDataset().getProjectResource();
    this.datasetProjectId =
        datasetProjectResource == null ? null : datasetProjectResource.getGoogleProjectId();
  }

  public UUID getId() {
    return this.id;
  }
}
}
| Make sure to sign urls with the ingest account, if appropriate (#1373)
| src/main/java/bio/terra/service/filedata/DrsService.java | Make sure to sign urls with the ingest account, if appropriate (#1373) |
|
Java | bsd-3-clause | 69ad788f8c7a47995a52b79e3eee7818867333b0 | 0 | vincentml/basex,joansmith/basex,BaseXdb/basex,deshmnnit04/basex,joansmith/basex,drmacro/basex,BaseXdb/basex,vincentml/basex,JensErat/basex,BaseXdb/basex,deshmnnit04/basex,vincentml/basex,vincentml/basex,deshmnnit04/basex,deshmnnit04/basex,deshmnnit04/basex,vincentml/basex,dimitarp/basex,BaseXdb/basex,JensErat/basex,drmacro/basex,vincentml/basex,ksclarke/basex,vincentml/basex,drmacro/basex,drmacro/basex,deshmnnit04/basex,BaseXdb/basex,vincentml/basex,ksclarke/basex,deshmnnit04/basex,deshmnnit04/basex,dimitarp/basex,joansmith/basex,JensErat/basex,dimitarp/basex,BaseXdb/basex,vincentml/basex,joansmith/basex,dimitarp/basex,joansmith/basex,JensErat/basex,drmacro/basex,drmacro/basex,dimitarp/basex,joansmith/basex,dimitarp/basex,ksclarke/basex,drmacro/basex,dimitarp/basex,JensErat/basex,ksclarke/basex,dimitarp/basex,ksclarke/basex,deshmnnit04/basex,BaseXdb/basex,JensErat/basex,JensErat/basex,JensErat/basex,BaseXdb/basex,dimitarp/basex,drmacro/basex,ksclarke/basex,joansmith/basex,drmacro/basex,JensErat/basex,deshmnnit04/basex,vincentml/basex,JensErat/basex,drmacro/basex,drmacro/basex,joansmith/basex,dimitarp/basex,BaseXdb/basex,joansmith/basex,BaseXdb/basex,vincentml/basex,JensErat/basex,ksclarke/basex,vincentml/basex,deshmnnit04/basex,dimitarp/basex,joansmith/basex,dimitarp/basex,ksclarke/basex,ksclarke/basex,BaseXdb/basex,ksclarke/basex,BaseXdb/basex,joansmith/basex,ksclarke/basex,joansmith/basex,drmacro/basex,JensErat/basex,ksclarke/basex,deshmnnit04/basex | package org.basex.gui.view.project;
import static org.basex.gui.GUIConstants.*;
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import java.util.regex.*;
import org.basex.core.*;
import org.basex.gui.*;
import org.basex.gui.layout.*;
import org.basex.gui.view.editor.*;
import org.basex.io.*;
import org.basex.io.in.*;
import org.basex.util.*;
import org.basex.util.hash.*;
import org.basex.util.list.*;
/**
 * Project filter.
 *
 * <p>Provides two input fields that filter the project's file list: one for file name/path
 * patterns and one for file contents. Filtering runs on daemon threads; {@code threadID} is
 * incremented for every new query so that outdated worker threads can detect this and abort.
 *
 * @author BaseX Team 2005-14, BSD License
 * @author Christian Gruen
 */
final class ProjectFilter extends BaseXBack {
  /** Maximum number of filtered hits. */
  private static final int MAXHITS = 256;
  /** Input field for file name/path patterns. */
  private final BaseXTextField files;
  /** Input field for content search strings. */
  private final BaseXTextField contents;
  /** Project view. */
  private final ProjectView project;
  /** Cached file paths; filled lazily on the first filter run. */
  private final TokenList cache = new TokenList();
  /** Last file search. */
  private String lastFiles = "";
  /** Last content search. */
  private String lastContents = "";
  /** Running flag; true while a filter operation is in progress. */
  private boolean running;
  /** Current filter id; bumped on every refresh to invalidate older filter threads. */
  private int threadID;

  /**
   * Constructor.
   * @param view project view
   */
  public ProjectFilter(final ProjectView view) {
    project = view;
    layout(new BorderLayout(0, 2));

    files = new BaseXTextField(view.gui);
    files.addFocusListener(project.lastfocus);

    contents = new BaseXTextField(view.gui);
    contents.hint(Text.FIND_CONTENTS + Text.DOTS);
    contents.addFocusListener(project.lastfocus);

    add(files, BorderLayout.NORTH);
    add(contents, BorderLayout.CENTER);

    final KeyAdapter refreshKeys = new KeyAdapter() {
      @Override
      public void keyPressed(final KeyEvent e) {
        // forward navigation keys to the result list; map other shortcuts to list commands
        if(BaseXKeys.NEXTLINE.is(e) || BaseXKeys.PREVLINE.is(e) ||
           BaseXKeys.NEXTPAGE.is(e) || BaseXKeys.PREVPAGE.is(e)) {
          project.list.dispatchEvent(e);
        } else {
          for(final GUIPopupCmd cmd : project.list.commands) {
            if(cmd == null) continue;
            for(final BaseXKeys sc : cmd.shortcuts()) {
              if(sc.is(e)) {
                cmd.execute(view.gui);
                e.consume();
                return;
              }
            }
          }
        }
      }
      @Override
      public void keyReleased(final KeyEvent e) {
        refresh(false);
      }
    };
    files.addKeyListener(refreshKeys);
    contents.addKeyListener(refreshKeys);
    refreshLayout();
  }

  /**
   * Resets the filter cache.
   */
  void reset() {
    cache.reset();
    refresh(true);
  }

  /**
   * Initializes the file cache.
   * @param thread current thread id
   */
  private void init(final int thread) {
    if(cache.isEmpty()) {
      // show a wait message while the project directory is being traversed
      final TokenSet set = new TokenSet();
      set.add(Text.PLEASE_WAIT_D);
      project.list.setElements(set, null);
      add(project.root.file, thread);
    }
  }

  /**
   * Initializes the filter cache.
   * @param thread current thread id
   * @param root root directory
   */
  private void add(final IOFile root, final int thread) {
    for(final IOFile file : root.children()) {
      if(file.isDir()) {
        add(file, thread);
      } else {
        cache.add(file.path());
      }
      // newer thread has arrived
      if(threadID != thread) {
        cache.reset();
        return;
      }
    }
  }

  /**
   * Filters the entries.
   * @param file file search string
   * @param content content search string
   * @param thread thread id
   */
  private void filter(final String file, final String content, final int thread) {
    // wait when command is still running
    while(running) {
      Thread.yield();
      // newer thread has arrived
      if(threadID != thread) return;
    }

    // thread is accepted; start filtering
    running = true;
    files.setCursor(CURSORWAIT);
    contents.setCursor(CURSORWAIT);
    init(thread);

    // collect matches; the content query is decomposed into lower-case codepoints
    final TokenSet results = new TokenSet();
    final IntList il = new IntList();
    final TokenParser tp = new TokenParser(Token.token(content));
    while(tp.more()) il.add(Token.lc(tp.next()));
    if(filter(file, il.toArray(), thread, results)) {
      project.list.setElements(results, content.isEmpty() ? null : content);
    }

    files.setCursor(CURSORTEXT);
    contents.setCursor(CURSORTEXT);
    running = false;
  }

  /**
   * Refreshes the filter view.
   * @param force force refresh
   */
  void refresh(final boolean force) {
    final String file = files.getText();
    final String content = contents.getText();
    // skip if neither input has changed since the last run
    if(!force && lastFiles.equals(file) && lastContents.equals(content)) return;
    lastFiles = file;
    lastContents = content;
    // invalidate any filter thread that is still running
    ++threadID;

    final boolean list = !file.isEmpty() || !content.isEmpty();
    if(list) {
      final Thread t = new Thread() {
        @Override
        public void run() {
          filter(file, content, threadID);
        }
      };
      t.setDaemon(true);
      t.start();
    }
    project.showList(list);
  }

  /**
   * Filters the file search field.
   * @param ea calling editor
   */
  void find(final EditorArea ea) {
    final String string = ea.searchString();
    if(string != null) {
      contents.requestFocusInWindow();
      contents.setText(string);
      if(ea.opened()) {
        final String name = ea.file().name();
        final int i = name.lastIndexOf('.');
        final String file = files.getText();
        final String pattern = file.isEmpty() ? project.gui.gopts.get(GUIOptions.FILES) : file;
        // restrict the file pattern to the current file's extension if it does not match yet
        if(i != -1 && !pattern.contains("*") && !pattern.contains("?") ||
           !Pattern.compile(IOFile.regex(pattern)).matcher(name).matches()) {
          files.setText('*' + name.substring(i));
        }
      }
      refresh(false);
    } else {
      files.requestFocusInWindow();
    }
  }

  /**
   * Filters the file search field.
   * @param node node
   */
  void find(final ProjectNode node) {
    if(node != null) files.setText(node.file.path());
    refresh(false);
    files.requestFocusInWindow();
  }

  /**
   * Called when GUI design has changed.
   */
  public void refreshLayout() {
    final String filter = project.gui.gopts.get(GUIOptions.FILES).trim();
    files.hint(filter.isEmpty() ? Text.FIND_FILES + Text.DOTS : filter);
  }

  // PRIVATE METHODS ==============================================================================

  /**
   * Chooses tokens from the file cache that match the specified pattern.
   * @param file file pattern
   * @param search search string
   * @param thread current thread id
   * @param results search result
   * @return success flag
   */
  private boolean filter(final String file, final int[] search, final int thread,
      final TokenSet results) {
    // glob pattern
    final String pattern = file.isEmpty() ? project.gui.gopts.get(GUIOptions.FILES) : file;
    if(pattern.contains("*") || pattern.contains("?")) {
      final Pattern pt = Pattern.compile(IOFile.regex(pattern));
      for(final byte[] input : cache) {
        final int offset = offset(input, true);
        if(pt.matcher(Token.string(Token.substring(input, offset))).matches() &&
           filterContent(input, search, results)) return true;
        if(thread != threadID) return false;
      }
    }
    // starts-with, contains, camel case
    final byte[] pttrn = Token.replace(Token.lc(Token.token(pattern)), '\\', '/');
    final TokenSet exclude = new TokenSet();
    final boolean path = Token.indexOf(pttrn, '/') != -1;
    // try the match modes from strictest to loosest until the hit limit is reached
    for(int i = 0; i < (path ? 2 : 3); i++) {
      if(!filter(pttrn, search, thread, i, results, exclude, path)) return false;
    }
    return true;
  }

  /**
   * Chooses tokens from the file cache that match the specified pattern.
   * @param pattern file pattern
   * @param search search string
   * @param thread current thread id
   * @param mode search mode (0: starts-with, 1: contains, 2: camel case)
   * @param results search result
   * @param exclude exclude file from content search
   * @param path path flag
   * @return success flag
   */
  private boolean filter(final byte[] pattern, final int[] search, final int thread, final int mode,
      final TokenSet results, final TokenSet exclude, final boolean path) {
    if(results.size() < MAXHITS) {
      for(final byte[] input : cache) {
        // check if current file matches the pattern (on a lower-cased, slash-normalized copy;
        // the original bytes are kept for exclusion and content search)
        final byte[] lc = Token.replace(Token.lc(input), '\\', '/');
        final int offset = offset(lc, path);
        if(mode == 0 ? Token.startsWith(lc, pattern, offset) :
           mode == 1 ? Token.contains(lc, pattern, offset) :
           matches(lc, pattern, offset)) {
          if(!exclude.contains(input)) {
            exclude.add(input);
            if(filterContent(input, search, results)) return true;
          }
        }
        if(thread != threadID) return false;
      }
    }
    return true;
  }

  /**
   * Adds a file to the matches if the specified string is found.
   * Checks the file contents.
   * @param path file path
   * @param search search string
   * @param results search result
   * @return maximum number of results reached
   */
  private boolean filterContent(final byte[] path, final int[] search, final TokenSet results) {
    // accept file; check file contents
    if(filterContent(path, search) && !results.contains(path)) {
      results.add(path);
      if(results.size() >= MAXHITS) return true;
    }
    return false;
  }

  /**
   * Searches a string in a file.
   * @param path file path
   * @param search search string (lower-case codepoints)
   * @return success flag
   */
  private static boolean filterContent(final byte[] path, final int[] search) {
    final int cl = search.length;
    if(cl == 0) return true;

    try {
      final TextInput ti = new TextInput(new IOFile(Token.string(path)));
      try {
        // il buffers the lower-cased codepoints consumed during a partial match so that
        // scanning can resume one position later after a mismatch
        final IntList il = new IntList(cl - 1);
        int c = 0;
        while(true) {
          if(!il.isEmpty()) {
            if(il.deleteAt(0) == search[c++]) continue;
            c = 0;
          }
          while(true) {
            final int cp = ti.read();
            // stop at end of file or on codepoints that are invalid in XML
            if(cp == -1 || !XMLToken.valid(cp)) return false;
            final int lc = Token.lc(cp);
            if(c > 0) il.add(lc);
            if(lc == search[c]) {
              if(++c == cl) return true;
            } else {
              c = 0;
              break;
            }
          }
        }
      } finally {
        ti.close();
      }
    } catch(final IOException ex) {
      // file may not be accessible
      Util.debug(ex);
      return false;
    }
  }

  /**
   * Returns the offset after the last slash, or {@code 0} if full paths are to be processed.
   * @param input input string
   * @param path full path processing
   * @return resulting offset
   */
  private static int offset(final byte[] input, final boolean path) {
    if(path) return 0;
    // consider both separator styles; choose whichever occurs last
    final int a = Token.lastIndexOf(input, '\\');
    final int b = Token.lastIndexOf(input, '/');
    return (a > b ? a : b) + 1;
  }

  /**
   * Checks if the specified string matches a pattern (subsequence/camel-case match: all
   * pattern characters must occur in the input, in order, but not necessarily adjacent).
   * @param input input string
   * @param pattern pattern
   * @param off offset
   * @return result of check
   */
  private static boolean matches(final byte[] input, final byte[] pattern, final int off) {
    final int il = input.length, pl = pattern.length;
    int p = 0;
    for(int i = off; i < il && p < pl; i++) {
      if(pattern[p] == input[i]) p++;
    }
    return p == pl;
  }
}
| basex-core/src/main/java/org/basex/gui/view/project/ProjectFilter.java | package org.basex.gui.view.project;
import static org.basex.gui.GUIConstants.*;
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import java.util.regex.*;
import org.basex.core.*;
import org.basex.gui.*;
import org.basex.gui.layout.*;
import org.basex.gui.view.editor.*;
import org.basex.io.*;
import org.basex.io.in.*;
import org.basex.util.*;
import org.basex.util.hash.*;
import org.basex.util.list.*;
/**
* Project filter.
*
* @author BaseX Team 2005-14, BSD License
* @author Christian Gruen
*/
final class ProjectFilter extends BaseXBack {
/** Maximum number of filtered hits. */
private static final int MAXHITS = 256;
/** Files. */
private final BaseXTextField files;
/** Contents. */
private final BaseXTextField contents;
/** Project view. */
private final ProjectView project;
/** Cached file paths. */
private final TokenList cache = new TokenList();
/** Last file search. */
private String lastFiles = "";
/** Last content search. */
private String lastContents = "";
/** Running flag. */
private boolean running;
/** Current filter id. */
private int threadID;
/**
* Constructor.
* @param view project view
*/
public ProjectFilter(final ProjectView view) {
project = view;
layout(new BorderLayout(0, 2));
files = new BaseXTextField(view.gui);
files.addFocusListener(project.lastfocus);
contents = new BaseXTextField(view.gui);
contents.hint(Text.FIND_CONTENTS + Text.DOTS);
contents.addFocusListener(project.lastfocus);
add(files, BorderLayout.NORTH);
add(contents, BorderLayout.CENTER);
final KeyAdapter refreshKeys = new KeyAdapter() {
@Override
public void keyPressed(final KeyEvent e) {
if(BaseXKeys.NEXTLINE.is(e) || BaseXKeys.PREVLINE.is(e) ||
BaseXKeys.NEXTPAGE.is(e) || BaseXKeys.PREVPAGE.is(e)) {
project.list.dispatchEvent(e);
} else {
for(final GUIPopupCmd cmd : project.list.commands) {
if(cmd == null) continue;
for(final BaseXKeys sc : cmd.shortcuts()) {
if(sc.is(e)) {
cmd.execute(view.gui);
e.consume();
return;
}
}
}
}
}
@Override
public void keyReleased(final KeyEvent e) {
refresh(false);
}
};
files.addKeyListener(refreshKeys);
contents.addKeyListener(refreshKeys);
refreshLayout();
}
/**
* Resets the filter cache.
*/
void reset() {
cache.reset();
refresh(true);
}
/**
* Initializes the file cache.
* @param thread current thread id
*/
private void init(final int thread) {
if(cache.isEmpty()) {
final TokenSet set = new TokenSet();
set.add(Text.PLEASE_WAIT_D);
project.list.setElements(set, null);
add(project.root.file, thread);
}
}
/**
* Initializes the filter cache.
* @param thread current thread id
* @param root root directory
*/
private void add(final IOFile root, final int thread) {
for(final IOFile file : root.children()) {
if(file.isDir()) {
add(file, thread);
} else {
cache.add(file.path());
}
// newer thread has arrived
if(threadID != thread) {
cache.reset();
return;
}
}
}
/**
* Filters the entries.
* @param file file search string
* @param content content search string
* @param thread thread id
*/
private void filter(final String file, final String content, final int thread) {
// wait when command is still running
while(running) {
Thread.yield();
// newer thread has arrived
if(threadID != thread) return;
}
// thread is accepted; start filtering
running = true;
files.setCursor(CURSORWAIT);
contents.setCursor(CURSORWAIT);
init(thread);
// collect matches
final TokenSet results = new TokenSet();
final IntList il = new IntList();
final TokenParser tp = new TokenParser(Token.token(content));
while(tp.more()) il.add(Token.lc(tp.next()));
if(filter(file, il.toArray(), thread, results)) {
project.list.setElements(results, content.isEmpty() ? null : content);
}
files.setCursor(CURSORTEXT);
contents.setCursor(CURSORTEXT);
running = false;
}
/**
* Refreshes the filter view.
* @param force force refresh
*/
void refresh(final boolean force) {
final String file = files.getText();
final String content = contents.getText();
if(!force && lastFiles.equals(file) && lastContents.equals(content)) return;
lastFiles = file;
lastContents = content;
++threadID;
final boolean list = !file.isEmpty() || !content.isEmpty();
if(list) {
final Thread t = new Thread() {
@Override
public void run() {
filter(file, content, threadID);
}
};
t.setDaemon(true);
t.start();
}
project.showList(list);
}
/**
* Filters the file search field.
* @param ea calling editor
*/
void find(final EditorArea ea) {
final String string = ea.searchString();
if(string != null) {
contents.requestFocusInWindow();
contents.setText(string);
if(ea.opened()) {
final String name = ea.file().name();
final int i = name.lastIndexOf('.');
final String file = files.getText();
final String pattern = file.isEmpty() ? project.gui.gopts.get(GUIOptions.FILES) : file;
if(i != -1 && !pattern.contains("*") && !pattern.contains("?") ||
!Pattern.compile(IOFile.regex(pattern)).matcher(name).matches()) {
files.setText('*' + name.substring(i));
}
}
refresh(false);
} else {
files.requestFocusInWindow();
}
}
/**
* Filters the file search field.
* @param node node
*/
void find(final ProjectNode node) {
if(node != null) files.setText(node.file.path());
refresh(false);
files.requestFocusInWindow();
}
/**
* Called when GUI design has changed.
*/
public void refreshLayout() {
final String filter = project.gui.gopts.get(GUIOptions.FILES).trim();
files.hint(filter.isEmpty() ? Text.FIND_FILES + Text.DOTS : filter);
}
// PRIVATE METHODS ==============================================================================
/**
* Chooses tokens from the file cache that match the specified pattern.
* @param file file pattern
* @param search search string
* @param thread current thread id
* @param results search result
* @return success flag
*/
private boolean filter(final String file, final int[] search, final int thread,
final TokenSet results) {
// glob pattern
final String pattern = file.isEmpty() ? project.gui.gopts.get(GUIOptions.FILES) : file;
if(pattern.contains("*") || pattern.contains("?")) {
final Pattern pt = Pattern.compile(IOFile.regex(pattern));
for(final byte[] input : cache) {
final int offset = offset(input, true);
if(pt.matcher(Token.string(Token.substring(input, offset))).matches() &&
filterContent(input, search, results)) return true;
if(thread != threadID) return false;
}
}
// starts-with, contains, camel case
final byte[] pttrn = Token.replace(Token.lc(Token.token(pattern)), '\\', '/');
final TokenSet exclude = new TokenSet();
final boolean path = Token.indexOf(pttrn, '/') != -1;
for(int i = 0; i < (path ? 2 : 3); i++) {
if(!filter(pttrn, search, thread, i, results, exclude, path)) return false;
}
return true;
}
/**
* Chooses tokens from the file cache that match the specified pattern.
* @param pattern file pattern
* @param search search string
* @param thread current thread id
* @param mode search mode (0-2)
* @param results search result
* @param exclude exclude file from content search
* @param path path flag
* @return success flag
*/
private boolean filter(final byte[] pattern, final int[] search, final int thread, final int mode,
final TokenSet results, final TokenSet exclude, final boolean path) {
if(results.size() < MAXHITS) {
for(final byte[] in : cache) {
// check if current file matches the pattern
final byte[] input = Token.replace(Token.lc(in), '\\', '/');
final int offset = offset(input, path);
if(mode == 0 ? Token.startsWith(input, pattern, offset) :
mode == 1 ? Token.contains(input, pattern, offset) :
matches(input, pattern, offset)) {
if(!exclude.contains(input)) {
exclude.add(input);
if(filterContent(input, search, results)) return true;
}
}
if(thread != threadID) return false;
}
}
return true;
}
/**
* Adds a file to the matches if the specified string is found.
* Checks the file contents.
* @param path file path
* @param search search string
* @param results search result
* @return maximum number of results reached
*/
private boolean filterContent(final byte[] path, final int[] search, final TokenSet results) {
// accept file; check file contents
if(filterContent(path, search) && !results.contains(path)) {
results.add(path);
if(results.size() >= MAXHITS) return true;
}
return false;
}
/**
* Searches a string in a file.
* @param path file path
* @param search search string
* @return success flag
*/
private static boolean filterContent(final byte[] path, final int[] search) {
final int cl = search.length;
if(cl == 0) return true;
try {
final TextInput ti = new TextInput(new IOFile(Token.string(path)));
try {
final IntList il = new IntList(cl - 1);
int c = 0;
while(true) {
if(!il.isEmpty()) {
if(il.deleteAt(0) == search[c++]) continue;
c = 0;
}
while(true) {
final int cp = ti.read();
if(cp == -1 || !XMLToken.valid(cp)) return false;
final int lc = Token.lc(cp);
if(c > 0) il.add(lc);
if(lc == search[c]) {
if(++c == cl) return true;
} else {
c = 0;
break;
}
}
}
} finally {
ti.close();
}
} catch(final IOException ex) {
// file may not be accessible
Util.debug(ex);
return false;
}
}
/**
* Returns the offset after the last slash, or {@code 0} if full paths are to be processed.
* @param input input string
* @param path full path processing
* @return resulting offset
*/
private static int offset(final byte[] input, final boolean path) {
if(path) return 0;
final int a = Token.lastIndexOf(input, '\\');
final int b = Token.lastIndexOf(input, '/');
return (a > b ? a : b) + 1;
}
/**
* Checks if the specified string matches a pattern.
* @param input input string
* @param pattern pattern
* @param off offset
* @return result of check
*/
private static boolean matches(final byte[] input, final byte[] pattern, final int off) {
final int il = input.length, pl = pattern.length;
int p = 0;
for(int i = off; i < il && p < pl; i++) {
if(pattern[p] == input[i]) p++;
}
return p == pl;
}
}
| [FIX] Project filter: add original path to result list
| basex-core/src/main/java/org/basex/gui/view/project/ProjectFilter.java | [FIX] Project filter: add original path to result list |
|
Java | mit | 749631554ff96bcbb33001f0f1b3c21d24ba9151 | 0 | fadelakin/vortex | app/src/main/java/com/fisheradelakin/vortex/GetJSON.java | package com.fisheradelakin.vortex;
import android.content.Context;
import android.content.res.AssetManager;
import java.io.IOException;
import java.io.InputStream;
/**
* Created by Fisher on 1/30/15.
*/
public class GetJSON {
}
| remove class
| app/src/main/java/com/fisheradelakin/vortex/GetJSON.java | remove class |
||
Java | mit | 789cba832d073c9809f7a0162c2b622b9fb3b328 | 0 | weeryan17/FlareBot,FlareBot/FlareBot,binaryoverload/FlareBot | package stream.flarebot.flarebot.commands.moderation;
import net.dv8tion.jda.core.EmbedBuilder;
import net.dv8tion.jda.core.Permission;
import net.dv8tion.jda.core.entities.Member;
import net.dv8tion.jda.core.entities.Message;
import net.dv8tion.jda.core.entities.MessageEmbed;
import net.dv8tion.jda.core.entities.Role;
import net.dv8tion.jda.core.entities.TextChannel;
import net.dv8tion.jda.core.entities.User;
import stream.flarebot.flarebot.FlareBot;
import stream.flarebot.flarebot.commands.Command;
import stream.flarebot.flarebot.commands.CommandType;
import stream.flarebot.flarebot.objects.GuildWrapper;
import stream.flarebot.flarebot.permissions.Group;
import stream.flarebot.flarebot.permissions.PerGuildPermissions;
import stream.flarebot.flarebot.util.GeneralUtils;
import stream.flarebot.flarebot.util.MessageUtils;
import java.awt.Color;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
public class PermissionsCommand implements Command {
@Override
public void onCommand(User sender, GuildWrapper guild, TextChannel channel, Message message, String[] args, Member member) {
if (args.length > 2) {
if (args[0].equalsIgnoreCase("group")) {
String groupString = args[1];
if (args[2].equals("add")) {
if (args.length == 4) {
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exist!! You can create it with `" + getPrefix(channel.getGuild()) + "permissions group " + groupString + " create`", channel);
return;
} else {
if (!GeneralUtils.validPerm(args[3])) {
MessageUtils.sendErrorMessage("That is an invalid permission! Permissions start with `flarebot.` followed with a command name!\n" +
"**Example:** `flarebot.play`", channel);
return;
}
if (group.addPermission(args[3])) {
MessageUtils.sendSuccessMessage("Successfully added the permission `" + args[3] + "` to the group `" + groupString + "`", channel, sender);
return;
} else {
MessageUtils.sendErrorMessage("Couldn't add the permission (it probably already exists)", channel);
return;
}
}
}
} else if (args[2].equals("remove")) {
if (args.length == 4) {
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exist!!", channel);
return;
} else {
if (group.removePermission(args[3])) {
MessageUtils.sendSuccessMessage("Successfully removed the permission `" + args[3] + "` from the group `" + groupString + "`", channel, sender);
return;
} else {
MessageUtils.sendErrorMessage("Couldn't remove the permission (it probably didn't exist)", channel);
return;
}
}
}
} else if (args[2].equals("create")) {
if (args.length == 3) {
if (getPermissions(channel).addGroup(groupString)) {
MessageUtils.sendSuccessMessage("Successfully created group: `" + groupString + "`", channel, sender);
return;
} else {
MessageUtils.sendErrorMessage("That group already exists!!", channel);
return;
}
}
} else if (args[2].equals("delete")) {
if (args.length == 3) {
if (getPermissions(channel).getGroup(groupString) == null) {
MessageUtils.sendErrorMessage("That group doesn't exist!!", channel);
return;
} else {
getPermissions(channel).deleteGroup(groupString);
return;
}
}
} else if (args[2].equals("link")) {
if (args.length == 4) {
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exist!! You can create it with `" + getPrefix(channel.getGuild()) + "permissions group " + groupString + " create`", channel);
return;
} else {
Role role = GeneralUtils.getRole(args[3], guild.getGuildId());
if (role != null) {
group.linkRole(role.getId());
MessageUtils.sendSuccessMessage("Successfully linked the group `" + groupString + "` to the role `" + role.getName() + "`", channel, sender);
return;
} else {
MessageUtils.sendErrorMessage("That role doesn't exist!", channel);
return;
}
}
}
} else if (args[2].equals("unlink")) {
if (args.length == 3) {
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exist!!", channel);
return;
} else {
Role role = guild.getGuild().getRoleById(group.getRoleId());
if (role == null) {
MessageUtils.sendErrorMessage("Cannot unlink if a role isn't linked!!", channel);
return;
} else {
group.linkRole(null);
MessageUtils.sendSuccessMessage("Successfully unlinked the role " + role.getName() + " from the group " + group.getName(), channel, sender);
return;
}
}
}
} else if (args[2].equals("list")) {
if (args.length == 3 || args.length == 4) {
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exist!!", channel);
return;
} else {
int page = args.length == 4 ? Integer.valueOf(args[3]) : 1;
Set<String> perms = group.getPermissions();
List<String> permList = GeneralUtils.orderList(perms);
String list = getStringList(permList, page);
EmbedBuilder eb = MessageUtils.getEmbed(sender);
eb.addField("Perms", list, false);
eb.addField("Current page", String.valueOf(page), true);
int pageSize = 20;
int pages =
perms.size() < pageSize ? 1 : (perms.size() / pageSize) + (perms.size() % pageSize != 0 ? 1 : 0);
eb.addField("Pages", String.valueOf(pages), true);
eb.setColor(Color.CYAN);
channel.sendMessage(eb.build()).queue();
return;
}
}
} else if (args[2].equals("massadd")) {
if (args.length == 4) {
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exist!!", channel);
return;
} else {
List<Member> roleMembers;
String roleName = "";
if (args[3].equals("@everyone")) {
roleMembers = guild.getGuild().getMembers();
roleName = "everyone";
} else if (args[3].equals("@here")) {
roleMembers = channel.getMembers();
roleName = "here";
} else {
Role role = GeneralUtils.getRole(args[3], guild.getGuildId());
if (role != null) {
roleMembers = guild.getGuild().getMembersWithRoles(role);
} else {
MessageUtils.sendErrorMessage("That role doesn't exist!!", channel);
return;
}
}
for (Member user : roleMembers) {
getPermissions(channel).getUser(user).addGroup(group);
}
MessageUtils.sendSuccessMessage("Successfully added the group `" + groupString + "` to everyone in the role @" + roleName, channel, sender);
return;
}
}
}
} else if (args[0].equalsIgnoreCase("user")) {
String userString = args[1];
User user = GeneralUtils.getUser(userString, guild.getGuildId());
if (user == null) {
MessageUtils.sendErrorMessage("That user doesn't exist!!", channel);
return;
}
stream.flarebot.flarebot.permissions.User permUser =
getPermissions(channel).getUser(guild.getGuild().getMember(user));
if (args[2].equals("group")) {
if (args.length >= 4) {
if (args[3].equals("add")) {
if (args.length == 5) {
String groupString = args[4];
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exists!! You can create it with `" + getPrefix(channel.getGuild()) + "permissions group " + groupString + " create`", channel);
return;
}
permUser.addGroup(group);
MessageUtils.sendSuccessMessage("Successfully added the group `" + groupString + "` to " + user.getAsMention(), channel, sender);
return;
}
} else if (args[3].equals("remove")) {
if (args.length == 5) {
String groupString = args[4];
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exists!!", channel);
return;
}
if (permUser.removeGroup(group)) {
MessageUtils.sendSuccessMessage("Successfully removed the group `" + groupString + "` from " + user.getAsMention(), channel, sender);
return;
} else {
MessageUtils.sendErrorMessage("The user doesn't have that group!!", channel);
return;
}
}
} else if (args[3].equals("list")) {
int page = args.length == 5 ? Integer.valueOf(args[4]) : 1;
Set<String> groups = permUser.getGroups();
List<String> groupList = GeneralUtils.orderList(groups);
String list = getStringList(groupList, page);
EmbedBuilder eb = MessageUtils.getEmbed(sender);
eb.addField("Groups for " + MessageUtils.getTag(user), list, false);
eb.addField("Current page", String.valueOf(page), true);
int pageSize = 20;
int pages =
groups.size() < pageSize ? 1 : (groups.size() / pageSize) + (groups.size() % pageSize != 0 ? 1 : 0);
eb.addField("Pages", String.valueOf(pages), true);
eb.setColor(Color.CYAN);
channel.sendMessage(eb.build()).queue();
return;
}
}
} else if (args[2].equals("permission")) {
if (args.length >= 4) {
if (args[3].equals("add")) {
if (args.length == 5) {
if (!GeneralUtils.validPerm(args[4])) {
MessageUtils.sendErrorMessage("That is an invalid permission! Permissions start with `flarebot.` followed with a command name!\n" +
"**Example:** `flarebot.play`", channel);
return;
}
if (permUser.addPermission(args[4])) {
MessageUtils.sendSuccessMessage("Successfully added the permission `" + args[4] + "` to " + user.getAsMention(), channel, sender);
return;
} else {
MessageUtils.sendErrorMessage("The user doesn't have that permission!!", channel);
return;
}
}
} else if (args[3].equals("remove")) {
if (args.length == 5) {
if (permUser.removePermission(args[4])) {
MessageUtils.sendSuccessMessage("Successfully removed the permission `" + args[4] + "` from " + user.getAsMention(), channel, sender);
return;
} else {
MessageUtils.sendErrorMessage("The user already has that permission!!", channel);
return;
}
}
} else if (args[3].equals("list")) {
int page = args.length == 5 ? Integer.valueOf(args[4]) : 1;
Set<String> perms = permUser.getPermissions();
List<String> permList = GeneralUtils.orderList(perms);
String list = getStringList(permList, page);
EmbedBuilder eb = MessageUtils.getEmbed(sender);
eb.addField("Perms", list, false);
eb.addField("Current page", String.valueOf(page), true);
int pageSize = 20;
int pages =
perms.size() < pageSize ? 1 : (perms.size() / pageSize) + (perms.size() % pageSize != 0 ? 1 : 0);
eb.addField("Pages", String.valueOf(pages), true);
eb.setColor(Color.CYAN);
channel.sendMessage(eb.build()).queue();
return;
}
}
}
}
} else if (args.length >= 1) {
if (args[0].equalsIgnoreCase("groups")) {
if (this.getPermissions(channel).getListGroups().isEmpty()) {
channel.sendMessage(MessageUtils.getEmbed(sender)
.setColor(Color.RED)
.setDescription("There are no groups for this guild!")
.build()).queue();
return;
} else {
int page = args.length == 2 ? Integer.valueOf(args[4]) : 1;
Set<String> groups = this.getPermissions(channel).getGroups().keySet();
List<String> groupList = GeneralUtils.orderList(groups);
String list = getStringList(groupList, page);
EmbedBuilder eb = MessageUtils.getEmbed(sender);
eb.addField("Groups", list, false);
eb.addField("Current page", String.valueOf(page), true);
int pageSize = 20;
int pages =
groups.size() < pageSize ? 1 : (groups.size() / pageSize) + (groups.size() % pageSize != 0 ? 1 : 0);
eb.addField("Pages", String.valueOf(pages), true);
eb.setColor(Color.CYAN);
channel.sendMessage(eb.build()).queue();
return;
}
} else if (args[0].equalsIgnoreCase("reset")) {
guild.setPermissions(new PerGuildPermissions());
MessageUtils.sendSuccessMessage("Successfully reset perms", channel, sender);
return;
} else if (args[0].equalsIgnoreCase("restoredefault")) {
guild.getPermissions().createDefaultGroup();
MessageUtils.sendSuccessMessage("Successfully restored the Default group", channel, sender);
return;
}
}
EmbedBuilder usage = new EmbedBuilder();
usage.setTitle("Usage");
for (MessageEmbed.Field field : getEmbedUsage().getFields()) {
usage.addField(GeneralUtils.formatCommandPrefix(channel, field.getName()), GeneralUtils.formatCommandPrefix(channel, field.getValue()), field.isInline());
}
usage.setColor(Color.RED);
channel.sendMessage(usage.build()).queue();
}
private String getStringList(Collection<String> perms, int page) {
int pageSize = 20;
int pages = perms.size() < pageSize ? 1 : (perms.size() / pageSize) + (perms.size() % pageSize != 0 ? 1 : 0);
int start;
int end;
start = pageSize * (page - 1);
end = Math.min(start + pageSize, perms.size());
if (page > pages || page < 0) {
return null;
}
String[] permsList = new String[perms.size()];
permsList = perms.toArray(permsList);
permsList = Arrays.copyOfRange(permsList, start, end);
StringBuilder sb = new StringBuilder();
sb.append("```\n");
for (String perm : permsList) {
sb.append(perm + "\n");
}
sb.append("```");
return sb.toString();
}
@Override
public String getCommand() {
return "permissions";
}
@Override
public String[] getAliases() {
return new String[]{"perm", "perms"};
}
@Override
public String getDescription() {
return "Manages server-wide permissions for FlareBot.";
}
//TODO: Pagination
@Override
public String getUsage() {
return "**`{%}permissions group <group>` - All usage in this section starts with this**\n" +
"`add <perm>` - Adds a permission to a group\n" +
"`remove <perm>` - removes a perm from a group\n" +
"`create` - creates a group\n" +
"`delete` - deletes the group\n" +
"`link <role>` - links the group to a discord role\n" +
"`unlink` - unlinks it from a role\n" +
"`list [page]` - lists the permissions this group has\n" +
"`massadd <@everyone/@here/role>` - puts everyone with the giving role into the group\n" +
"\n" +
"**`{%}permissions user <user>` - All usage in this section starts with this**\n" +
"`group add <group>` - adds a group to this user\n" +
"`group remove <group>` - removes a group from this user\n" +
"`group list [page]` - lists the groups this user is in\n" +
"`permissions add <perm>` - adds a permissions to this user\n" +
"`permissions remove <perm>` - removes a permission from this user\n" +
"`permissions list [page]` - list the permmissions this user has (exulding those obtained from groups)\n\n" +
"`{%}permissions groups` - Lists all the groups in a server\n" +
"`{%}permissions reset` - Resets all of the guilds perms\n" +
"`{%}permissions restoredefault` - Restores the default group and adds any new default perms that might have been added";
}
public EmbedBuilder getEmbedUsage() {
EmbedBuilder eb = new EmbedBuilder();
eb.addField("`{%}permissions group <group>`",
"`add <perm>` - Adds a permission to a group\n" +
"`remove <perm>` - removes a perm from a group\n" +
"`create` - creates a group\n" +
"`delete` - deletes the group\n" +
"`link <role>` - links the group to a discord role\n" +
"`unlink` - unlinks it from a role\n" +
"`list [page]` - lists the permissions this group has\n" +
"`massadd <@everyone/@here/role>` - puts everyone with the giving role into the group", false);
eb.addField("`{%}permissions user <user>`",
"`group add <group>` - adds a group to this user\n" +
"`group remove <group>` - removes a group from this user\n" +
"`group list [page]` - lists the groups this user is in\n" +
"`permissions add <perm>` - adds a permissions to this user\n" +
"`permissions remove <perm>` - removes a permission from this user\n" +
"`permissions list [page]` - list the permmissions this user has (exulding those obtained from groups)", false);
eb.addField("Misc",
"`{%}permissions groups` - Lists all the groups in a server\n" +
"`{%}permissions reset` - Resets all of the guilds perms\n" +
"`{%}permissions restoredefault` - Restores the default group and adds any new default perms that might have been added", false);
return eb;
}
@Override
public CommandType getType() {
return CommandType.MODERATION;
}
@Override
public boolean isDefaultPermission() {
return false;
}
@Override
public EnumSet<Permission> getDiscordPermission() {
return EnumSet.of(Permission.MANAGE_PERMISSIONS);
}
}
| src/main/java/stream/flarebot/flarebot/commands/moderation/PermissionsCommand.java | package stream.flarebot.flarebot.commands.moderation;
import net.dv8tion.jda.core.EmbedBuilder;
import net.dv8tion.jda.core.Permission;
import net.dv8tion.jda.core.entities.Member;
import net.dv8tion.jda.core.entities.Message;
import net.dv8tion.jda.core.entities.MessageEmbed;
import net.dv8tion.jda.core.entities.Role;
import net.dv8tion.jda.core.entities.TextChannel;
import net.dv8tion.jda.core.entities.User;
import stream.flarebot.flarebot.FlareBot;
import stream.flarebot.flarebot.commands.Command;
import stream.flarebot.flarebot.commands.CommandType;
import stream.flarebot.flarebot.objects.GuildWrapper;
import stream.flarebot.flarebot.permissions.Group;
import stream.flarebot.flarebot.permissions.PerGuildPermissions;
import stream.flarebot.flarebot.util.GeneralUtils;
import stream.flarebot.flarebot.util.MessageUtils;
import java.awt.Color;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
public class PermissionsCommand implements Command {
@Override
public void onCommand(User sender, GuildWrapper guild, TextChannel channel, Message message, String[] args, Member member) {
if (args.length > 2) {
if (args[0].equalsIgnoreCase("group")) {
String groupString = args[1];
if (args[2].equals("add")) {
if (args.length == 4) {
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exist!! You can create it with `" + getPrefix(channel.getGuild()) + "permissions group " + groupString + " create`", channel);
return;
} else {
if (!GeneralUtils.validPerm(args[3])) {
MessageUtils.sendErrorMessage("That is an invalid permission! Permissions start with `flarebot.` followed with a command name!\n" +
"**Example:** `flarebot.play`", channel);
return;
}
if (group.addPermission(args[3])) {
MessageUtils.sendSuccessMessage("Successfully added the permission `" + args[3] + "` to the group `" + groupString + "`", channel, sender);
return;
} else {
MessageUtils.sendErrorMessage("Couldn't add the permission (it probably already exists)", channel);
return;
}
}
}
} else if (args[2].equals("remove")) {
if (args.length == 4) {
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exist!!", channel);
return;
} else {
if (group.removePermission(args[3])) {
MessageUtils.sendSuccessMessage("Successfully removed the permission `" + args[3] + "` from the group `" + groupString + "`", channel, sender);
return;
} else {
MessageUtils.sendErrorMessage("Couldn't remove the permission (it probably didn't exist)", channel);
return;
}
}
}
} else if (args[2].equals("create")) {
if (args.length == 3) {
if (getPermissions(channel).addGroup(groupString)) {
MessageUtils.sendSuccessMessage("Successfully created group: `" + groupString + "`", channel, sender);
return;
} else {
MessageUtils.sendErrorMessage("That group already exists!!", channel);
return;
}
}
} else if (args[2].equals("delete")) {
if (args.length == 3) {
if (getPermissions(channel).getGroup(groupString) == null) {
MessageUtils.sendErrorMessage("That group doesn't exist!!", channel);
return;
} else {
getPermissions(channel).deleteGroup(groupString);
return;
}
}
} else if (args[2].equals("link")) {
if (args.length == 4) {
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exist!! You can create it with `" + getPrefix(channel.getGuild()) + "permissions group " + groupString + " create`", channel);
return;
} else {
Role role = GeneralUtils.getRole(args[3], guild.getGuildId());
if (role != null) {
group.linkRole(role.getId());
MessageUtils.sendSuccessMessage("Successfully linked the group `" + groupString + "` to the role `" + role.getName() + "`", channel, sender);
return;
} else {
MessageUtils.sendErrorMessage("That role doesn't exist!", channel);
return;
}
}
}
} else if (args[2].equals("unlink")) {
if (args.length == 3) {
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exist!!", channel);
return;
} else {
Role role = guild.getGuild().getRoleById(group.getRoleId());
if (role == null) {
MessageUtils.sendErrorMessage("Cannot unlink if a role isn't linked!!", channel);
return;
} else {
group.linkRole(null);
MessageUtils.sendSuccessMessage("Successfully unlinked the role " + role.getName() + " from the group " + group.getName(), channel, sender);
return;
}
}
}
} else if (args[2].equals("list")) {
if (args.length == 3 || args.length == 4) {
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exist!!", channel);
return;
} else {
int page = args.length == 4 ? Integer.valueOf(args[3]) : 1;
Set<String> perms = group.getPermissions();
List<String> permList = GeneralUtils.orderList(perms);
String list = getStringList(permList, page);
EmbedBuilder eb = MessageUtils.getEmbed(sender);
eb.addField("Perms", list, false);
eb.addField("Current page", String.valueOf(page), true);
int pageSize = 20;
int pages =
perms.size() < pageSize ? 1 : (perms.size() / pageSize) + (perms.size() % pageSize != 0 ? 1 : 0);
eb.addField("Pages", String.valueOf(pages), true);
eb.setColor(Color.CYAN);
channel.sendMessage(eb.build()).queue();
return;
}
}
} else if (args[2].equals("massadd")) {
if (args.length == 4) {
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exist!!", channel);
return;
} else {
List<Member> roleMembers;
String roleName = "";
if (args[3].equals("@everyone")) {
roleMembers = guild.getGuild().getMembers();
roleName = "everyone";
} else if (args[3].equals("@here")) {
roleMembers = channel.getMembers();
roleName = "here";
} else {
Role role = GeneralUtils.getRole(args[3], guild.getGuildId());
if (role != null) {
roleMembers = guild.getGuild().getMembersWithRoles(role);
} else {
MessageUtils.sendErrorMessage("That role doesn't exist!!", channel);
return;
}
}
for (Member user : roleMembers) {
getPermissions(channel).getUser(user).addGroup(group);
}
MessageUtils.sendSuccessMessage("Successfully added the group `" + groupString + "` to everyone in the role @" + roleName, channel, sender);
return;
}
}
}
} else if (args[0].equalsIgnoreCase("user")) {
String userString = args[1];
User user = GeneralUtils.getUser(userString, guild.getGuildId());
if (user == null) {
MessageUtils.sendErrorMessage("That user doesn't exist!!", channel);
return;
}
stream.flarebot.flarebot.permissions.User permUser =
getPermissions(channel).getUser(guild.getGuild().getMember(user));
if (args[2].equals("group")) {
if (args.length >= 4) {
if (args[3].equals("add")) {
if (args.length == 5) {
String groupString = args[4];
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exists!! You can create it with `" + getPrefix(channel.getGuild()) + "permissions group " + groupString + " create`", channel);
return;
}
permUser.addGroup(group);
MessageUtils.sendSuccessMessage("Successfully added the group `" + groupString + "` to " + user.getAsMention(), channel, sender);
return;
}
} else if (args[3].equals("remove")) {
if (args.length == 5) {
String groupString = args[4];
Group group = getPermissions(channel).getGroup(groupString);
if (group == null) {
MessageUtils.sendErrorMessage("That group doesn't exists!!", channel);
return;
}
if (permUser.removeGroup(group)) {
MessageUtils.sendSuccessMessage("Successfully removed the group `" + groupString + "` from " + user.getAsMention(), channel, sender);
return;
} else {
MessageUtils.sendErrorMessage("The user doesn't have that group!!", channel);
return;
}
}
} else if (args[3].equals("list")) {
int page = args.length == 5 ? Integer.valueOf(args[4]) : 1;
Set<String> groups = permUser.getGroups();
List<String> groupList = GeneralUtils.orderList(groups);
String list = getStringList(groupList, page);
EmbedBuilder eb = MessageUtils.getEmbed(sender);
eb.addField("Groups for " + MessageUtils.getTag(user), list, false);
eb.addField("Current page", String.valueOf(page), true);
int pageSize = 20;
int pages =
groups.size() < pageSize ? 1 : (groups.size() / pageSize) + (groups.size() % pageSize != 0 ? 1 : 0);
eb.addField("Pages", String.valueOf(pages), true);
eb.setColor(Color.CYAN);
channel.sendMessage(eb.build()).queue();
return;
}
}
} else if (args[2].equals("permission")) {
if (args.length >= 4) {
if (args[3].equals("add")) {
if (args.length == 5) {
if (!GeneralUtils.validPerm(args[4])) {
MessageUtils.sendErrorMessage("That is an invalid permission! Permissions start with `flarebot.` followed with a command name!\n" +
"**Example:** `flarebot.play`", channel);
return;
}
if (permUser.addPermission(args[4])) {
MessageUtils.sendSuccessMessage("Successfully added the permission `" + args[4] + "` to " + user.getAsMention(), channel, sender);
return;
} else {
MessageUtils.sendErrorMessage("The user doesn't have that permission!!", channel);
return;
}
}
} else if (args[3].equals("remove")) {
if (args.length == 5) {
if (permUser.removePermission(args[4])) {
MessageUtils.sendSuccessMessage("Successfully removed the permission `" + args[4] + "` from " + user.getAsMention(), channel, sender);
return;
} else {
MessageUtils.sendErrorMessage("The user already has that permission!!", channel);
return;
}
}
} else if (args[3].equals("list")) {
int page = args.length == 5 ? Integer.valueOf(args[4]) : 1;
Set<String> perms = permUser.getPermissions();
List<String> permList = GeneralUtils.orderList(perms);
String list = getStringList(permList, page);
EmbedBuilder eb = MessageUtils.getEmbed(sender);
eb.addField("Perms", list, false);
eb.addField("Current page", String.valueOf(page), true);
int pageSize = 20;
int pages =
perms.size() < pageSize ? 1 : (perms.size() / pageSize) + (perms.size() % pageSize != 0 ? 1 : 0);
eb.addField("Pages", String.valueOf(pages), true);
eb.setColor(Color.CYAN);
channel.sendMessage(eb.build()).queue();
return;
}
}
}
}
} else if (args.length >= 1) {
if (args[0].equalsIgnoreCase("groups")) {
if (this.getPermissions(channel).getListGroups().isEmpty()) {
channel.sendMessage(MessageUtils.getEmbed(sender)
.setColor(Color.RED)
.setDescription("There are no groups for this guild!")
.build()).queue();
return;
} else {
int page = args.length == 2 ? Integer.valueOf(args[4]) : 1;
Set<String> groups = this.getPermissions(channel).getGroups().keySet();
List<String> groupList = GeneralUtils.orderList(groups);
String list = getStringList(groupList, page);
EmbedBuilder eb = MessageUtils.getEmbed(sender);
eb.addField("Groups", list, false);
eb.addField("Current page", String.valueOf(page), true);
int pageSize = 20;
int pages =
groups.size() < pageSize ? 1 : (groups.size() / pageSize) + (groups.size() % pageSize != 0 ? 1 : 0);
eb.addField("Pages", String.valueOf(pages), true);
eb.setColor(Color.CYAN);
channel.sendMessage(eb.build()).queue();
return;
}
} else if (args[0].equalsIgnoreCase("reset")) {
guild.setPermissions(new PerGuildPermissions());
MessageUtils.sendSuccessMessage("Successfully reset perms", channel, sender);
return;
} else if (args[0].equalsIgnoreCase("restoredefault")) {
guild.getPermissions().createDefaultGroup();
MessageUtils.sendSuccessMessage("Successfully restored the Default group", channel, sender);
return;
}
}
EmbedBuilder usage = new EmbedBuilder();
usage.setTitle("Usage");
for (MessageEmbed.Field field : getEmbedUsage().getFields()) {
usage.addField(GeneralUtils.formatCommandPrefix(channel, field.getName()), GeneralUtils.formatCommandPrefix(channel, field.getValue()), field.isInline());
}
usage.setColor(Color.RED);
channel.sendMessage(usage.build()).queue();
}
/**
 * Renders one page (20 entries per page) of the given strings as a Discord
 * code block.
 *
 * @param perms the full collection of entries to paginate
 * @param page  1-based page number requested by the user
 * @return the formatted page, or {@code null} if the page number is out of range
 */
private String getStringList(Collection<String> perms, int page) {
    int pageSize = 20;
    int pages = perms.size() < pageSize ? 1 : (perms.size() / pageSize) + (perms.size() % pageSize != 0 ? 1 : 0);
    // BUG FIX: pages are 1-based, so anything below 1 is invalid. The old
    // check (page < 0) let page 0 through, which produced a negative start
    // index and an ArrayIndexOutOfBoundsException in Arrays.copyOfRange.
    if (page > pages || page < 1) {
        return null;
    }
    int start = pageSize * (page - 1);
    int end = Math.min(start + pageSize, perms.size());
    String[] permsList = perms.toArray(new String[0]);
    permsList = Arrays.copyOfRange(permsList, start, end);
    StringBuilder sb = new StringBuilder();
    sb.append("```\n");
    for (String perm : permsList) {
        sb.append(perm).append("\n");
    }
    sb.append("```");
    return sb.toString();
}
@Override
public String getCommand() {
    // Primary command name users type; see getAliases() for shorthands.
    return "permissions";
}
@Override
public String[] getAliases() {
    // Shorthand names that invoke the same command.
    return new String[]{"perm", "perms"};
}
@Override
public String getDescription() {
    // One-line summary shown in the help listing.
    return "Manages server-wide permissions for FlareBot.";
}
//TODO: Pagination
@Override
public String getUsage() {
    // The full usage is too large for one string; getEmbedUsage() builds the
    // detailed embed, and this just points users at it.
    return "Run `{%}permissions` to see the usage";
}
/**
 * Builds the multi-field embed describing every permissions sub-command.
 * Field names/values still contain the raw `{%}` prefix placeholder; the
 * caller runs them through GeneralUtils.formatCommandPrefix before sending.
 *
 * @return the populated usage embed (color/title left to the caller)
 */
public EmbedBuilder getEmbedUsage() {
    EmbedBuilder eb = new EmbedBuilder();
    eb.addField("`{%}permissions group <group>`",
            "`add <perm>` - Adds a permission to a group\n" +
                    "`remove <perm>` - removes a perm from a group\n" +
                    "`create` - creates a group\n" +
                    "`delete` - deletes the group\n" +
                    "`link <role>` - links the group to a discord role\n" +
                    "`unlink` - unlinks it from a role\n" +
                    "`list [page]` - lists the permissions this group has\n" +
                    // typo fix: "giving role" -> "given role"
                    "`massadd <@everyone/@here/role>` - puts everyone with the given role into the group", false);
    eb.addField("`{%}permissions user <user>`",
            "`group add <group>` - adds a group to this user\n" +
                    "`group remove <group>` - removes a group from this user\n" +
                    "`group list [page]` - lists the groups this user is in\n" +
                    // typo fix: "adds a permissions" -> "adds a permission"
                    "`permissions add <perm>` - adds a permission to this user\n" +
                    "`permissions remove <perm>` - removes a permission from this user\n" +
                    // typo fixes: "permmissions" -> "permissions", "exulding" -> "excluding"
                    "`permissions list [page]` - list the permissions this user has (excluding those obtained from groups)", false);
    eb.addField("Misc",
            "`{%}permissions groups` - Lists all the groups in a server\n" +
                    "`{%}permissions reset` - Resets all of the guild's perms\n" +
                    "`{%}permissions restoredefault` - Restores the default group and adds any new default perms that might have been added", false);
    return eb;
}
@Override
public CommandType getType() {
    // Groups this command under the moderation category in help output.
    return CommandType.MODERATION;
}
@Override
public boolean isDefaultPermission() {
    // Not usable by everyone by default; requires explicit permission.
    return false;
}
@Override
public EnumSet<Permission> getDiscordPermission() {
    // Discord-side permission that also grants access to this command.
    return EnumSet.of(Permission.MANAGE_PERMISSIONS);
}
}
| Revert "Changed the usage string do it doesn't break with _usage"
This reverts commit 34f1d4519129c60bfa57ff0d1e899ed78151ebb1.
| src/main/java/stream/flarebot/flarebot/commands/moderation/PermissionsCommand.java | Revert "Changed the usage string do it doesn't break with _usage" |
|
Java | mit | error: pathspec 'test/abc/player/VoiceTest.java' did not match any file(s) known to git
| 3cc0a589d88897d82886b43d7e765eb3a5e46e63 | 1 | mcyuan00/ABC-music-player | package abc.player;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
/**
 * Tests for {@code Voice.getPlayerElements}: converting measures of notes and
 * chords into timed player elements (ticks-per-quarter = 12 in these tests).
 */
public class VoiceTest {

    @Test(expected = AssertionError.class)
    public void testAssertionsEnabled() {
        assert false; // make sure assertions are enabled with VM argument: -ea
    }

    /**
     * Two measures of simple quarter notes, one after another: each note
     * should start 12 ticks after the previous one.
     */
    @Test
    public void testGetPlayerElementsSimpleNotes() {
        List<Music> notes = new ArrayList<Music>();
        notes.add(new Note(new Fraction(1, 4), 'A', 0));
        notes.add(new Note(new Fraction(1, 4), 'B', 0));
        notes.add(new Note(new Fraction(1, 4), 'C', 0));
        notes.add(new Note(new Fraction(1, 4), 'D', 0));
        Measure m = new Measure(notes, false, false, false, false);

        List<Music> notes2 = new ArrayList<Music>();
        // BUG FIX: these four notes were previously added to `notes`, which
        // left the second measure empty and made expected elements 4..7
        // unreachable (hidden by the loop bound below, see size assertion).
        notes2.add(new Note(new Fraction(1, 4), 'A', 0));
        notes2.add(new Note(new Fraction(1, 4), 'B', 0));
        notes2.add(new Note(new Fraction(1, 4), 'C', 0));
        notes2.add(new Note(new Fraction(1, 4), 'D', 0));
        Measure m2 = new Measure(notes2, false, false, false, false);

        Voice v = new Voice("voice", Arrays.asList(m, m2));
        List<PlayerElement> p = v.getPlayerElements(0, 12, new Fraction(1, 4));

        List<PlayerElement> expected = new ArrayList<PlayerElement>();
        expected.add(new PlayerElement(new Pitch('A'), 0, 12));
        expected.add(new PlayerElement(new Pitch('B'), 12, 12));
        expected.add(new PlayerElement(new Pitch('C'), 24, 12));
        expected.add(new PlayerElement(new Pitch('D'), 36, 12));
        expected.add(new PlayerElement(new Pitch('A'), 48, 12));
        expected.add(new PlayerElement(new Pitch('B'), 60, 12));
        expected.add(new PlayerElement(new Pitch('C'), 72, 12));
        expected.add(new PlayerElement(new Pitch('D'), 84, 12));

        // Compare sizes first so missing elements fail loudly instead of the
        // old behavior of silently comparing only the shared prefix.
        assertEquals(expected.size(), p.size());
        for (int i = 0; i < expected.size(); i++) {
            assertEquals(expected.get(i), p.get(i));
        }
    }

    /**
     * A measure of simple notes followed by a measure containing a chord:
     * all chord notes must share the same start tick (60) with their own
     * individual durations.
     */
    @Test
    public void testGetPlayerElementsSimpleAndChords() {
        List<Music> notes = new ArrayList<Music>();
        notes.add(new Note(new Fraction(1, 4), 'A', 0));
        notes.add(new Note(new Fraction(1, 4), 'B', 0));
        notes.add(new Note(new Fraction(1, 4), 'C', 0));
        notes.add(new Note(new Fraction(1, 4), 'D', 0));
        Measure m = new Measure(notes, false, false, false, false);

        List<Music> notes2 = new ArrayList<Music>();
        notes2.add(new Note(new Fraction(1, 4), 'A', 0));
        // chord of three notes with differing durations
        List<Music> triplet = new ArrayList<Music>();
        triplet.add(new Note(new Fraction(1, 3), 'B', 0));
        triplet.add(new Note(new Fraction(2, 3), 'C', 0));
        triplet.add(new Note(new Fraction(1, 4), 'D', 0));
        notes2.add(new Chord(triplet));
        Measure m2 = new Measure(notes2, false, false, false, false);

        Voice v = new Voice("voice", Arrays.asList(m, m2));
        List<PlayerElement> p = v.getPlayerElements(0, 12, new Fraction(1, 4));

        List<PlayerElement> expected = new ArrayList<PlayerElement>();
        expected.add(new PlayerElement(new Pitch('A'), 0, 12));
        expected.add(new PlayerElement(new Pitch('B'), 12, 12));
        expected.add(new PlayerElement(new Pitch('C'), 24, 12));
        expected.add(new PlayerElement(new Pitch('D'), 36, 12));
        expected.add(new PlayerElement(new Pitch('A'), 48, 12));
        expected.add(new PlayerElement(new Pitch('B'), 60, 16));
        expected.add(new PlayerElement(new Pitch('C'), 60, 32));
        expected.add(new PlayerElement(new Pitch('D'), 60, 12));

        assertEquals(expected.size(), p.size());
        for (int i = 0; i < expected.size(); i++) {
            assertEquals(expected.get(i), p.get(i));
        }
    }
}
| test/abc/player/VoiceTest.java | add voiceTest
| test/abc/player/VoiceTest.java | add voiceTest |
|
Java | mit | error: pathspec 'src/com/hackerearth/TaskScheduler.java' did not match any file(s) known to git
| a8c3abb68cdd67a09a90e31f47118407858ebfd2 | 1 | sureshsajja/CodeRevisited,sureshsajja/CodingProblems | package com.hackerearth;
import java.util.Arrays;
import java.util.Collections;
import java.util.Scanner;
/**
* For a task i, it takes one day to start a job, and ki days to complete.
* For given n tasks, schedule it effectively such that it takes min days to complete
* <p/>
* input:
* 2 (No of tasks)
* 3
* 1
* <p/>
* output:
* 4
*/
public class TaskScheduler {

    /**
     * Reads the number of tasks followed by each task's completion time from
     * stdin, then prints the minimum number of days to finish them all.
     */
    public static void main(String[] args) throws Exception {
        Scanner scanner = new Scanner(System.in);
        int K = scanner.nextInt();
        int[] tasks = new int[K];
        for (int i = 0; i < K; i++) {
            tasks[i] = scanner.nextInt();
        }
        System.out.println(minDays(tasks));
    }

    /**
     * Computes the minimum number of days to finish all tasks, where starting
     * a task takes one day and task i then needs completionTimes[i] further
     * days to complete (started tasks run in parallel).
     *
     * Greedy: start the longest tasks first. The answer is the number of
     * start days plus whatever completion time is still pending at the end.
     *
     * @param completionTimes per-task completion times; not modified
     * @return the minimum total number of days (0 when there are no tasks)
     */
    public static int minDays(int[] completionTimes) {
        int[] sorted = completionTimes.clone();
        Arrays.sort(sorted); // ascending; iterate from the back for longest-first
        int time = 0;    // days spent starting tasks so far
        int pending = 0; // days until the last-finishing started task completes
        for (int i = sorted.length - 1; i >= 0; i--) {
            if (pending > 0) {
                pending--; // one day elapses while starting the next task
            }
            time++;
            if (pending < sorted[i]) {
                pending = sorted[i];
            }
        }
        return time + pending;
    }
}
| src/com/hackerearth/TaskScheduler.java | For a task i, it takes one day to start a job, and ki days to complete. For given n tasks, schedule it effectively such that it takes min days to complete
| src/com/hackerearth/TaskScheduler.java | For a task i, it takes one day to start a job, and ki days to complete. For given n tasks, schedule it effectively such that it takes min days to complete |
|
Java | mit | error: pathspec 'src/main/java/leetcode/Problem698.java' did not match any file(s) known to git
| e4683853b70f49690f89e261d84c5c03c173c165 | 1 | fredyw/leetcode,fredyw/leetcode,fredyw/leetcode,fredyw/leetcode | package leetcode;
/**
* https://leetcode.com/problems/partition-to-k-equal-sum-subsets/
*/
public class Problem698 {
    /**
     * Returns true if {@code nums} can be partitioned into {@code k}
     * non-empty subsets whose sums are all equal.
     *
     * Strategy: the total must divide evenly by k; then backtrack, filling
     * one subset at a time up to the per-subset target sum.
     *
     * @param nums the numbers to partition (not modified)
     * @param k    the number of subsets
     * @return true if such a partition exists
     */
    public boolean canPartitionKSubsets(int[] nums, int k) {
        if (nums == null || nums.length == 0 || k <= 0) {
            return false;
        }
        int sum = 0;
        int max = 0;
        for (int n : nums) {
            sum += n;
            max = Math.max(max, n);
        }
        if (sum % k != 0) {
            return false; // total can't split evenly into k parts
        }
        int target = sum / k;
        if (max > target) {
            return false; // a single element already overflows one subset
        }
        return fill(nums, new boolean[nums.length], k, 0, 0, target);
    }

    /**
     * Backtracking helper: tries to grow the current subset to {@code target}
     * using unused elements from index {@code start} on, then recurses to
     * fill the remaining subsets.
     */
    private boolean fill(int[] nums, boolean[] used, int subsetsLeft, int current, int start, int target) {
        if (subsetsLeft == 1) {
            return true; // remaining unused elements sum exactly to target
        }
        if (current == target) {
            // Current subset complete; start the next one from scratch.
            return fill(nums, used, subsetsLeft - 1, 0, 0, target);
        }
        for (int i = start; i < nums.length; i++) {
            if (used[i] || current + nums[i] > target) {
                continue;
            }
            used[i] = true;
            if (fill(nums, used, subsetsLeft, current + nums[i], i + 1, target)) {
                return true;
            }
            used[i] = false;
        }
        return false;
    }

    public static void main(String[] args) {
        Problem698 prob = new Problem698();
        System.out.println(prob.canPartitionKSubsets(new int[]{4, 3, 2, 3, 5, 2, 1}, 4)); // true
    }
}
| src/main/java/leetcode/Problem698.java | Skeleton for problem 698
| src/main/java/leetcode/Problem698.java | Skeleton for problem 698 |
|
Java | mit | error: pathspec 'src/org/sunflow/core/RenderObject.java' did not match any file(s) known to git
| 5fc76538f11e88a64c8ce41a175468d595105ce0 | 1 | fpsunflower/sunflow,fpsunflower/sunflow,k-matsuzaki/sunflow,k-matsuzaki/sunflow,fpsunflower/sunflow,matheusabrantesgadelha/sunflow,fpsunflower/sunflow,k-matsuzaki/sunflow,matheusabrantesgadelha/sunflow,matheusabrantesgadelha/sunflow,fpsunflower/sunflow,matheusabrantesgadelha/sunflow,k-matsuzaki/sunflow,monkstone/sunflow,matheusabrantesgadelha/sunflow,monkstone/sunflow,monkstone/sunflow,k-matsuzaki/sunflow,k-matsuzaki/sunflow,fpsunflower/sunflow,matheusabrantesgadelha/sunflow,monkstone/sunflow,monkstone/sunflow | package org.sunflow.core;
import org.sunflow.SunflowAPI;
/**
 * This is the base interface for all public rendering object interfaces. It
 * handles incremental updates via {@link ParameterList} objects.
 */
public interface RenderObject {
    /**
     * Update this object given a list of parameters. This method is guaranteed
     * to be called at least once on every object, but it should correctly
     * handle empty parameter lists. This means that the object should be in a
     * valid state from the time it is constructed. This method should also
     * return true or false depending on whether the update was successful or
     * not.
     *
     * @param pl list of parameters to read from
     * @param api reference to the current scene
     * @return <code>true</code> if the update is successful,
     *         <code>false</code> otherwise
     */
    public boolean update(ParameterList pl, SunflowAPI api);
}
| src/org/sunflow/core/RenderObject.java | Added missing file from previous commit |
|
Java | mit | error: pathspec 'src/com/blarg/gdx/math/MathHelpers.java' did not match any file(s) known to git
| eca790e624a6f6969a8d0e6205dc1ca2521d1939 | 1 | gered/gdx-toolbox | package com.blarg.gdx.math;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.math.Vector3;
/**
 * Static math utilities for 2D/3D angles, directions and interpolation.
 *
 * NOTE(review): NOT thread-safe — several methods write into the shared
 * static scratch vector {@code v2tmpA}.
 */
public final class MathHelpers {
    // Shared scratch vector used by the angle/direction helpers to avoid
    // per-call allocation; this is what makes the class single-threaded only.
    static final Vector2 v2tmpA = new Vector2();

    public static final float EPSILON = 0.0000000001f;

    // Cardinal directions as 2D angles in degrees (0 degrees = +x axis).
    public static final float UP_2D = 90.0f;
    public static final float DOWN_2D = 270.0f;
    public static final float LEFT_2D = 180.0f;
    public static final float RIGHT_2D = 0.0f;

    public static final Vector2 UP_VECTOR2 = getDirectionVector2(UP_2D); // new Vector2(0.0f, 1.0f);
    public static final Vector2 DOWN_VECTOR2 = getDirectionVector2(DOWN_2D); // new Vector2(0.0f, -1.0f);
    public static final Vector2 LEFT_VECTOR2 = getDirectionVector2(LEFT_2D); // new Vector2(-1.0f, 0.0f);
    public static final Vector2 RIGHT_VECTOR2 = getDirectionVector2(RIGHT_2D); // new Vector2(1.0f, 0.0f)

    // 3D unit axis constants: up is +y, forward is -z.
    public static final Vector3 UP_VECTOR3 = new Vector3(0.0f, 1.0f, 0.0f);
    public static final Vector3 DOWN_VECTOR3 = new Vector3(0.0f, -1.0f, 0.0f);
    public static final Vector3 FORWARD_VECTOR3 = new Vector3(0.0f, 0.0f, -1.0f);
    public static final Vector3 BACKWARD_VECTOR3 = new Vector3(0.0f, 0.0f, 1.0f);
    public static final Vector3 LEFT_VECTOR3 = new Vector3(-1.0f, 0.0f, 0.0f);
    public static final Vector3 RIGHT_VECTOR3 = new Vector3(1.0f, 0.0f, 0.0f);

    /** Writes the unit vector pointing at {@code degrees} into {@code result}. */
    public static void getDirectionVector2(float degrees, Vector2 result) {
        result.set(1.0f, 0.0f);
        result.setAngle(degrees);
    }

    /**
     * Writes a unit direction lying in the xz-plane for a rotation about the
     * y axis; the y component is always zero.
     */
    public static void getDirectionVector3FromYAxis(float yAxisDegrees, Vector3 result) {
        result.y = 0.0f;
        // NOTE(review): the -90 offset presumably aligns 0 degrees with the
        // same heading convention as the 2D constants above — confirm.
        float adjustedAngle = rolloverClamp(yAxisDegrees - 90.0f, 0.0f, 360.0f);
        getPointOnCircle(1.0f, adjustedAngle, v2tmpA);
        result.x = v2tmpA.x;
        result.z = v2tmpA.y;
    }

    /**
     * Writes a unit direction computed from yaw/pitch angles.
     * NOTE(review): z gets sin(pitch) while x/y are scaled by cos(pitch),
     * which looks like a z-up convention — inconsistent with the y-up axis
     * constants above. Confirm which convention callers expect.
     */
    public static void getDirectionVector3FromAngles(float yawDegrees, float pitchDegrees, Vector3 result) {
        result.x = MathUtils.cosDeg(yawDegrees) * MathUtils.cosDeg(pitchDegrees);
        result.y = MathUtils.sinDeg(yawDegrees) * MathUtils.cosDeg(pitchDegrees);
        result.z = MathUtils.sinDeg(pitchDegrees);
    }

    /** Returns the angle in degrees of the vector from {@code b} to {@code a}. */
    public static float getAngleBetween2D(final Vector2 a, final Vector2 b) {
        v2tmpA.set(a);
        v2tmpA.sub(b);
        return v2tmpA.angle();
    }

    /** Writes the point at {@code degrees} on a circle of {@code radius} into {@code result}. */
    public static void getPointOnCircle(float radius, float degrees, Vector2 result) {
        result.x = radius * MathUtils.cosDeg(degrees);
        result.y = radius * MathUtils.sinDeg(degrees);
    }

    /**
     * Converts spherical coordinates (radius, inclination measured from +y,
     * azimuth) to Cartesian coordinates with y as the up axis.
     */
    public static void getCartesianCoordsFromSpherical(float radius, float inclination, float azimuth, Vector3 result) {
        result.x = radius * MathUtils.sinDeg(inclination) * MathUtils.sinDeg(azimuth);
        result.y = radius * MathUtils.cosDeg(inclination);
        result.z = radius * MathUtils.sinDeg(inclination) * MathUtils.cosDeg(azimuth);
    }

    /** Relative float comparison using the default {@link #EPSILON}. */
    public static boolean areAlmostEqual(float a, float b) {
        return areAlmostEqual(a, b, EPSILON);
    }

    /**
     * Relative float comparison: true when the difference is within
     * {@code epsilon} times the larger magnitude of the two values.
     */
    public static boolean areAlmostEqual(float a, float b, float epsilon)
    {
        float diff = Math.abs(a - b);
        a = Math.abs(a);
        b = Math.abs(b);
        float largest = (b > a) ? b : a;
        return (diff <= largest * epsilon);
    }

    /** Wraps {@code value} into [min, max] by repeatedly adding/subtracting the range size. */
    public static float rolloverClamp(float value, float min, float max) {
        float temp = value;
        float range = Math.abs(max - min);
        do {
            if (temp < min)
                temp += range;
            if (temp > max)
                temp -= range;
        } while (temp < min || temp > max);
        return temp;
    }

    /** Integer overload of {@link #rolloverClamp(float, float, float)}. */
    public static int rolloverClamp(int value, int min, int max) {
        int temp = value;
        int range = Math.abs(max - min);
        do {
            if (temp < min)
                temp += range;
            if (temp > max)
                temp -= range;
        } while (temp < min || temp > max);
        return temp;
    }

    /** Linear interpolation from {@code a} to {@code b} by factor {@code t}. */
    public static float lerp(float a, float b, float t) {
        return a + (b - a) * t;
    }

    /**
     * Integer lerp.
     * NOTE(review): {@code t} is an int, so only t == 0 or t == 1 yield values
     * inside [a, b]; confirm this overload is actually useful as written.
     */
    public static int lerp(int a, int b, int t) {
        return a + (b - a) * t;
    }

    /** Inverse of {@link #lerp(float, float, float)}: recovers t from a lerped value. */
    public static float inverseLerp(float a, float b, float lerpValue) {
        return (lerpValue - a) / (b - a);
    }

    /**
     * Integer inverse lerp.
     * NOTE(review): integer division truncates, so this returns 0 for most
     * inputs — confirm intended.
     */
    public static int inverseLerp(int a, int b, int lerpValue) {
        return (lerpValue - a) / (b - a);
    }

    /**
     * Remaps {@code value} from one range to another.
     * NOTE(review): the formula never subtracts {@code originalMin}, so it is
     * only a correct remap when originalMin == 0 — confirm intended.
     */
    public static float scaleRange(float value, float originalMin, float originalMax, float newMin, float newMax) {
        return (value / ((originalMax - originalMin) / (newMax - newMin))) + newMin;
    }

    /** Integer overload of scaleRange; same originalMin caveat applies, plus integer truncation. */
    public static int scaleRange(int value, int originalMin, int originalMax, int newMin, int newMax) {
        return (value / ((originalMax - originalMin) / (newMax - newMin))) + newMin;
    }

    /** Hermite-smoothed interpolation between low and high; t is clamped to [0, 1]. */
    public static float smoothStep(float low, float high, float t) {
        float n = MathUtils.clamp(t, 0.0f, 1.0f);
        return lerp(low, high, (n * n) * (3.0f - (2.0f * n)));
    }

    // convenience overloads that should not really be used except in non-performance-critical situations
    // (each one allocates a fresh result vector per call)

    public static Vector2 getDirectionVector2(float degrees) {
        Vector2 result = new Vector2();
        getDirectionVector2(degrees, result);
        return result;
    }

    public static Vector3 getDirectionVector3FromYAxis(float yAxisDegrees) {
        Vector3 result = new Vector3();
        getDirectionVector3FromYAxis(yAxisDegrees, result);
        return result;
    }

    public static Vector3 getDirectionVector3FromAngles(float yawDegrees, float pitchDegrees) {
        Vector3 result = new Vector3();
        getDirectionVector3FromAngles(yawDegrees, pitchDegrees, result);
        return result;
    }

    public static Vector2 getPointOnCircle(float radius, float degrees) {
        Vector2 result = new Vector2();
        getPointOnCircle(radius, degrees, result);
        return result;
    }
}
| src/com/blarg/gdx/math/MathHelpers.java | add MathHelpers
| src/com/blarg/gdx/math/MathHelpers.java | add MathHelpers |
|
Java | mit | error: pathspec 'src/test/java/org/takes/rq/RqMethodTest.java' did not match any file(s) known to git
| 17c6440b78191c2a3afb72df55c6508275bffdec | 1 | xupyprmv/takes,pecko/takes,mstzn36/takes,georgeyanev/takes,ikhvostenkov/takes,pecko/takes,erimerturk/takes,essobedo/takes,antonini/takes,hs3180/takes,hs3180/takes,bdragan/takes,simonjenga/takes,simonjenga/takes,ekondrashev/takes,mstzn36/takes,xupyprmv/takes,dalifreire/takes,antonini/takes,erimerturk/takes,ikhvostenkov/takes,Bertram25/takes,bdragan/takes,essobedo/takes,dalifreire/takes,yegor256/takes,georgeyanev/takes,ekondrashev/takes,RamIndani/takes,Bertram25/takes,RamIndani/takes,yegor256/takes | /**
* The MIT License (MIT)
*
* Copyright (c) 2015 Yegor Bugayenko
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.takes.rq;
import java.io.IOException;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Test;
/**
* Test case for {@link org.takes.rq.RqMethod}.
* @author Dmitry Zaytsev ([email protected])
* @version $Id$
* @since 0.9.1
*/
public final class RqMethodTest {
/**
* RqMethod can get a method.
* @throws IOException If some problem inside
*/
@Test
public void getMethod() throws IOException {
MatcherAssert.assertThat(
new RqMethod(new RqFake(RqMethod.POST)).method(),
Matchers.equalTo(RqMethod.POST)
);
}
}
| src/test/java/org/takes/rq/RqMethodTest.java | #48 add unit test for RqMethod
| src/test/java/org/takes/rq/RqMethodTest.java | #48 add unit test for RqMethod |
|
Java | mit | error: pathspec 'src/main/java/sotechat/ServletInitializer.java' did not match any file(s) known to git
| c6134cb62475d4409e35ab77423fe6d518897636 | 1 | PauliNiva/Sotechat,PauliNiva/Sotechat,PauliNiva/Sotechat | package sotechat;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.context.web.SpringBootServletInitializer;
/**
 * Boots the Spring application when deployed as a WAR inside an external
 * servlet container (as opposed to running the embedded server directly).
 */
public class ServletInitializer extends SpringBootServletInitializer {

    @Override
    protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
        // Register the application's main configuration class with the builder.
        return application.sources(Application.class);
    }
}
| src/main/java/sotechat/ServletInitializer.java | Added servletinitializer
| src/main/java/sotechat/ServletInitializer.java | Added servletinitializer |
|
Java | mit | error: pathspec 'src/test/java/com/fluxchess/pulse/PulseTest.java' did not match any file(s) known to git
| fd2668778407032e9bf7d2511645ecf1bc957662 | 1 | jvoegele/pulse,jvoegele/pulse,jvoegele/pulse,fluxroot/pulse,fluxroot/pulse | /*
* Copyright 2013-2014 the original author or authors.
*
* This file is part of Pulse Chess.
*
* Pulse Chess is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Pulse Chess is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Pulse Chess. If not, see <http://www.gnu.org/licenses/>.
*/
package com.fluxchess.pulse;
import com.fluxchess.jcpi.commands.*;
import com.fluxchess.jcpi.models.GenericBoard;
import com.fluxchess.jcpi.models.GenericColor;
import com.fluxchess.jcpi.models.GenericMove;
import com.fluxchess.jcpi.models.GenericPosition;
import com.fluxchess.jcpi.protocols.IProtocolHandler;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
/**
 * End-to-end smoke test: drives the Pulse engine through a scripted
 * UCI-style command session via a fake protocol handler.
 */
public class PulseTest {

    // Scripted engine commands, consumed one at a time by ProtocolHandler.receive().
    private final BlockingQueue<IEngineCommand> commands = new LinkedBlockingQueue<>();

    @Before
    public void setUp() {
        commands.clear();

        // Put a default command list into the queue for each test:
        // initialize, readiness handshake, new game, then analyze the
        // standard position after 1. c4.
        commands.add(new EngineInitializeRequestCommand());
        commands.add(new EngineReadyRequestCommand("test"));
        commands.add(new EngineNewGameCommand());
        commands.add(new EngineAnalyzeCommand(
                new GenericBoard(GenericBoard.STANDARDSETUP),
                Arrays.asList(new GenericMove(GenericPosition.c2, GenericPosition.c4)))
        );
    }

    @Test
    public void testClock() {
        // Test if our time management works: 1 second per side, no increment.
        // The engine must produce a best move, at which point ProtocolHandler
        // enqueues the quit command and run() returns.
        EngineStartCalculatingCommand command = new EngineStartCalculatingCommand();
        command.setClock(GenericColor.WHITE, 1000L);
        command.setClockIncrement(GenericColor.WHITE, 0L);
        command.setClock(GenericColor.BLACK, 1000L);
        command.setClockIncrement(GenericColor.BLACK, 0L);
        commands.add(command);

        new Pulse(new ProtocolHandler()).run();
    }

    /** Fake protocol endpoint: feeds scripted commands and checks engine replies. */
    private class ProtocolHandler implements IProtocolHandler {

        @Override
        public IEngineCommand receive() throws IOException {
            IEngineCommand command = null;
            try {
                command = commands.take(); // blocks until the next scripted command
            } catch (InterruptedException e) {
                fail();
            }
            return command;
        }

        @Override
        public void send(ProtocolInitializeAnswerCommand command) {
        }

        @Override
        public void send(ProtocolReadyAnswerCommand command) {
            // Token must round-trip unchanged from the readiness request.
            assertEquals("test", command.token);
        }

        @Override
        public void send(ProtocolBestMoveCommand command) {
            // Engine produced a move: end the session.
            commands.add(new EngineQuitCommand());
        }

        @Override
        public void send(ProtocolInformationCommand command) {
        }
    }
}
| src/test/java/com/fluxchess/pulse/PulseTest.java | Re-add PulseTest.java
| src/test/java/com/fluxchess/pulse/PulseTest.java | Re-add PulseTest.java |
|
Java | mit | error: pathspec 'src/main/java/cz/jcu/prf/uai/javamugs/logic/Report.java' did not match any file(s) known to git
| 2d2fbf12325b28992467b3efed7a44bb53926caa | 1 | JavaMugs/CloneHero | package cz.jcu.prf.uai.javamugs.logic;
import sun.reflect.generics.reflectiveObjects.NotImplementedException;
public class Report {
private long score;
private int multiplier;
public Report() {
// TODO Auto-generated constructor stub
}
public long getScore() {
return score;
}
public long getMultiplier() {
return multiplier;
}
public Chord getChordToDraw() {
throw new NotImplementedException();
}
}
| src/main/java/cz/jcu/prf/uai/javamugs/logic/Report.java | Add Report class | src/main/java/cz/jcu/prf/uai/javamugs/logic/Report.java | Add Report class |
|
Java | mit | error: pathspec 'remo-common/src/main/java/org/stevedowning/remo/internal/common/future/CompletionFuture.java' did not match any file(s) known to git
| 9bbba2d32d567b7d84801cad155e75c60ddeb9d3 | 1 | steve-downing/Remo | package org.stevedowning.remo.internal.common.future;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
/**
 * A {@link BasicFuture} that carries no value: completing is the signal.
 * NOTE(review): relies on BasicFuture.setVal(null) marking the future done —
 * confirm against BasicFuture's implementation.
 */
public class CompletionFuture extends BasicFuture<Void> {
    /** Blocks until this future completes. */
    public void await() throws InterruptedException, ExecutionException {
        get();
    }

    /** Blocks until this future completes or the given timeout elapses. */
    public void await(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException {
        get(timeout, unit);
    }

    /** Marks this future as completed, releasing all waiters. */
    public void setCompleted() {
        setVal(null);
    }
}
| remo-common/src/main/java/org/stevedowning/remo/internal/common/future/CompletionFuture.java | Initial cut of CompletionFuture
| remo-common/src/main/java/org/stevedowning/remo/internal/common/future/CompletionFuture.java | Initial cut of CompletionFuture |
|
Java | cc0-1.0 | error: pathspec 'Java/BigInteger/BaseConversion.java' did not match any file(s) known to git
| 104d733d287005f6d50896bdb9ad22e7c341b4c2 | 1 | shohan4556/Ionic-Proton,shohan4556/Ionic-Proton,shohan4556/Ionic-Proton | import java.util.*;
import java.math.BigInteger;
// Demo: converting integers between number bases with java.math.BigInteger.
class all_base_conversion{
    public static void main(String args[]){
        // Convert decimal to any base via BigInteger.toString(radix).
        String n="5547";
        BigInteger number=new BigInteger(n);
        System.out.println("Number : "+number);
        System.out.println("BInary : "+number.toString(2)); // converted to Binary
        System.out.println("Octal : "+number.toString(8)); // converted to Octal
        System.out.println("Hexadecimal : "+number.toString(16)); // converted to Hexadecimal

        // Convert from any base back to decimal via the
        // BigInteger(String, radix) constructor.
        String binary="10110011";
        BigInteger num=new BigInteger(binary,2); // binary to Decimal
        BigInteger hex=new BigInteger("15ab",16); // hexadecimal to Decimal
        System.out.println("\n"+"Number :"+binary);
        System.out.println("Binary to Decimal :"+num);
        System.out.println("Hexadecimal to Decimal :"+hex);
        System.out.println("Hexadecimal to Binary :"+hex.toString(2));
    }
}
| Java/BigInteger/BaseConversion.java | Create BaseConversion.java
file name should be "all_base_conversion" | Java/BigInteger/BaseConversion.java | Create BaseConversion.java |
|
Java | mpl-2.0 | error: pathspec 'qadevOOo/tests/java/mod/_fwk/LayoutManager.java' did not match any file(s) known to git
| 23839e4959eb2c04b26a4b485d35ba9afb87db6d | 1 | JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core | /*************************************************************************
*
* $RCSfile: LayoutManager.java,v $
*
* $Revision: 1.2 $
*
* last change: $Date: 2004-02-25 18:14:05 $
*
* The Contents of this file are made available subject to the terms of
* either of the following licenses
*
* - GNU Lesser General Public License Version 2.1
* - Sun Industry Standards Source License Version 1.1
*
* Sun Microsystems Inc., October, 2000
*
* GNU Lesser General Public License Version 2.1
* =============================================
* Copyright 2000 by Sun Microsystems, Inc.
* 901 San Antonio Road, Palo Alto, CA 94303, USA
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License version 2.1, as published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston,
* MA 02111-1307 USA
*
*
* Sun Industry Standards Source License Version 1.1
* =================================================
* The contents of this file are subject to the Sun Industry Standards
* Source License Version 1.1 (the "License"); You may not use this file
* except in compliance with the License. You may obtain a copy of the
* License at http://www.openoffice.org/license.html.
*
* Software provided under this License is provided on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
* WITHOUT LIMITATION, WARRANTIES THAT THE SOFTWARE IS FREE OF DEFECTS,
* MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE, OR NON-INFRINGING.
* See the License for the specific provisions governing your rights and
* obligations concerning the Software.
*
* The Initial Developer of the Original Code is: Sun Microsystems, Inc.
*
* Copyright: 2000 by Sun Microsystems, Inc.
*
* All Rights Reserved.
*
* Contributor(s): _______________________________________
*
*
************************************************************************/
package mod._fwk;
import com.sun.star.beans.PropertyValue;
import com.sun.star.beans.XPropertySet;
import com.sun.star.frame.XController;
import com.sun.star.frame.XFrame;
import com.sun.star.frame.XModel;
import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.lang.XSingleServiceFactory;
import com.sun.star.uno.UnoRuntime;
import com.sun.star.uno.XInterface;
import drafts.com.sun.star.ui.XModuleUIConfigurationManagerSupplier;
import ifc.ui._XUIConfiguration;
import java.io.PrintWriter;
import com.sun.star.lang.EventObject;
import com.sun.star.text.XText;
import com.sun.star.text.XTextCursor;
import com.sun.star.text.XTextDocument;
import com.sun.star.util.XCloseable;
import drafts.com.sun.star.ui.ConfigurationEvent;
import drafts.com.sun.star.ui.XUIConfigurationManager;
import drafts.com.sun.star.ui.XUIConfigurationManagerSupplier;
import lib.StatusException;
import lib.TestCase;
import lib.TestEnvironment;
import lib.TestParameters;
import util.WriterTools;
/**
 * Test case providing the frame's LayoutManager as the object under test.
 */
public class LayoutManager extends TestCase {
    XInterface xManager = null;   // the LayoutManager instance under test
    XTextDocument xTextDoc;       // the Writer document backing the frame

    /**
     * Cleanup: close the created document.
     * @param tParam The test parameters.
     * @param log The log writer.
     */
    protected void cleanup(TestParameters tParam, PrintWriter log) {
        log.println(" disposing xTextDoc ");
        try {
            XCloseable closer = (XCloseable) UnoRuntime.queryInterface(
                    XCloseable.class, xTextDoc);
            closer.close(true);
        } catch (com.sun.star.util.CloseVetoException e) {
            log.println("couldn't close document");
        } catch (com.sun.star.lang.DisposedException e) {
            log.println("couldn't close document");
        }
    }

    /**
     * Create test environment:
     * <ul>
     *  <li>Create test doc</li>
     *  <li>Get the frame</li>
     *  <li>Get the LayoutManager from the frame</li>
     * </ul>
     * @param tParam The test parameters.
     * @param log The log writer.
     * @return The test environment.
     */
    protected TestEnvironment createTestEnvironment(TestParameters tParam, PrintWriter log) {
        TestEnvironment tEnv = null;
        XMultiServiceFactory xMSF = (XMultiServiceFactory)tParam.getMSF();
        log.println("Creating instance...");
        xTextDoc = WriterTools.createTextDoc(xMSF);

        // Fill the document with some text so the frame has real content.
        XText xText = xTextDoc.getText();
        XTextCursor xTextCursor = xText.createTextCursor();
        for (int i = 0; i < 11; i++) {
            xText.insertString(xTextCursor, "A sample text and why not? ", false);
        }

        XFrame xFrame = xTextDoc.getCurrentController().getFrame();
        XPropertySet xProp = (XPropertySet)UnoRuntime.queryInterface(XPropertySet.class, xFrame);
        try {
            // The layout manager is exposed as a property of the frame.
            Object any = xProp.getPropertyValue("LayoutManager");
            xManager = (XInterface)UnoRuntime.queryInterface(XInterface.class, any);
        }
        catch(com.sun.star.beans.UnknownPropertyException e) {
            e.printStackTrace(log);
            throw new StatusException("Could not get property 'LayoutManager' from the current frame.", e);
        }
        catch(com.sun.star.lang.WrappedTargetException e) {
            e.printStackTrace(log);
            throw new StatusException("Could not get property 'LayoutManager' from the current frame.", e);
        }

        // just to make sure, it's the right one.
        log.println("TestObject: " + util.utils.getImplName(xManager));
        tEnv = new TestEnvironment(xManager);
        tEnv.addObjRelation("XLayoutManager.Frame", xTextDoc.getCurrentController().getFrame());
        return tEnv;
    }
}
| qadevOOo/tests/java/mod/_fwk/LayoutManager.java | INTEGRATION: CWS layoutmanager (1.1.2); FILE ADDED
2004/02/19 09:04:13 sg 1.1.2.1: #i25017#NEW: initial version
| qadevOOo/tests/java/mod/_fwk/LayoutManager.java | INTEGRATION: CWS layoutmanager (1.1.2); FILE ADDED 2004/02/19 09:04:13 sg 1.1.2.1: #i25017#NEW: initial version |
|
Java | agpl-3.0 | d7c147d9bef95b697d50b930a6c80aba1b672e3c | 0 | nate-sentjens/tigase-xmpp-java,wangningbo/tigase-server,pivotal-nathan-sentjens/tigase-xmpp-java,wangningbo/tigase-server,cgvarela/tigase-server,pivotal-nathan-sentjens/tigase-xmpp-java,sourcebits-praveenkh/Tagase,f24-ag/tigase,fanout/tigase-server,fanout/tigase-server,caiyingyuan/tigase71,wangningbo/tigase-server,Smartupz/tigase-server,fanout/tigase-server,fanout/tigase-server,pivotal-nathan-sentjens/tigase-xmpp-java,f24-ag/tigase,wangningbo/tigase-server,wangningbo/tigase-server,nate-sentjens/tigase-xmpp-java,Smartupz/tigase-server,caiyingyuan/tigase71,fanout/tigase-server,f24-ag/tigase,nate-sentjens/tigase-xmpp-java,pivotal-nathan-sentjens/tigase-xmpp-java,amikey/tigase-server,Smartupz/tigase-server,nate-sentjens/tigase-xmpp-java,sourcebits-praveenkh/Tagase,pivotal-nathan-sentjens/tigase-xmpp-java,Smartupz/tigase-server,fanout/tigase-server,caiyingyuan/tigase71,sourcebits-praveenkh/Tagase,amikey/tigase-server,caiyingyuan/tigase71,cgvarela/tigase-server,cgvarela/tigase-server,amikey/tigase-server,sourcebits-praveenkh/Tagase,cgvarela/tigase-server,amikey/tigase-server,caiyingyuan/tigase71,f24-ag/tigase,f24-ag/tigase,nate-sentjens/tigase-xmpp-java,Smartupz/tigase-server,cgvarela/tigase-server,Smartupz/tigase-server,amikey/tigase-server,wangningbo/tigase-server,nate-sentjens/tigase-xmpp-java,cgvarela/tigase-server,wangningbo/tigase-server,caiyingyuan/tigase71,f24-ag/tigase,sourcebits-praveenkh/Tagase,sourcebits-praveenkh/Tagase,amikey/tigase-server,pivotal-nathan-sentjens/tigase-xmpp-java | /*
* Tigase Jabber/XMPP Server
* Copyright (C) 2004-2007 "Artur Hefczyc" <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. Look for COPYING file in the top folder.
* If not, see http://www.gnu.org/licenses/.
*
* $Rev$
* Last modified by $Author$
* $Date$
*/
package tigase.server.xmppsession;
//import tigase.auth.TigaseConfiguration;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.security.Security;
import java.util.Arrays;
import java.util.Collection;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.LinkedHashMap;
import java.util.Queue;
import java.util.Timer;
import java.util.TimerTask;
import java.util.UUID;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.Enumeration;
import javax.script.Bindings;
import javax.script.ScriptEngineFactory;
import javax.script.ScriptEngineManager;
import tigase.auth.TigaseSaslProvider;
import tigase.conf.Configurable;
import tigase.db.DataOverwriteException;
import tigase.db.NonAuthUserRepository;
import tigase.db.RepositoryFactory;
import tigase.db.TigaseDBException;
import tigase.db.UserAuthRepository;
import tigase.db.UserNotFoundException;
import tigase.db.UserRepository;
import tigase.disco.ServiceEntity;
import tigase.disco.ServiceIdentity;
import tigase.disco.XMPPService;
import tigase.server.AbstractMessageReceiver;
import tigase.server.Command;
import tigase.server.Packet;
import tigase.server.Permissions;
import tigase.server.Priority;
import tigase.server.ReceiverEventHandler;
import tigase.server.XMPPServer;
import tigase.stats.StatRecord;
import tigase.util.JIDUtils;
import tigase.util.PriorityQueue;
import tigase.xml.Element;
import tigase.xmpp.Authorization;
import tigase.xmpp.NotAuthorizedException;
import tigase.xmpp.PacketErrorTypeException;
import tigase.xmpp.ProcessorFactory;
import tigase.xmpp.StanzaType;
import tigase.xmpp.XMPPPostprocessorIfc;
import tigase.xmpp.XMPPPreprocessorIfc;
import tigase.xmpp.XMPPProcessorIfc;
import tigase.xmpp.XMPPResourceConnection;
import tigase.xmpp.XMPPSession;
import tigase.xmpp.XMPPStopListenerIfc;
import tigase.xmpp.ConnectionStatus;
import tigase.xmpp.XMPPPacketFilterIfc;
import static tigase.server.xmppsession.SessionManagerConfig.*;
/**
* Class SessionManager
*
*
* Created: Tue Nov 22 07:07:11 2005
*
* @author <a href="mailto:[email protected]">Artur Hefczyc</a>
* @author <a href="mailto:[email protected]">Peter Sandström</a> - multi-threadin
* support for pluggins processors.
* @version $Rev$
*/
public class SessionManager extends AbstractMessageReceiver
implements Configurable, XMPPService, SessionManagerHandler {
	/**
	 * Variable <code>log</code> is a class logger.
	 */
	private static final Logger log =
		Logger.getLogger(SessionManager.class.getName());

	// Session-data key under which packets are parked while a connection is on hold.
	protected static final String SESSION_PACKETS = "session-packets";
	// Ad-hoc command node prefix used to recognise admin script commands.
	protected static final String ADMIN_COMMAND_NODE =
		"http://jabber.org/protocol/admin";

	// Repository with user data.
	private UserRepository user_repository = null;
	// Repository used for user authentication.
	private UserAuthRepository auth_repository = null;
	// Restricted repository view handed to plugins for non-authenticated access.
	private NonAuthUserRepository naUserRepository = null;
	// Built-in packet filter run before/after the plugin chain.
	private PacketFilter filter = null;

	//private String[] hostnames = {"localhost"};
	// JIDs with administrative privileges.
	private String[] admins = {"admin@localhost"};
	// JIDs trusted to send privileged commands (admins are implicitly trusted,
	// see isTrusted()).
	private String[] trusted = {"admin@localhost"};
	//private String[] anon_peers = {"admin@localhost"};

	// All user sessions keyed by bare JID (node@domain).
	private ConcurrentHashMap<String, XMPPSession> sessionsByNodeId =
		new ConcurrentHashMap<String, XMPPSession>();
	// All resource connections keyed by connection ID (the packet "from" address).
	private ConcurrentHashMap<String, XMPPResourceConnection> connectionsByFrom =
		new ConcurrentHashMap<String, XMPPResourceConnection>();

	// Plugin chains, each keyed by plugin ID.
	private Map<String, XMPPPreprocessorIfc> preProcessors =
		new ConcurrentSkipListMap<String, XMPPPreprocessorIfc>();
	private Map<String, ProcessorThreads> processors =
		new ConcurrentSkipListMap<String, ProcessorThreads>();
	private Map<String, XMPPPostprocessorIfc> postProcessors =
		new ConcurrentSkipListMap<String, XMPPPostprocessorIfc>();
	private Map<String, XMPPStopListenerIfc> stopListeners =
		new ConcurrentSkipListMap<String, XMPPStopListenerIfc>();
	// Per-plugin configuration maps keyed by plugin ID.
	private Map<String, Map<String, Object>> plugin_config =
		new ConcurrentSkipListMap<String, Map<String, Object>>();
	private Map<String, XMPPPacketFilterIfc> outFilters =
		new ConcurrentSkipListMap<String, XMPPPacketFilterIfc>();
	// Registered ad-hoc admin command scripts keyed by command ID.
	private Map<String, AdminCommandIfc> adminCommands =
		new ConcurrentSkipListMap<String, AdminCommandIfc>();
	// Timer used to drop connections which do not authenticate in time.
	// (Thread name contains a historical typo, kept as-is.)
	private Timer authenticationWatchdog = new Timer("SM authentocation watchdog");
	// Scripting support for admin command scripts.
	private ScriptEngineManager scriptEngineManager = new ScriptEngineManager();
	private ConnectionCheckCommandHandler connectionCheckCommandHandler =
		new ConnectionCheckCommandHandler();

	//private Set<String> anonymous_domains = new HashSet<String>();
	//private XMPPResourceConnection serverSession = null;

	// Service discovery data for this component.
	private ServiceEntity serviceEntity = null;
	//	private ServiceEntity adminDisco = null;

	// Statistics counters.
	private long closedConnections = 0;
	private long authTimeouts = 0;
	private int maxPluginsNo = 0;

	// Timer logging out idle sessions, created in start(), cancelled in stop().
	private Timer reaperTask = null;
	// How often the reaper runs (milliseconds).
	private long reaperInterval = 60 * 1000;
	// Idle time after which a session is reaped (milliseconds).
	private long maxIdleTime = 86400 * 1000;
	@Override
	public void start() {
		super.start();
		// Periodic task logging out sessions idle for longer than maxIdleTime.
		// Runs as a daemon timer so it does not block JVM shutdown.
		reaperTask = new Timer("Session reaper task", true);
		reaperTask.schedule(new TimerTask() {
			@Override
			public void run() {
				long currentTime = System.currentTimeMillis();
				for (Enumeration<XMPPResourceConnection> e = connectionsByFrom.elements(); e.hasMoreElements(); ) {
					XMPPResourceConnection xrc = e.nextElement();
					// Skip the server's own internal session.
					if (!"session-id-sess-man".equals(xrc.getSessionId())) {
						// Reap only connections which are idle beyond the limit
						// AND older than one reaper interval (grace period for
						// freshly created connections).
						if (currentTime - xrc.getLastAccessed() > maxIdleTime && currentTime - xrc.getCreationTime() > reaperInterval) {
							if (log.isLoggable(Level.WARNING)) {
								log.warning("Logging out " + xrc.getSessionId() + " after >" + (maxIdleTime/1000) + " seconds of inactivity");
							}
							try {
								xrc.logout();
							} catch (NotAuthorizedException ex) {
								if (log.isLoggable(Level.WARNING)) {
									log.warning("Could not logout " + xrc.getSessionId() + ": " + ex.getMessage());
								}
							}
						}
					}
				}
			}
		}, reaperInterval, reaperInterval);
	}
@Override
public void stop() {
super.stop();
reaperTask.cancel();
reaperTask = null;
}
	/**
	 * Sets the component name and builds the service discovery data:
	 * the component identity plus one disco item per built-in admin
	 * script command (add / remove script). Also logs the available
	 * scripting engines.
	 *
	 * @param name the component name assigned in the configuration.
	 */
	@Override
	public void setName(String name) {
		super.setName(name);
		serviceEntity = new ServiceEntity(name, "sm", "Session manager");
		serviceEntity.addIdentities(
			new ServiceIdentity("component", "sm", "Session manager"));
		// Register the "add script" ad-hoc command and expose it via disco.
		AdminCommandIfc command = new AddScriptCommand();
		command.init(AdminCommandIfc.ADD_SCRIPT_CMD, "New command script");
		adminCommands.put(command.getCommandId(), command);
		ServiceEntity item = new ServiceEntity(getName(),
			"http://jabber.org/protocol/admin#" + command.getCommandId(),
			command.getDescription());
		item.addIdentities(
			new ServiceIdentity("component", "generic", command.getDescription()),
			new ServiceIdentity("automation", "command-node", command.getDescription()));
		item.addFeatures(CMD_FEATURES);
		serviceEntity.addItems(item);
		// Register the "remove script" ad-hoc command the same way.
		command = new RemoveScriptCommand();
		command.init(AdminCommandIfc.DEL_SCRIPT_CMD, "Remove command script");
		adminCommands.put(command.getCommandId(), command);
		item = new ServiceEntity(getName(),
			"http://jabber.org/protocol/admin#" + command.getCommandId(),
			command.getDescription());
		item.addIdentities(
			new ServiceIdentity("component", "generic", command.getDescription()),
			new ServiceIdentity("automation", "command-node", command.getDescription()));
		item.addFeatures(CMD_FEATURES);
		serviceEntity.addItems(item);
		// Log which scripting engines are available for admin scripts.
		List<ScriptEngineFactory> scriptFactories =
			scriptEngineManager.getEngineFactories();
		if (scriptFactories != null) {
			for (ScriptEngineFactory scriptEngineFactory : scriptFactories) {
				log.info("Found script engine for language: " +
					scriptEngineFactory.getLanguageName() + ", version: " +
					scriptEngineFactory.getLanguageVersion());
			}
		}
	}
// private void debug_packet(String msg, Packet packet, String to) {
// if (packet.getElemTo().equals(to)) {
// log.finest(msg + ", packet: " + packet.getStringData());
// }
// }
protected XMPPResourceConnection getXMPPResourceConnection(String connId) {
return connectionsByFrom.get(connId);
}
	/**
	 * Resolves the resource connection a packet belongs to: first by the
	 * packet's "from" (connection ID), then - for packets addressed to a
	 * local user - by the stanza's "to" address. Connections in TEMP
	 * status are treated as absent.
	 *
	 * @param p the packet to resolve.
	 * @return the matching connection, or <code>null</code> if none.
	 */
	protected XMPPResourceConnection getXMPPResourceConnection(Packet p) {
		XMPPResourceConnection conn = null;
		final String from = p.getFrom();
		if (from != null) {
			conn = connectionsByFrom.get(from);
			if (conn != null) {
				// TEMP connections are not usable yet - report as missing.
				return conn.getConnectionStatus() == ConnectionStatus.TEMP ? null : conn;
			}
		}
		// It might be a message _to_ some user on this server
		// so let's look for established session for this user...
		final String to = p.getElemTo();
		if (to != null) {
			if (log.isLoggable(Level.FINEST)) {
				log.finest("Searching for resource connection for: " + to);
			}
			conn = getResourceConnection(to);
			if (conn != null && conn.getConnectionStatus() == ConnectionStatus.TEMP) {
				conn = null;
			}
		} else {
			// Hm, not sure what should I do now....
			// Maybe I should treat it as message to admin....
			log.info("Message without TO attribute set, don't know what to do wih this: "
				+ p.getStringData());
		} // end of else
		return conn;
	}
	/**
	 * Detects packets whose "from" does not match the stanza "from" and
	 * which are not proper commands - typically an error bounced back
	 * after the originating (e.g. Bosh) connection is already gone.
	 * Packets from non-local domains are forwarded on, local ones are
	 * answered with service-unavailable.
	 *
	 * @param p the packet to check.
	 * @return <code>true</code> when the packet was consumed here.
	 */
	protected boolean isBrokenPacket(Packet p) {
		if (!p.getFrom().equals(p.getElemFrom()) && (!p.isCommand()
			|| (p.isCommand() && p.getCommand() == Command.OTHER))) {
			// Sometimes (Bosh) connection is gone and this is an error packet
			// sent back to the original sender. This original sender might
			// not local....
			if (p.getElemFrom() != null &&
				!isLocalDomain(JIDUtils.getNodeHost(p.getElemFrom()))) {
				// ok just forward it there....
				p.setFrom(null);
				p.setTo(null);
				fastAddOutPacket(p);
				return true;
			}
			// It doesn't look good, there should reaaly be a connection for
			// this packet....
			// returning error back...
			log.info("Broken packet: " + p.toString());
			try {
				Packet error =
					Authorization.SERVICE_UNAVAILABLE.getResponseMessage(p,
						"Service not available.", true);
				error.setTo(p.getFrom());
				fastAddOutPacket(error);
			} catch (PacketErrorTypeException e) {
				log.info("Packet processing exception: " + e);
			}
			return true;
		}
		return false;
	}
@Override
public void processPacket(final Packet packet) {
if (log.isLoggable(Level.FINEST)) {
log.finest("Received packet: " + packet.toString());
}
if (packet.isCommand() && processCommand(packet)) {
packet.processedBy("SessionManager");
// No more processing is needed for command packet
return;
} // end of if (pc.isCommand())
XMPPResourceConnection conn = getXMPPResourceConnection(packet);
if (conn == null && (isBrokenPacket(packet) ||
processAdminsOrDomains(packet))) {
return;
}
processPacket(packet, conn);
}
protected void processPacket(Packet packet, XMPPResourceConnection conn) {
packet.setTo(getComponentId());
if (log.isLoggable(Level.FINEST)) {
log.finest("processing packet: " + packet.toString() +
", connectionID: " +
(conn != null ? conn.getConnectionId() : "null"));
}
Queue<Packet> results = new LinkedList<Packet>();
boolean stop = false;
if (!stop) {
if (filter.preprocess(packet, conn, naUserRepository, results)) {
packet.processedBy("filter-foward");
if (log.isLoggable(Level.FINEST)) {
log.finest("Packet preprocessed: " + packet.toString());
if (results.size() > 0) {
for (Packet p: results) {
log.finest("Preprocess result: " + p.toString());
}
}
}
addOutPackets(packet, conn, results);
return;
}
}
// Preprocess..., all preprocessors get all messages to look at.
// I am not sure if this is correct for now, let's try to do it this
// way and maybe change it later.
// If any of them returns true - it means processing should stop now.
// That is needed for preprocessors like privacy lists which should
// block certain packets.
if (!stop) {
for (XMPPPreprocessorIfc preproc: preProcessors.values()) {
stop |= preproc.preProcess(packet, conn, naUserRepository, results);
} // end of for (XMPPPreprocessorIfc preproc: preProcessors)
}
if (!stop) {
if (filter.forward(packet, conn, naUserRepository, results)) {
packet.processedBy("filter-foward");
if (log.isLoggable(Level.FINEST)) {
log.finest("Packet forwarded: " + packet.toString());
}
addOutPackets(packet, conn, results);
return;
}
}
if (!stop) {
walk(packet, conn, packet.getElement(), results);
}
if (!stop) {
for (XMPPPostprocessorIfc postproc: postProcessors.values()) {
postproc.postProcess(packet, conn, naUserRepository, results);
} // end of for (XMPPPostprocessorIfc postproc: postProcessors)
} // end of if (!stop)
if (!stop && !packet.wasProcessed() && !isLocalDomain(packet.getElemTo())
&& filter.process(packet, conn, naUserRepository, results)) {
packet.processedBy("filter-process");
}
setPermissions(conn, results);
addOutPackets(packet, conn, results);
if (!packet.wasProcessed()) {
if (log.isLoggable(Level.FINEST)) {
log.finest("Packet not processed: " + packet.toString());
}
Packet error = null;
if (stop
|| (conn == null
&& packet.getElemFrom() != null && packet.getElemTo() != null
&& packet.getElemTo() != getComponentId()
&& (packet.getElemName().equals("iq")
|| packet.getElemName().equals("message")))) {
try {
error = Authorization.SERVICE_UNAVAILABLE.getResponseMessage(packet,
"Service not available.", true);
} catch (PacketErrorTypeException e) {
log.info("Packet processing exception: " + e
+ ", packet: " + packet.toString());
}
} else {
if (packet.getElemFrom() != null || conn != null) {
try {
error = Authorization.FEATURE_NOT_IMPLEMENTED.getResponseMessage(packet,
"Feature not supported yet.", true);
} catch (PacketErrorTypeException e) {
log.info("Packet processing exception: " + e
+ ", packet: " + packet.toString());
}
}
}
if (error != null) {
if (error.getElemTo() != null) {
conn = getResourceConnection(error.getElemTo());
} // end of if (error.getElemTo() != null)
if (conn != null) {
error.setTo(conn.getConnectionId());
} // end of if (conn != null)
addOutPacket(error);
}
} else {
if (log.isLoggable(Level.FINEST)) {
log.finest("Packet processed by: " + packet.getProcessorsIds().toString());
}
} // end of else
}
private void setPermissions(XMPPResourceConnection conn,
Queue<Packet> results) {
Permissions perms = Permissions.NONE;
if (conn != null) {
perms = Permissions.LOCAL;
if (conn.isAuthorized()) {
perms = Permissions.AUTH;
if (conn.isAnonymous()) {
perms = Permissions.ANONYM;
} else {
try {
String id = conn.getUserId();
if (isTrusted(id)) {
perms = Permissions.TRUSTED;
}
if (isAdmin(id)) {
perms = Permissions.ADMIN;
}
} catch (NotAuthorizedException e) {
perms = Permissions.NONE;
}
}
}
}
for (Packet res: results) {
res.setPermissions(perms);
}
}
// protected String[] getVHosts() {
// return hostnames;
// }
private boolean isAdmin(String jid) {
for (String adm: admins) {
if (adm.equals(JIDUtils.getNodeID(jid))) {
return true;
}
}
return false;
}
private boolean isTrusted(String jid) {
for (String trust: trusted) {
if (trust.equals(JIDUtils.getNodeID(jid))) {
return true;
}
}
return isAdmin(jid);
}
	/**
	 * Handles packets addressed directly to a local (virtual) domain:
	 * messages are forwarded to the configured admins, all other stanzas
	 * are re-addressed to this component and re-processed.
	 *
	 * @param packet the packet to examine.
	 * @return <code>true</code> when the packet was addressed to a local
	 * domain and consumed here.
	 */
	protected boolean processAdminsOrDomains(Packet packet) {
		final String to = packet.getElemTo();
		if (isLocalDomain(to)) {
			if (packet.getElemName().equals("message")) {
				// Yes this packet is for admin....
				if (log.isLoggable(Level.FINER)) {
					log.finer("Packet for admin: " + packet.getStringData());
				}
				sendToAdmins(packet);
			} else {
				if (log.isLoggable(Level.FINER)) {
					log.finer("Packet for hostname: " + packet.getStringData());
				}
				// Clone and re-address the stanza to this component, saving
				// the original addresses in OLDTO/OLDFROM so they can be
				// restored on the way out (see addOutPacket).
				Packet host_pac =
					new Packet(packet.getElement().clone());
				host_pac.getElement().setAttribute("to", getComponentId());
				host_pac.getElement().setAttribute(Packet.OLDTO, packet.getElemTo());
				host_pac.getElement().setAttribute(Packet.OLDFROM, packet.getElemFrom());
				processPacket(host_pac);
			}
			return true;
		} // end of if (isInRoutings(to))
		return false;
	}
protected void sendToAdmins(Packet packet) {
for (String admin: admins) {
if (log.isLoggable(Level.FINER)) {
log.finer("Sending packet to admin: " + admin);
}
Packet admin_pac =
new Packet(packet.getElement().clone());
admin_pac.getElement().setAttribute("to", admin);
processPacket(admin_pac);
}
}
protected XMPPSession getSession(String jid) {
return sessionsByNodeId.get(JIDUtils.getNodeID(jid));
}
protected XMPPResourceConnection getResourceConnection(String jid) {
XMPPSession session = getSession(jid);
if (session != null) {
if (log.isLoggable(Level.FINEST)) {
log.finest("Session not null, getting resource for jid: " + jid);
}
return session.getResourceConnection(jid);
} // end of if (session != null)
return null;
}
	/**
	 * Recursively offers the packet to every registered plugin which
	 * declares support for the given element (by name and namespace).
	 * Child elements are walked too, so plugins can match nested payloads.
	 *
	 * @param packet the packet being processed.
	 * @param connection the session connection, may be <code>null</code>.
	 * @param elem the element currently matched against plugins.
	 * @param results queue for plugin response packets.
	 */
	private void walk(final Packet packet,
		final XMPPResourceConnection connection, final Element elem,
		final Queue<Packet> results) {
		for (ProcessorThreads proc_t: processors.values()) {
			String xmlns = elem.getXMLNS();
			// Elements without an explicit namespace default to jabber:client.
			if (xmlns == null) { xmlns = "jabber:client"; }
			if (proc_t.processor.isSupporting(elem.getName(), xmlns)) {
				if (log.isLoggable(Level.FINEST)) {
					log.finest("XMPPProcessorIfc: "+proc_t.processor.getClass().getSimpleName()+
						" ("+proc_t.processor.id()+")"+"\n Request: "+elem.toString()
						+ (connection != null ? ", " + connection.getConnectionId() : " null"));
				}
				// Queue the packet for the plugin's worker threads; when the
				// plugin's internal queue is full the packet is dropped and
				// the drop is counted.
				if (proc_t.addItem(packet, connection)) {
					packet.processedBy(proc_t.processor.id());
				} else {
//					proc_t.debugQueue();
					proc_t.packetDroped();
					if (log.isLoggable(Level.FINE)) {
						log.fine("Can not add packet: " + packet.toString() +
							" to processor: " + proc_t.getName() +
							" internal queue full.");
					}
				}
			} // end of if (proc.isSupporting(elem.getName(), elem.getXMLNS()))
		} // end of for ()
		Collection<Element> children = elem.getChildren();
		if (children != null) {
			for (Element child: children) {
				walk(packet, connection, child, results);
			} // end of for (Element child: children)
		} // end of if (children != null)
	}
	/**
	 * Creates and registers a new resource connection plus user session
	 * for the given user - used e.g. when an external component requests
	 * a session via the USER_STATUS command.
	 *
	 * @param conn_id the connection ID the connection is registered under.
	 * @param domain the user's domain.
	 * @param user_jid the full user JID (may carry a resource part).
	 * @return the newly created resource connection.
	 */
	protected XMPPResourceConnection createUserSession(String conn_id,
		String domain, String user_jid) {
		XMPPResourceConnection connection = new XMPPResourceConnection(conn_id,
			user_repository, auth_repository, this, false);
		connection.setDomain(domain);
		// Dummy session ID, we might decide later to set real thing here
		connection.setSessionId("session-id-"+JIDUtils.getNodeNick(user_jid));
		//connection.setAnonymousPeers(anon_peers);
		connectionsByFrom.put(conn_id, connection);
		registerNewSession(JIDUtils.getNodeID(user_jid), connection);
		try {
			connection.setResource(JIDUtils.getNodeResource(user_jid));
		} catch (NotAuthorizedException e) {
			log.warning("Something wrong with authorization: " + e
				+ ", for user: " + user_jid);
		}
		return connection;
	}
@Override
protected Integer getMaxQueueSize(int def) {
return def*10;
}
private boolean isAnonymousEnabled(String domain) {
return vHostManager != null ? vHostManager.isAnonymousEnabled(domain) :
false;
}
protected boolean processCommand(Packet pc) {
if (!(pc.getElemTo() == null) &&
!getComponentId().equals(pc.getElemTo()) &&
!isLocalDomain(pc.getElemTo())) {
return false;
}
boolean processing_result = false;
if (log.isLoggable(Level.FINER)) {
log.finer(pc.getCommand().toString() + " command from: " + pc.getFrom());
}
//Element command = pc.getElement();
XMPPResourceConnection connection = connectionsByFrom.get(pc.getFrom());
switch (pc.getCommand()) {
case STREAM_OPENED:
// It might be existing opened stream after TLS/SASL authorization
// If not, it means this is new stream
if (connection == null) {
if (log.isLoggable(Level.FINER)) {
log.finer("Adding resource connection for: " + pc.getFrom());
}
final String hostname = Command.getFieldValue(pc, "hostname");
connection = new XMPPResourceConnection(pc.getFrom(),
user_repository, auth_repository, this,
isAnonymousEnabled(hostname));
if (hostname != null) {
if (log.isLoggable(Level.FINEST)) {
log.finest("Setting hostname " + hostname
+ " for connection: " + connection.getConnectionId());
}
connection.setDomain(hostname);
} // end of if (hostname != null)
else {
connection.setDomain(getDefHostName());
} // end of if (hostname != null) else
//connection.setAnonymousPeers(anon_peers);
connectionsByFrom.put(pc.getFrom(), connection);
authenticationWatchdog.schedule(new AuthenticationTimer(pc.getFrom()),
MINUTE);
} else {
if (log.isLoggable(Level.FINEST)) {
log.finest("Stream opened for existing session, authorized: "
+ connection.isAuthorized());
}
} // end of else
connection.setSessionId(Command.getFieldValue(pc, "session-id"));
connection.setDefLang(Command.getFieldValue(pc, "xml:lang"));
if (log.isLoggable(Level.FINEST)) {
log.finest("Setting session-id " + connection.getSessionId()
+ " for connection: " + connection.getConnectionId());
}
fastAddOutPacket(pc.okResult((String) null, 0));
processing_result = true;
break;
case GETFEATURES:
if (pc.getType() == StanzaType.get) {
List<Element> features =
getFeatures(connectionsByFrom.get(pc.getFrom()));
Packet result = pc.commandResult(null);
Command.setData(result, features);
addOutPacket(result);
} // end of if (pc.getType() == StanzaType.get)
processing_result = true;
break;
case STREAM_CLOSED:
fastAddOutPacket(pc.okResult((String)null, 0));
closeConnection(pc.getFrom(), false);
processing_result = true;
break;
case BROADCAST_TO_ONLINE:
String from = pc.getFrom();
boolean trusted = false;
try {
trusted = (from != null && isTrusted(from))
|| (connection != null && isTrusted(connection.getUserId()));
} catch (NotAuthorizedException e) {
trusted = false;
}
try {
if (trusted) {
List<Element> packets = Command.getData(pc);
if (packets != null) {
for (XMPPResourceConnection conn: connectionsByFrom.values()) {
if (conn.isAuthorized()) {
try {
for (Element el_pack: packets) {
Element el_copy = el_pack.clone();
el_copy.setAttribute("to", conn.getJID());
Packet out_packet = new Packet(el_copy);
out_packet.setTo(conn.getConnectionId());
addOutPacket(out_packet);
}
} catch (NotAuthorizedException e) {
log.warning("Something wrong, connection is authenticated but "
+ "NoAuthorizedException is thrown.");
}
}
}
} else {
addOutPacket(Authorization.BAD_REQUEST.getResponseMessage(pc,
"Missing packets for broadcast.", true));
}
} else {
addOutPacket(Authorization.FORBIDDEN.getResponseMessage(pc,
"You don't have enough permission to brodcast packet.", true));
}
} catch (PacketErrorTypeException e) {
log.warning("Packet processing exception: " + e
+ ", packet: " + pc.toString());
}
processing_result = true;
break;
case USER_STATUS:
try {
if (isTrusted(pc.getElemFrom())
|| isTrusted(JIDUtils.getNodeHost(pc.getElemFrom()))) {
String av = Command.getFieldValue(pc, "available");
boolean available = !(av != null && av.equalsIgnoreCase("false"));
if (available) {
Packet presence = null;
Element p = pc.getElement().getChild("command").getChild("presence");
if (p != null) {
// + // use this hack to break XMLNS
// + Element el = new Element("presence");
// + el.setChildren(p.getChildren());
Element elem = p.clone();
elem.setXMLNS("jabber:client");
presence = new Packet(elem);
}
connection = connectionsByFrom.get(pc.getElemFrom());
if (connection == null) {
String user_jid = Command.getFieldValue(pc, "jid");
String hostname = JIDUtils.getNodeHost(user_jid);
connection = createUserSession(pc.getElemFrom(), hostname, user_jid);
connection.setSessionId("USER_STATUS");
user_repository.setData(JIDUtils.getNodeID(user_jid), "tokens",
"USER_STATUS", "USER_STATUS");
connection.loginToken("USER_STATUS", "USER_STATUS");
handleLogin(JIDUtils.getNodeNick(user_jid), connection);
connection.putSessionData("jingle", "active");
fastAddOutPacket(pc.okResult((String)null, 0));
if (presence == null) {
presence =
new Packet(new Element("presence",
new Element[] {
new Element("priority", "-1"),
new Element("c",
new String[] {"node", "ver", "ext", "xmlns"},
new String[] {"http://www.google.com/xmpp/client/caps",
XMPPServer.getImplementationVersion(),
"voice-v1",
"http://jabber.org/protocol/caps"})},
null, null));
}
} else {
// addOutPacket(Authorization.CONFLICT.getResponseMessage(pc,
// "The user resource already exists.", true));
if (log.isLoggable(Level.FINEST)) {
log.finest("USER_STATUS set to true for user who is already available: "
+ pc.toString());
}
}
if (presence != null) {
presence.setFrom(pc.getElemFrom());
presence.setTo(getComponentId());
addOutPacket(presence);
}
} else {
connection = connectionsByFrom.remove(pc.getElemFrom());
if (connection != null) {
closeSession(connection, false);
addOutPacket(pc.okResult((String)null, 0));
} else {
addOutPacket(Authorization.ITEM_NOT_FOUND.getResponseMessage(pc,
"The user resource you want to remove does not exist.", true));
log.info("Can not find resource connection for packet: " +
pc.toString());
}
}
} else {
try {
addOutPacket(Authorization.FORBIDDEN.getResponseMessage(pc,
"Only trusted entity can do it.", true));
} catch (PacketErrorTypeException e) {
log.warning("Packet error type when not expected: " + pc.toString());
}
}
} catch (Exception e) {
try {
addOutPacket(Authorization.UNDEFINED_CONDITION.getResponseMessage(pc,
"Unexpected error occured during the request: " + e, true));
} catch (Exception ex) { ex.printStackTrace(); }
log.log(Level.WARNING, "USER_STATUS session creation error: ", e);
}
processing_result = true;
break;
case REDIRECT:
if (connection != null) {
String action = Command.getFieldValue(pc, "action");
if (action.equals("close")) {
if (log.isLoggable(Level.FINE)) {
log.fine("Closing redirected connections: " + pc.getFrom());
}
sendAllOnHold(connection);
closeConnection(pc.getFrom(), true);
} else {
if (log.isLoggable(Level.FINE)) {
log.fine("Activating redirected connections: " + pc.getFrom());
}
}
} else {
if (log.isLoggable(Level.FINE)) {
log.fine("Redirect for non-existen connection: " + pc.toString());
}
}
processing_result = true;
break;
case OTHER:
String strCommand = pc.getStrCommand();
if (strCommand != null && strCommand.contains(ADMIN_COMMAND_NODE)) {
Command.Action action = Command.getAction(pc);
if (action != Command.Action.cancel) {
boolean admin = false;
try {
admin = connection != null && connection.isAuthorized() &&
isAdmin(connection.getUserId());
if (admin) {
if (log.isLoggable(Level.FINER)) {
log.finer("Processing admin command: " + pc.toString());
}
int hashIdx = strCommand.indexOf('#');
String scriptId = strCommand.substring(hashIdx + 1);
AdminCommandIfc com = adminCommands.get(scriptId);
if (com == null) {
Packet result = pc.commandResult(Command.DataType.result);
Command.addTextField(result, "Error", "The command: " + scriptId +
" is not available yet.");
fastAddOutPacket(result);
} else {
Bindings binds = scriptEngineManager.getBindings();
initBindings(binds);
Queue<Packet> results = new LinkedList<Packet>();
com.runCommand(pc, binds, results);
addOutPackets(results);
}
}
} catch (NotAuthorizedException e) {
admin = false;
} catch (Exception e) {
log.log(Level.WARNING,
"Unknown admin command processing exception: " +
pc.toString(), e);
}
if (!admin) {
try {
if (log.isLoggable(Level.FINER)) {
log.finer("Command rejected non-admin detected: " +
(connection != null ? (connection.isAuthorized() + ": " +
connection.getUserId())
: "null"));
}
addOutPacket(Authorization.FORBIDDEN.getResponseMessage(pc,
"Only Administrator can call the command.", true));
} catch (Exception e) {
log.info("Problem sending FORBIDDEN error: " + e +
", packet: " + pc.toString());
}
}
} else {
Packet result = pc.commandResult(Command.DataType.result);
Command.addTextField(result, "Note", "Command canceled.");
fastAddOutPacket(result);
}
processing_result = true;
} else {
log.info("Other command found: " + pc.getStrCommand());
}
break;
default:
break;
} // end of switch (pc.getCommand())
return processing_result;
}
private void initBindings(Bindings binds) {
binds.put(AdminCommandIfc.ADMN_CMDS, adminCommands);
binds.put(AdminCommandIfc.AUTH_REPO, auth_repository);
binds.put(AdminCommandIfc.USER_CONN, connectionsByFrom);
binds.put(AdminCommandIfc.USER_REPO, user_repository);
binds.put(AdminCommandIfc.USER_SESS, sessionsByNodeId);
binds.put(AdminCommandIfc.ADMN_DISC, serviceEntity);
binds.put(AdminCommandIfc.SCRI_MANA, scriptEngineManager);
}
	/**
	 * Flushes packets queued while a connection was on hold. When a
	 * remote session manager address is present in the session data
	 * ("redirect-to") the packets are redirected there and the
	 * connection is switched to REDIRECT status; otherwise the packets
	 * are processed locally.
	 *
	 * @param conn the connection whose held packets should be flushed.
	 */
	@SuppressWarnings("unchecked")
	protected void sendAllOnHold(XMPPResourceConnection conn) {
		String remote_smId = (String)conn.getSessionData("redirect-to");
		LinkedList<Packet> packets =
			(LinkedList<Packet>)conn.getSessionData(SESSION_PACKETS);
		if (remote_smId == null) {
			if (log.isLoggable(Level.FINEST)) {
				log.finest("No address for remote SM to redirect packets, processing locally.");
			}
			if (packets != null) {
				Packet sess_pack = null;
				while (((sess_pack = packets.poll()) != null) &&
					// Temporarily fix, need a better solution. For some reason
					// the mode has been sent back from normal to on_hold during
					// loop execution leading to infinite loop.
					// Possibly buggy client sent a second authentication packet
					// executing a second handleLogin call....
					(conn.getConnectionStatus() != ConnectionStatus.ON_HOLD)) {
					processPacket(sess_pack);
				}
			}
			return;
		}
		conn.setConnectionStatus(ConnectionStatus.REDIRECT);
		if (packets != null) {
			Packet sess_pack = null;
			while ((sess_pack = packets.poll()) != null) {
				sess_pack.setTo(remote_smId);
				fastAddOutPacket(sess_pack);
			}
		}
	}
protected void closeConnection(String connectionId, boolean closeOnly) {
if (log.isLoggable(Level.FINER)) {
log.finer("Stream closed from: " + connectionId);
}
++closedConnections;
XMPPResourceConnection connection = connectionsByFrom.remove(connectionId);
if (connection != null) {
closeSession(connection, closeOnly);
} else {
log.fine("Can not find resource connection for packet: " + connectionId);
} // end of if (conn != null) else
}
/**
 * Tears down a user session: notifies stop listeners (unless closeOnly is
 * set), removes the XMPPSession map entry when this was the user's last
 * resource, logs the user out of the auth repository and finally marks the
 * stream closed on the connection.
 *
 * @param conn resource connection being closed.
 * @param closeOnly when {@code true}, skip stop-listener processing.
 */
private void closeSession(XMPPResourceConnection conn, boolean closeOnly) {
  if (!closeOnly) {
    Queue<Packet> results = new LinkedList<Packet>();
    for (XMPPStopListenerIfc stopProc: stopListeners.values()) {
      stopProc.stopped(conn, results, plugin_config.get(stopProc.id()));
    } // end of for ()
    addOutPackets(null, conn, results);
  }
  try {
    // TEMP connections are not yet authorized but may still hold session
    // state that needs cleanup.
    if (conn.isAuthorized()
        || (conn.getConnectionStatus() == ConnectionStatus.TEMP)) {
      String userId = conn.getUserId();
      if (log.isLoggable(Level.FINE)) {
        log.fine("Closing connection for: " + userId);
      }
      XMPPSession session = conn.getParentSession();
      if (session != null) {
        if (log.isLoggable(Level.FINE)) {
          log.fine("Found parent session for: " + userId);
        }
        if (session.getActiveResourcesSize() <= 1) {
          // Last resource going away - drop the whole user session.
          session = sessionsByNodeId.remove(userId);
          if (session == null) {
            log.info("UPS can't remove session, not found in map: " + userId);
          } else {
            if (log.isLoggable(Level.FINER)) {
              log.finer("Number of user sessions: " + sessionsByNodeId.size());
            }
          } // end of else
          // NOTE(review): logout is attempted even when the map entry was
          // already gone - confirm this is the intended behavior.
          auth_repository.logout(userId);
        } else {
          // Other resources remain active - only log the current state.
          if (log.isLoggable(Level.FINER)) {
            StringBuilder sb = new StringBuilder();
            for (XMPPResourceConnection res_con: session.getActiveResources()) {
              sb.append(", res=" + res_con.getResource() + " ("
                  + res_con.getConnectionStatus() + ")");
            }
            log.finer("Number of connections is "
                + session.getActiveResourcesSize() + " for the user: " + userId
                + sb.toString());
          }
        } // end of else
      } // end of if (session.getActiveResourcesSize() == 0)
    }
  } catch (NotAuthorizedException e) {
    log.info("Closed not authorized session: " + e);
  } catch (Exception e) {
    log.log(Level.WARNING, "Exception closing session... ", e);
  }
  conn.streamClosed();
}
/**
 * Queues an outgoing packet, first restoring any addresses rewritten
 * earlier in processing: the OLDTO/OLDFROM attributes carry the original
 * 'from'/'to' values which must be put back before the packet leaves.
 *
 * @param packet packet to send.
 * @return result of the superclass queueing operation.
 */
@Override
protected boolean addOutPacket(Packet packet) {
  final String originalTo = packet.getAttribute(Packet.OLDTO);
  if (originalTo != null) {
    packet.getElement().setAttribute("from", originalTo);
    packet.getElement().removeAttribute(Packet.OLDTO);
  }
  final String originalFrom = packet.getAttribute(Packet.OLDFROM);
  if (originalFrom != null) {
    packet.getElement().setAttribute("to", originalFrom);
    packet.getElement().removeAttribute(Packet.OLDFROM);
  }
  return super.addOutPacket(packet);
}
/**
 * Queues an outgoing packet bypassing the OLDTO/OLDFROM address
 * restoration done by {@link #addOutPacket(Packet)}.
 */
protected boolean fastAddOutPacket(Packet packet) {
  return super.addOutPacket(packet);
}
/**
 * Runs all registered output filters over the result packets produced
 * while processing the given packet, then queues them for delivery.
 *
 * @param packet the packet whose processing produced the results (may be null).
 * @param conn connection the results belong to (may be null).
 * @param results packets to filter and send.
 */
protected void addOutPackets(Packet packet, XMPPResourceConnection conn,
    Queue<Packet> results) {
  for (XMPPPacketFilterIfc outFilter : outFilters.values()) {
    outFilter.filter(packet, conn, naUserRepository, results);
  }
  addOutPackets(results);
}
// private XMPPSession getXMPPSession(Packet p) {
// return connectionsByFrom.get(p.getFrom()).getParentSession();
// }
/**
 * Collects the stream features advertised by every loaded plugin for the
 * given session.
 *
 * @param session session to compute features for.
 * @return combined list of feature elements (possibly empty).
 */
private List<Element> getFeatures(XMPPResourceConnection session) {
  final List<Element> features = new LinkedList<Element>();
  for (ProcessorThreads pt : processors.values()) {
    final Element[] supported = pt.processor.supStreamFeatures(session);
    if (supported != null) {
      features.addAll(Arrays.asList(supported));
    }
  }
  return features;
}
/**
 * Returns the default configuration for this component: the base message
 * receiver defaults extended with session-manager specific settings.
 *
 * @param params initial startup parameters.
 * @return map of default configuration properties.
 */
@Override
public Map<String, Object> getDefaults(Map<String, Object> params) {
  Map<String, Object> props = super.getDefaults(params);
  SessionManagerConfig.getDefaults(props, params);
  return props;
}
/**
 * Loads every implementation registered under the given plugin ID
 * (processor, pre-processor, post-processor, stop listener and packet
 * filter) and adds each found one to its registry.
 *
 * @param comp_id plugin identifier to load.
 */
private void addPlugin(String comp_id) {
  // Use the logging framework rather than raw System.out so plugin loading
  // appears in the server logs alongside the per-implementation messages.
  log.config("Loading plugin: " + comp_id + " ...");
  XMPPProcessorIfc proc = ProcessorFactory.getProcessor(comp_id);
  boolean loaded = false;
  if (proc != null) {
    ProcessorThreads pt = new ProcessorThreads(proc);
    processors.put(comp_id, pt);
    log.config("Added processor: " + proc.getClass().getSimpleName()
        + " for plugin id: " + comp_id);
    loaded = true;
  }
  XMPPPreprocessorIfc preproc = ProcessorFactory.getPreprocessor(comp_id);
  if (preproc != null) {
    preProcessors.put(comp_id, preproc);
    log.config("Added preprocessor: " + preproc.getClass().getSimpleName()
        + " for plugin id: " + comp_id);
    loaded = true;
  }
  XMPPPostprocessorIfc postproc = ProcessorFactory.getPostprocessor(comp_id);
  if (postproc != null) {
    postProcessors.put(comp_id, postproc);
    log.config("Added postprocessor: " + postproc.getClass().getSimpleName()
        + " for plugin id: " + comp_id);
    loaded = true;
  }
  XMPPStopListenerIfc stoplist = ProcessorFactory.getStopListener(comp_id);
  if (stoplist != null) {
    stopListeners.put(comp_id, stoplist);
    log.config("Added stopped processor: " + stoplist.getClass().getSimpleName()
        + " for plugin id: " + comp_id);
    loaded = true;
  }
  XMPPPacketFilterIfc filterproc = ProcessorFactory.getPacketFilter(comp_id);
  if (filterproc != null) {
    outFilters.put(comp_id, filterproc);
    log.config("Added packet filter: " + filterproc.getClass().getSimpleName()
        + " for plugin id: " + comp_id);
    loaded = true;
  }
  if (!loaded) {
    log.warning("No implementation found for plugin id: " + comp_id);
  } // end of if (!loaded)
}
/**
 * Applies component configuration: wires the user/auth repositories
 * (shared instances when available, otherwise newly created ones),
 * registers the Tigase SASL provider, loads all configured plugins with
 * their per-plugin settings, creates the internal server session and
 * loads admin command scripts from the configured directory.
 *
 * Fixes over the previous version: the script reader is closed in a
 * finally block (no file-handle leak on read errors) and a missing or
 * non-directory admin-scripts path no longer causes an NPE.
 *
 * @param props configuration properties to apply.
 */
@Override
public void setProperties(Map<String, Object> props) {
  super.setProperties(props);
  Security.insertProviderAt(new TigaseSaslProvider(), 6);
  filter = new PacketFilter();
  // Is there a shared user repository pool? If so I want to use it:
  user_repository = (UserRepository) props.get(SHARED_USER_REPO_POOL_PROP_KEY);
  if (user_repository == null) {
    // Is there shared user repository instance? If so I want to use it:
    user_repository = (UserRepository) props.get(SHARED_USER_REPO_PROP_KEY);
  } else {
    log.config("Using shared repository pool.");
  }
  auth_repository = (UserAuthRepository) props.get(SHARED_AUTH_REPO_PROP_KEY);
  if (user_repository != null) {
    log.config("Using shared repository instance.");
  } else {
    // No shared repositories - create our own from the configured
    // class/URL pairs, passing through any per-repository parameters.
    Map<String, String> user_repo_params = new LinkedHashMap<String, String>();
    Map<String, String> auth_repo_params = new LinkedHashMap<String, String>();
    for (Map.Entry<String, Object> entry : props.entrySet()) {
      if (entry.getKey().startsWith(USER_REPO_PARAMS_NODE)) {
        // Split the key to configuration nodes separated with '/'
        String[] nodes = entry.getKey().split("/");
        // The plugin ID part may contain many IDs separated with comma ','
        if (nodes.length > 1) {
          user_repo_params.put(nodes[1], entry.getValue().toString());
        }
      }
      if (entry.getKey().startsWith(AUTH_REPO_PARAMS_NODE)) {
        // Split the key to configuration nodes separated with '/'
        String[] nodes = entry.getKey().split("/");
        if (nodes.length > 1) {
          auth_repo_params.put(nodes[1], entry.getValue().toString());
        }
      }
    }
    try {
      String cls_name = (String) props.get(USER_REPO_CLASS_PROP_KEY);
      String res_uri = (String) props.get(USER_REPO_URL_PROP_KEY);
      user_repository = RepositoryFactory.getUserRepository(getName(),
          cls_name, res_uri, user_repo_params);
      log.config("Initialized " + cls_name + " as user repository: " + res_uri);
    } catch (Exception e) {
      log.log(Level.SEVERE, "Can't initialize user repository: ", e);
    } // end of try-catch
    try {
      String cls_name = (String) props.get(AUTH_REPO_CLASS_PROP_KEY);
      String res_uri = (String) props.get(AUTH_REPO_URL_PROP_KEY);
      auth_repository = RepositoryFactory.getAuthRepository(getName(),
          cls_name, res_uri, auth_repo_params);
      log.config("Initialized " + cls_name + " as auth repository: " + res_uri);
    } catch (Exception e) {
      log.log(Level.SEVERE, "Can't initialize auth repository: ", e);
    } // end of try-catch
  }
  naUserRepository = new NARepository(user_repository);
  String[] plugins = (String[])props.get(PLUGINS_PROP_KEY);
  maxPluginsNo = plugins.length;
  processors.clear();
  for (String comp_id: plugins) {
    if (comp_id.equals("presence")) {
      log.warning("Your configuration is outdated!"
          + " Note 'presence' and 'jaber:iq:roster' plugins are no longer exist."
          + " Use 'roster-presence' plugin instead, loading automaticly...");
      comp_id = "roster-presence";
    }
    addPlugin(comp_id);
    // Collect this plugin's settings from keys of the form
    // PLUGINS_CONF_PROP_KEY/<id1,id2,...>/<setting>.
    Map<String, Object> plugin_settings =
        new ConcurrentSkipListMap<String, Object>();
    for (Map.Entry<String, Object> entry: props.entrySet()) {
      if (entry.getKey().startsWith(PLUGINS_CONF_PROP_KEY)) {
        // Split the key to configuration nodes separated with '/'
        String[] nodes = entry.getKey().split("/");
        // The plugin ID part may contain many IDs separated with comma ','
        if (nodes.length > 2) {
          String[] ids = nodes[1].split(",");
          Arrays.sort(ids);
          if (Arrays.binarySearch(ids, comp_id) >= 0) {
            plugin_settings.put(nodes[2], entry.getValue());
          }
        }
      }
    }
    if (plugin_settings.size() > 0) {
      if (log.isLoggable(Level.FINEST)) {
        log.finest(plugin_settings.toString());
      }
      plugin_config.put(comp_id, plugin_settings);
    }
  } // end of for (String comp_id: plugins)
  // Internal session used as the origin for server-generated packets.
  createUserSession(NULL_ROUTING, getDefHostName(),
      getComponentId() + "/server");
  admins = (String[])props.get(ADMINS_PROP_KEY);
  trusted = (String[])props.get(TRUSTED_PROP_KEY);
  // Loading admin scripts....
  String descrStr = "AS:Description: ";
  String cmdIdStr = "AS:CommandId: ";
  String scriptsPath = (String) props.get(ADMIN_SCRIPTS_PROP_KEY);
  File file = null;
  AddScriptCommand addCommand = new AddScriptCommand();
  Bindings binds = scriptEngineManager.getBindings();
  initBindings(binds);
  try {
    File adminDir = new File(scriptsPath);
    // listFiles() returns null for a missing or non-directory path -
    // guard against the NPE the previous code threw on misconfiguration.
    File[] scriptFiles = adminDir.listFiles();
    if (scriptFiles == null) {
      log.warning("Admin scripts directory does not exist or is not readable: "
          + adminDir);
    } else {
      for (File f : scriptFiles) {
        String cmdId = null;
        String cmdDescr = null;
        file = f;
        StringBuilder sb = new StringBuilder();
        BufferedReader buffr = new BufferedReader(new FileReader(file));
        try {
          String line = null;
          while ((line = buffr.readLine()) != null) {
            sb.append(line + "\n");
            int idx = line.indexOf(descrStr);
            if (idx >= 0) {
              cmdDescr = line.substring(idx + descrStr.length());
            }
            idx = line.indexOf(cmdIdStr);
            if (idx >= 0) {
              cmdId = line.substring(idx + cmdIdStr.length());
            }
          }
        } finally {
          // Always release the file handle, even if reading fails.
          buffr.close();
        }
        if (cmdId == null || cmdDescr == null) {
          log.warning("Admin script found but it has no command ID or command description: " + file);
          continue;
        }
        int idx = file.toString().lastIndexOf(".");
        String ext = file.toString().substring(idx + 1);
        addCommand.addAdminScript(cmdId, cmdDescr, sb.toString(), null,
            ext, binds);
        log.config("Loaded admin command from file: " + file +
            ", id: " + cmdId + ", ext: " + ext + ", descr: " + cmdDescr);
      }
    }
  } catch (Exception e) {
    log.log(Level.WARNING, "Can't load the admin script file: " + file, e);
  }
}
/**
 * The session manager handles packets addressed directly to the local
 * domains, so this always returns {@code true}.
 */
@Override
public boolean handlesLocalDomains() {
  return true;
}
/**
 * Binds a resource connection to the user's XMPPSession, creating the
 * session if this is the user's first resource. For an existing session,
 * a liveness check command is sent to every other active connection.
 *
 * NOTE(review): the get-then-put on sessionsByNodeId is not atomic - two
 * concurrent logins for the same user could each create a session here;
 * confirm whether callers serialize logins per user.
 *
 * @param userId bare user ID (node@domain) the connection belongs to.
 * @param conn resource connection to register.
 */
protected void registerNewSession(String userId, XMPPResourceConnection conn) {
  XMPPSession session = sessionsByNodeId.get(userId);
  if (session == null) {
    session = new XMPPSession(JIDUtils.getNodeNick(userId));
    sessionsByNodeId.put(userId, session);
    if (log.isLoggable(Level.FINEST)) {
      log.finest("Created new XMPPSession for: " + userId);
    }
  } else {
    // Check all other connections whether they are still alive....
    List<XMPPResourceConnection> connections = session.getActiveResources();
    if (connections != null) {
      for (XMPPResourceConnection connection : connections) {
        // The handler closes the connection when the check times out or
        // comes back as an error.
        addOutPacketWithTimeout(Command.CHECK_USER_CONNECTION.getPacket(
            getComponentId(), connection.getConnectionId(),
            StanzaType.get, UUID.randomUUID().toString()),
            connectionCheckCommandHandler, 7l, TimeUnit.SECONDS);
      }
    }
  }
  session.addResourceConnection(conn);
}
/**
 * Callback fired after a user authenticates on a connection; registers the
 * connection under the user's bare JID session.
 *
 * @param userName local part of the authenticated user's JID.
 * @param conn connection the user logged in on.
 */
@Override
public void handleLogin(String userName, XMPPResourceConnection conn) {
  if (log.isLoggable(Level.FINEST)) {
    log.finest("handleLogin called for: " + userName + ", conn_id: " +
        conn.getConnectionId());
  }
  registerNewSession(JIDUtils.getNodeID(userName, conn.getDomain()), conn);
}
/**
 * Callback fired when a user logs out: removes the XMPPSession when this
 * was the last active resource, unregisters the connection and asks the
 * connection manager to close the network connection.
 *
 * @param userName local part of the user's JID.
 * @param conn connection the user logged out from.
 */
@Override
public void handleLogout(String userName, XMPPResourceConnection conn) {
  String domain = conn.getDomain();
  String userId = JIDUtils.getNodeID(userName, domain);
  XMPPSession session = sessionsByNodeId.get(userId);
  if (session != null && session.getActiveResourcesSize() <= 1) {
    sessionsByNodeId.remove(userId);
  } // end of if (session.getActiveResourcesSize() == 0)
  connectionsByFrom.remove(conn.getConnectionId());
  // Tell the connection manager to drop the client connection as well.
  fastAddOutPacket(Command.CLOSE.getPacket(getComponentId(),
      conn.getConnectionId(), StanzaType.set, conn.nextStanzaId()));
}
/**
 * Service-discovery info for this component. Answers only queries
 * addressed to the session manager itself or to a local domain; a
 * top-level query (node == null) additionally advertises the features of
 * every loaded plugin.
 *
 * @param node disco node being queried, may be null.
 * @param jid address the query was sent to.
 * @return disco#info element, or {@code null} when not addressed to us.
 */
@Override
public Element getDiscoInfo(String node, String jid) {
  final boolean addressedToUs = jid != null &&
      (getName().equals(JIDUtils.getNodeNick(jid)) || isLocalDomain(jid));
  if (!addressedToUs) {
    if (log.isLoggable(Level.FINEST)) {
      log.finest("Not found disco info for node: " + node + ", jid: " + jid);
    }
    return null;
  }
  Element query = serviceEntity.getDiscoInfo(node);
  if (node == null) {
    for (ProcessorThreads pt : processors.values()) {
      Element[] discoFeatures = pt.processor.supDiscoFeatures(null);
      if (discoFeatures != null) {
        query.addChildren(Arrays.asList(discoFeatures));
      }
    }
  }
  if (log.isLoggable(Level.FINEST)) {
    log.finest("Found disco info: " +
        (query != null ? query.toString() : null));
  }
  return query;
}
/**
 * Collects the service-discovery features advertised by every loaded
 * plugin.
 *
 * @return combined feature list (possibly empty).
 */
@Override
public List<Element> getDiscoFeatures() {
  final List<Element> features = new LinkedList<Element>();
  for (ProcessorThreads pt : processors.values()) {
    final Element[] discoFeatures = pt.processor.supDiscoFeatures(null);
    if (discoFeatures != null) {
      features.addAll(Arrays.asList(discoFeatures));
    }
  }
  return features;
}
/**
 * Service-discovery items for this component, delegated to the service
 * entity tree.
 *
 * @param node disco node being queried, may be null.
 * @param jid address the query was sent to.
 * @return list of item elements, or {@code null} when none.
 */
@Override
public List<Element> getDiscoItems(String node, String jid) {
  final List<Element> items = serviceEntity.getDiscoItems(node, jid);
  if (log.isLoggable(Level.FINEST)) {
    log.finest("Found disco items: " +
        (items != null ? items.toString() : null));
  }
  return items;
}
/**
 * Collects runtime statistics: connection/session counters plus one queue
 * record per loaded plugin. The previous version duplicated the whole
 * per-plugin StatRecord construction in both branches of an if/else that
 * differed only in the logging level - collapsed here to a single
 * construction with the level chosen up front.
 *
 * @return list of statistic records for this component.
 */
@Override
public List<StatRecord> getStatistics() {
  List<StatRecord> stats = super.getStatistics();
  stats.add(new StatRecord(getName(), "Open connections", "int",
      connectionsByFrom.size(), Level.FINE));
  stats.add(new StatRecord(getName(), "Registered accounts", "long",
      user_repository.getUsersCount(), Level.INFO));
  stats.add(new StatRecord(getName(), "Open authorized sessions", "int",
      sessionsByNodeId.size(), Level.INFO));
  stats.add(new StatRecord(getName(), "Closed connections", "long",
      closedConnections, Level.FINER));
  stats.add(new StatRecord(getName(), "Authentication timouts", "long",
      authTimeouts, Level.FINEST));
  for (Map.Entry<String, ProcessorThreads> procent : processors.entrySet()) {
    ProcessorThreads proc = procent.getValue();
    // roster-presence is the busiest queue - report it at INFO, all other
    // plugin queues only at FINEST.
    Level level = proc.getName().equals("roster-presence")
        ? Level.INFO : Level.FINEST;
    stats.add(new StatRecord(getName(), "Processor: " + procent.getKey(),
        "String", "Queue: " + proc.getTotalQueueSize() +
        ", AvTime: " + proc.averageTime() +
        ", Runs: " + proc.cntRuns + ", Lost: " + proc.dropedPackets,
        level));
  }
  return stats;
}
/**
 * Checks whether the given domain is served locally, optionally also
 * matching component sub-domains.
 *
 * @param domain domain name to test.
 * @param includeComponents when {@code true} component addresses count too.
 */
@Override
public boolean isLocalDomain(String domain, boolean includeComponents) {
  return includeComponents
      ? isLocalDomainOrComponent(domain)
      : isLocalDomain(domain);
}
/**
 * Value holder pairing a packet with the resource connection it should be
 * processed with; instances are queued for the plugin worker threads.
 */
private class QueueItem {
  // Packet awaiting plugin processing.
  Packet packet;
  // Connection context for the packet; may be null (workers check for it).
  XMPPResourceConnection conn;
}
private class ProcessorThreads {
private boolean stopped = false;
private XMPPProcessorIfc processor = null;
private LinkedList<ProcessorWorkerThread> workerThreads =
new LinkedList<ProcessorWorkerThread>();
// private PriorityQueue<QueueItem> nullQueue = new PriorityQueue<QueueItem>(
// Priority.values().length, maxQueueSize / maxPluginsNo);
private ArrayList<PriorityQueue<QueueItem>> queues =
new ArrayList<PriorityQueue<QueueItem>>();
// Packets are put in queues in such a way that all packets for the same
// user end-up in the same queue. This is important in some cases as
// packet processing order does matter in some cases, especially for
// roster processing.
// Therefore it is also recommended that there is a single thread for
// each queue but we can ditribute load increasing number of queues.
private int numQueues = 2;
private int numWorkerThreads = 1;
private int maxQueueSizeDef = maxQueueSize/maxPluginsNo;
private long cntRuns = 0;
private long dropedPackets = 0;
public ProcessorThreads(XMPPProcessorIfc processor) {
this.processor = processor;
numQueues = processor.concurrentQueuesNo();
numWorkerThreads = processor.concurrentThreadsPerQueue();
if ("roster-presence".equals(processor.id()) ||
"jabber:iq:auth".equals(processor.id()) ||
"urn:ietf:params:xml:ns:xmpp-sasl".equals(processor.id())) {
maxQueueSizeDef = maxQueueSize * 10;
}
for (int i = 0; i < numQueues; i++) {
queues.add(new PriorityQueue<QueueItem>(Priority.values().length,
maxQueueSizeDef));
for (int j = 0; j < numWorkerThreads; j++) {
ProcessorWorkerThread t = new ProcessorWorkerThread(queues.get(i));
t.setDaemon(true);
t.setName(processor.id() + " Queue " + i + " Worker " + j);
t.start();
workerThreads.add(t);
}
}
// ProcessorWorkerThread t = new ProcessorWorkerThread(nullQueue);
// t.setDaemon(true);
// t.setName(processor.id() + " Null Queue Worker");
// t.start();
// workerThreads.add(t);
}
public String getName() {
return processor.id();
}
private int getTotalQueueSize() {
int ret = 0;
for (PriorityQueue<QueueItem> pq : queues) {
ret += pq.totalSize();
}
// ret += nullQueue.totalSize();
return ret;
}
public boolean addItem(Packet packet, XMPPResourceConnection conn) {
boolean ret = false;
QueueItem item = new QueueItem();
item.conn = conn;
item.packet = packet;
try {
// Queueing packets per user...
ret = queues.get(Math.abs(conn.getUserId().hashCode() %
numQueues)).offer(item, packet.getPriority().ordinal());
} catch (Exception e) {
// Otherwise per destination address
ret = queues.get(Math.abs(packet.getTo().hashCode() %
numQueues)).offer(item, packet.getPriority().ordinal());
//ret = nullQueue.offer(item, packet.getPriority().ordinal());
}
return ret;
}
private void packetDroped() {
++dropedPackets;
}
private long averageTime() {
long average = 0;
for (ProcessorWorkerThread processorWorkerThread : workerThreads) {
average += processorWorkerThread.cntAverageTime;
}
return average / workerThreads.size();
}
private class ProcessorWorkerThread
extends Thread {
private LinkedList<Packet> local_results = new LinkedList<Packet>();
private PriorityQueue<QueueItem> queue = null;
private long cntAverageTime = 0;
private ProcessorWorkerThread(PriorityQueue<QueueItem> queue) {
this.queue = queue;
}
@Override
public void run() {
while (!stopped) {
QueueItem item = null;
try {
//XXX - not very nice, getting the current time can be slooooooow
item = queue.take();
long start = System.currentTimeMillis();
if (item.conn != null) {
// Not sure if this synchronization is needed at all
synchronized (item.conn) {
processor.process(item.packet, item.conn, naUserRepository,
local_results, plugin_config.get(processor.id()));
setPermissions(item.conn, local_results);
}
} else {
processor.process(item.packet, null, naUserRepository,
local_results, plugin_config.get(processor.id()));
}
addOutPackets(item.packet, item.conn, local_results);
++cntRuns;
cntAverageTime =
(cntAverageTime + (System.currentTimeMillis() - start)) / 2;
} catch (PacketErrorTypeException e) {
log.info("Already error packet, ignoring: " + item.packet.toString());
} catch (Exception e) {
log.log(Level.SEVERE, "Exception during packet processing: " +
item.packet.toString(), e);
}
}
}
}
}
/**
 * Timer task armed when a new connection appears: if the connection still
 * has not authenticated by the time it fires, the connection is removed
 * and the connection manager is told to close it.
 */
private class AuthenticationTimer extends TimerTask {

  // Connection-manager address of the watched connection.
  private String connId = null;

  private AuthenticationTimer(String connId) {
    this.connId = connId;
  }

  @Override
  public void run() {
    XMPPResourceConnection conn = connectionsByFrom.get(connId);
    // Only act when the connection still exists and never authenticated.
    if (conn != null && !conn.isAuthorized()) {
      connectionsByFrom.remove(connId);
      ++authTimeouts;
      log.info("Authentication timeout expired, closing connection: " + connId);
      fastAddOutPacket(Command.CLOSE.getPacket(getComponentId(),
          connId, StanzaType.set, conn.nextStanzaId()));
    }
  }
}
private class ConnectionCheckCommandHandler implements ReceiverEventHandler {
@Override
public void timeOutExpired(Packet packet) {
if (log.isLoggable(Level.FINER)) {
log.finer("Connection checker timeout expired, closing connection: " +
packet.getTo());
}
closeConnection(packet.getTo(), false);
}
@Override
public void responseReceived(Packet packet, Packet response) {
if (response.getType() == StanzaType.error) {
if (log.isLoggable(Level.FINER)) {
log.finer("Connection checker error received, closing connection: " +
packet.getTo());
}
// The connection is not longer active, closing the user session here.
closeConnection(packet.getTo(), false);
}
}
}
/**
 * Restricted repository view handed to plugins for users which are not
 * (fully) authenticated. Exposes only the public-data and offline-data
 * nodes of the wrapped {@link UserRepository}.
 */
private static class NARepository implements NonAuthUserRepository {

  // Backing repository all calls delegate to.
  UserRepository rep = null;

  NARepository(UserRepository userRep) {
    rep = userRep;
  }

  // Appends the optional subnode to the base node path.
  private String calcNode(String base, String subnode) {
    if (subnode == null) {
      return base;
    } // end of if (subnode == null)
    return base + "/" + subnode;
  }

  /**
   * Reads a value from the user's public data node.
   * NOTE(review): a missing user or a repository error yields {@code null}
   * rather than the declared UserNotFoundException - confirm intended.
   */
  @Override
  public String getPublicData(String user, String subnode, String key,
      String def) throws UserNotFoundException {
    try {
      return (rep.userExists(user) ?
          rep.getData(user, calcNode(PUBLIC_DATA_NODE, subnode), key, def) :
          null);
    } catch (TigaseDBException e) {
      log.log(Level.SEVERE, "Problem accessing repository data.", e);
      return null;
    } // end of try-catch
  }

  /**
   * Reads a value list from the user's public data node; same null
   * behavior as {@link #getPublicData}.
   */
  @Override
  public String[] getPublicDataList(String user, String subnode, String key)
      throws UserNotFoundException {
    try {
      return (rep.userExists(user) ?
          rep.getDataList(user, calcNode(PUBLIC_DATA_NODE, subnode), key) :
          null);
    } catch (TigaseDBException e) {
      log.log(Level.SEVERE, "Problem accessing repository data.", e);
      return null;
    } // end of try-catch
  }

  /**
   * Appends values to the user's offline data node. A missing user is
   * logged and ignored (normal for anonymous users).
   */
  @Override
  public void addOfflineDataList(String user, String subnode, String key,
      String[] list) throws UserNotFoundException {
    try {
      if (rep.userExists(user)) {
        rep.addDataList(user, calcNode(OFFLINE_DATA_NODE, subnode), key, list);
      } else {
        throw new UserNotFoundException("User: " + user
            + " has not been found inthe repository.");
      }
    } catch (UserNotFoundException e) {
      // This is quite normal for anonymous users.
      log.info("User not found in repository: " + user);
    } catch (TigaseDBException e) {
      log.log(Level.SEVERE, "Problem accessing repository data.", e);
    } // end of try-catch
  }

  /**
   * Writes a single offline data value, refusing to overwrite an existing
   * one (write-once semantics for non-authenticated writers).
   */
  @Override
  public void addOfflineData(String user, String subnode, String key,
      String value) throws UserNotFoundException, DataOverwriteException {
    String node = calcNode(OFFLINE_DATA_NODE, subnode);
    try {
      String data = rep.getData(user, node, key);
      if (data == null) {
        rep.setData(user, node, key, value);
      } else {
        throw new
            DataOverwriteException("Not authorized attempt to overwrite data.");
      } // end of if (data == null) else
    } catch (TigaseDBException e) {
      log.log(Level.SEVERE, "Problem accessing repository data.", e);
    } // end of try-catch
  }
}
}
| src/main/java/tigase/server/xmppsession/SessionManager.java | /*
* Tigase Jabber/XMPP Server
* Copyright (C) 2004-2007 "Artur Hefczyc" <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. Look for COPYING file in the top folder.
* If not, see http://www.gnu.org/licenses/.
*
* $Rev$
* Last modified by $Author$
* $Date$
*/
package tigase.server.xmppsession;
//import tigase.auth.TigaseConfiguration;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.security.Security;
import java.util.Arrays;
import java.util.Collection;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.LinkedHashMap;
import java.util.Queue;
import java.util.Timer;
import java.util.TimerTask;
import java.util.UUID;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.Enumeration;
import javax.script.Bindings;
import javax.script.ScriptEngineFactory;
import javax.script.ScriptEngineManager;
import tigase.auth.TigaseSaslProvider;
import tigase.conf.Configurable;
import tigase.db.DataOverwriteException;
import tigase.db.NonAuthUserRepository;
import tigase.db.RepositoryFactory;
import tigase.db.TigaseDBException;
import tigase.db.UserAuthRepository;
import tigase.db.UserNotFoundException;
import tigase.db.UserRepository;
import tigase.disco.ServiceEntity;
import tigase.disco.ServiceIdentity;
import tigase.disco.XMPPService;
import tigase.server.AbstractMessageReceiver;
import tigase.server.Command;
import tigase.server.Packet;
import tigase.server.Permissions;
import tigase.server.Priority;
import tigase.server.ReceiverEventHandler;
import tigase.server.XMPPServer;
import tigase.stats.StatRecord;
import tigase.util.JIDUtils;
import tigase.util.PriorityQueue;
import tigase.xml.Element;
import tigase.xmpp.Authorization;
import tigase.xmpp.NotAuthorizedException;
import tigase.xmpp.PacketErrorTypeException;
import tigase.xmpp.ProcessorFactory;
import tigase.xmpp.StanzaType;
import tigase.xmpp.XMPPPostprocessorIfc;
import tigase.xmpp.XMPPPreprocessorIfc;
import tigase.xmpp.XMPPProcessorIfc;
import tigase.xmpp.XMPPResourceConnection;
import tigase.xmpp.XMPPSession;
import tigase.xmpp.XMPPStopListenerIfc;
import tigase.xmpp.ConnectionStatus;
import tigase.xmpp.XMPPPacketFilterIfc;
import static tigase.server.xmppsession.SessionManagerConfig.*;
/**
* Class SessionManager
*
*
* Created: Tue Nov 22 07:07:11 2005
*
* @author <a href="mailto:[email protected]">Artur Hefczyc</a>
* @author <a href="mailto:[email protected]">Peter Sandström</a> - multi-threading
*  support for plugin processors.
* @version $Rev$
*/
public class SessionManager extends AbstractMessageReceiver
implements Configurable, XMPPService, SessionManagerHandler {
/**
 * Variable <code>log</code> is a class logger.
 */
private static final Logger log =
    Logger.getLogger(SessionManager.class.getName());

// Session-data key for packets queued while the session was ON_HOLD.
protected static final String SESSION_PACKETS = "session-packets";
protected static final String ADMIN_COMMAND_NODE =
    "http://jabber.org/protocol/admin";

// Data and authentication back-ends (may be shared instances supplied
// through configuration - see setProperties()).
private UserRepository user_repository = null;
private UserAuthRepository auth_repository = null;
private NonAuthUserRepository naUserRepository = null;
private PacketFilter filter = null;

//private String[] hostnames = {"localhost"};
private String[] admins = {"admin@localhost"};
private String[] trusted = {"admin@localhost"};
//private String[] anon_peers = {"admin@localhost"};

// Active user sessions keyed by bare user ID, and resource connections
// keyed by the connection manager address ('from' of incoming packets).
private ConcurrentHashMap<String, XMPPSession> sessionsByNodeId =
    new ConcurrentHashMap<String, XMPPSession>();
private ConcurrentHashMap<String, XMPPResourceConnection> connectionsByFrom =
    new ConcurrentHashMap<String, XMPPResourceConnection>();

// Plugin registries, all keyed by plugin ID (see addPlugin()).
private Map<String, XMPPPreprocessorIfc> preProcessors =
    new ConcurrentSkipListMap<String, XMPPPreprocessorIfc>();
private Map<String, ProcessorThreads> processors =
    new ConcurrentSkipListMap<String, ProcessorThreads>();
private Map<String, XMPPPostprocessorIfc> postProcessors =
    new ConcurrentSkipListMap<String, XMPPPostprocessorIfc>();
private Map<String, XMPPStopListenerIfc> stopListeners =
    new ConcurrentSkipListMap<String, XMPPStopListenerIfc>();
private Map<String, Map<String, Object>> plugin_config =
    new ConcurrentSkipListMap<String, Map<String, Object>>();
private Map<String, XMPPPacketFilterIfc> outFilters =
    new ConcurrentSkipListMap<String, XMPPPacketFilterIfc>();
private Map<String, AdminCommandIfc> adminCommands =
    new ConcurrentSkipListMap<String, AdminCommandIfc>();

private Timer authenticationWatchdog = new Timer("SM authentocation watchdog");
private ScriptEngineManager scriptEngineManager = new ScriptEngineManager();
private ConnectionCheckCommandHandler connectionCheckCommandHandler =
    new ConnectionCheckCommandHandler();
//private Set<String> anonymous_domains = new HashSet<String>();
//private XMPPResourceConnection serverSession = null;
private ServiceEntity serviceEntity = null;
// private ServiceEntity adminDisco = null;

// Statistics counters (reported by getStatistics()).
private long closedConnections = 0;
private long authTimeouts = 0;
private int maxPluginsNo = 0;

// Idle-session reaper state and configuration - see start()/stop().
private Timer reaperTask = null;
private long reaperInterval = 60 * 1000;
private long maxIdleTime = 86400 * 1000;
/**
 * Starts the component and schedules the idle-session reaper: a daemon
 * timer that periodically logs out resource connections idle for longer
 * than {@code maxIdleTime}.
 */
@Override
public void start() {
  super.start();
  reaperTask = new Timer("Session reaper task", true);
  reaperTask.schedule(new TimerTask() {
    @Override
    public void run() {
      long currentTime = System.currentTimeMillis();
      for (Enumeration<XMPPResourceConnection> e = connectionsByFrom.elements(); e.hasMoreElements(); ) {
        XMPPResourceConnection xrc = e.nextElement();
        // The session manager's own internal session is never reaped.
        if (!"session-id-sess-man".equals(xrc.getSessionId())) {
          // Reap only connections idle beyond maxIdleTime AND older than
          // one reaper interval (avoids touching brand-new sessions).
          if (currentTime - xrc.getLastAccessed() > maxIdleTime && currentTime - xrc.getCreationTime() > reaperInterval) {
            if (log.isLoggable(Level.WARNING)) {
              log.warning("Logging out " + xrc.getSessionId() + " after >" + (maxIdleTime/1000) + " seconds of inactivity");
            }
            try {
              xrc.logout();
            } catch (NotAuthorizedException ex) {
              if (log.isLoggable(Level.WARNING)) {
                log.warning("Could not logout " + xrc.getSessionId() + ": " + ex.getMessage());
              }
            }
          }
        }
      }
    }
  }, reaperInterval, reaperInterval);
}
/**
 * Stops the component and cancels the idle-session reaper.
 *
 * Fix: guards against {@code reaperTask} being null - the previous version
 * threw an NPE when stop() was called twice or before start().
 */
@Override
public void stop() {
  super.stop();
  if (reaperTask != null) {
    reaperTask.cancel();
    reaperTask = null;
  }
}
/**
 * Sets the component name and builds its service-discovery tree, including
 * the built-in admin commands for adding and removing command scripts.
 * Also logs all available script engines.
 *
 * The duplicated command-registration boilerplate of the previous version
 * is factored into {@link #registerAdminCommand(AdminCommandIfc)}.
 *
 * @param name component name assigned by the server.
 */
@Override
public void setName(String name) {
  super.setName(name);
  serviceEntity = new ServiceEntity(name, "sm", "Session manager");
  serviceEntity.addIdentities(
      new ServiceIdentity("component", "sm", "Session manager"));
  AdminCommandIfc command = new AddScriptCommand();
  command.init(AdminCommandIfc.ADD_SCRIPT_CMD, "New command script");
  registerAdminCommand(command);
  command = new RemoveScriptCommand();
  command.init(AdminCommandIfc.DEL_SCRIPT_CMD, "Remove command script");
  registerAdminCommand(command);
  List<ScriptEngineFactory> scriptFactories =
      scriptEngineManager.getEngineFactories();
  if (scriptFactories != null) {
    for (ScriptEngineFactory scriptEngineFactory : scriptFactories) {
      log.info("Found script engine for language: " +
          scriptEngineFactory.getLanguageName() + ", version: " +
          scriptEngineFactory.getLanguageVersion());
    }
  }
}

/**
 * Registers one admin (ad-hoc) command: stores it in the command registry
 * and publishes it as a disco item under the admin command node.
 *
 * @param command initialized command to register.
 */
private void registerAdminCommand(AdminCommandIfc command) {
  adminCommands.put(command.getCommandId(), command);
  ServiceEntity item = new ServiceEntity(getName(),
      "http://jabber.org/protocol/admin#" + command.getCommandId(),
      command.getDescription());
  item.addIdentities(
      new ServiceIdentity("component", "generic", command.getDescription()),
      new ServiceIdentity("automation", "command-node", command.getDescription()));
  item.addFeatures(CMD_FEATURES);
  serviceEntity.addItems(item);
}
// private void debug_packet(String msg, Packet packet, String to) {
// if (packet.getElemTo().equals(to)) {
// log.finest(msg + ", packet: " + packet.getStringData());
// }
// }
/**
 * Looks up the resource connection registered under the given connection
 * manager address.
 *
 * @return the connection, or {@code null} when none is registered.
 */
protected XMPPResourceConnection getXMPPResourceConnection(String connId) {
  return connectionsByFrom.get(connId);
}
/**
 * Finds the resource connection a packet should be processed with: first
 * by the packet's 'from' (connection manager) address, then - for packets
 * addressed to a local user - by the stanza 'to' address. TEMP (not fully
 * established) connections are treated as absent.
 *
 * @param p packet to find a connection for.
 * @return matching connection, or {@code null} when none applies.
 */
protected XMPPResourceConnection getXMPPResourceConnection(Packet p) {
  XMPPResourceConnection conn = null;
  final String from = p.getFrom();
  if (from != null) {
    conn = connectionsByFrom.get(from);
    if (conn != null) {
      // A TEMP connection must not process packets - report "not found".
      return conn.getConnectionStatus() == ConnectionStatus.TEMP ? null : conn;
    }
  }
  // It might be a message _to_ some user on this server
  // so let's look for established session for this user...
  final String to = p.getElemTo();
  if (to != null) {
    if (log.isLoggable(Level.FINEST)) {
      log.finest("Searching for resource connection for: " + to);
    }
    conn = getResourceConnection(to);
    if (conn != null && conn.getConnectionStatus() == ConnectionStatus.TEMP) {
      conn = null;
    }
  } else {
    // Hm, not sure what should I do now....
    // Maybe I should treat it as message to admin....
    log.info("Message without TO attribute set, don't know what to do wih this: "
        + p.getStringData());
  } // end of else
  return conn;
}
/**
 * Detects a packet whose routing 'from' address does not match its stanza
 * 'from' — normally a sign that the originating connection is gone
 * (e.g. a Bosh connection that timed out). Commands other than OTHER are
 * exempt, as they legitimately carry differing addresses.
 *
 * <p>A broken packet is either forwarded to its (non-local) origin domain
 * or answered with a service-unavailable error; in both cases {@code true}
 * is returned and the caller should stop processing it.
 *
 * @param p packet to check
 * @return true when the packet was classified as broken and consumed here
 */
protected boolean isBrokenPacket(Packet p) {
  if (!p.getFrom().equals(p.getElemFrom()) && (!p.isCommand()
      || (p.isCommand() && p.getCommand() == Command.OTHER))) {
    // Sometimes (Bosh) connection is gone and this is an error packet
    // sent back to the original sender. This original sender might
    // not be local....
    if (p.getElemFrom() != null &&
        !isLocalDomain(JIDUtils.getNodeHost(p.getElemFrom()))) {
      // ok just forward it there....
      p.setFrom(null);
      p.setTo(null);
      fastAddOutPacket(p);
      return true;
    }
    // It doesn't look good, there should really be a connection for
    // this packet....
    // returning error back...
    log.info("Broken packet: " + p.toString());
    try {
      Packet error =
          Authorization.SERVICE_UNAVAILABLE.getResponseMessage(p,
              "Service not available.", true);
      error.setTo(p.getFrom());
      fastAddOutPacket(error);
    } catch (PacketErrorTypeException e) {
      // The packet was already an error; nothing more we can send back.
      log.info("Packet processing exception: " + e);
    }
    return true;
  }
  return false;
}
/**
 * Main packet entry point. Command packets are handled by
 * {@link #processCommand(Packet)}; everything else is dispatched to
 * session-aware processing, after filtering out broken packets and
 * packets for admins/local domains when no session exists.
 *
 * @param packet incoming packet
 */
@Override
public void processPacket(final Packet packet) {
  if (log.isLoggable(Level.FINEST)) {
    log.finest("Received packet: " + packet.toString());
  }
  // Commands consumed by processCommand() need no further processing.
  if (packet.isCommand() && processCommand(packet)) {
    packet.processedBy("SessionManager");
    return;
  }
  XMPPResourceConnection session = getXMPPResourceConnection(packet);
  if (session != null) {
    processPacket(packet, session);
    return;
  }
  // No session: broken packets and admin/domain packets are fully
  // handled by these checks; anything else is processed without a session.
  if (isBrokenPacket(packet) || processAdminsOrDomains(packet)) {
    return;
  }
  processPacket(packet, null);
}
/**
 * Runs a packet through the full plugin pipeline in the context of the
 * given user connection: filter preprocess short-circuit, preprocessors
 * (privacy lists etc.), filter forwarding, plugin dispatch ({@code walk}),
 * postprocessors and a final filter pass. Result packets get permissions
 * stamped and are queued for delivery. Unprocessed packets are answered
 * with an appropriate error stanza.
 *
 * @param packet packet to process; its 'to' routing address is rewritten
 *               to this component's ID
 * @param conn   user connection context, may be {@code null}
 */
protected void processPacket(Packet packet, XMPPResourceConnection conn) {
  packet.setTo(getComponentId());
  if (log.isLoggable(Level.FINEST)) {
    log.finest("processing packet: " + packet.toString() +
        ", connectionID: " +
        (conn != null ? conn.getConnectionId() : "null"));
  }
  Queue<Packet> results = new LinkedList<Packet>();
  // Fast path: the packet filter may fully consume the packet up front.
  if (filter.preprocess(packet, conn, naUserRepository, results)) {
    packet.processedBy("filter-foward");
    if (log.isLoggable(Level.FINEST)) {
      log.finest("Packet preprocessed: " + packet.toString());
      if (results.size() > 0) {
        for (Packet p: results) {
          log.finest("Preprocess result: " + p.toString());
        }
      }
    }
    addOutPackets(packet, conn, results);
    return;
  }
  // Preprocess..., all preprocessors get all messages to look at.
  // I am not sure if this is correct for now, let's try to do it this
  // way and maybe change it later.
  // If any of them returns true - it means processing should stop now.
  // That is needed for preprocessors like privacy lists which should
  // block certain packets.
  boolean stop = false;
  for (XMPPPreprocessorIfc preproc: preProcessors.values()) {
    stop |= preproc.preProcess(packet, conn, naUserRepository, results);
  } // end of for (XMPPPreprocessorIfc preproc: preProcessors)
  if (!stop) {
    if (filter.forward(packet, conn, naUserRepository, results)) {
      packet.processedBy("filter-foward");
      if (log.isLoggable(Level.FINEST)) {
        log.finest("Packet forwarded: " + packet.toString());
      }
      addOutPackets(packet, conn, results);
      return;
    }
  }
  if (!stop) {
    // Dispatch the packet (and all its child elements) to the plugins.
    walk(packet, conn, packet.getElement(), results);
  }
  if (!stop) {
    for (XMPPPostprocessorIfc postproc: postProcessors.values()) {
      postproc.postProcess(packet, conn, naUserRepository, results);
    } // end of for (XMPPPostprocessorIfc postproc: postProcessors)
  } // end of if (!stop)
  if (!stop && !packet.wasProcessed() && !isLocalDomain(packet.getElemTo())
      && filter.process(packet, conn, naUserRepository, results)) {
    packet.processedBy("filter-process");
  }
  setPermissions(conn, results);
  addOutPackets(packet, conn, results);
  if (!packet.wasProcessed()) {
    if (log.isLoggable(Level.FINEST)) {
      log.finest("Packet not processed: " + packet.toString());
    }
    Packet error = null;
    // BUG FIX: the original compared Strings with '!=' here
    // (packet.getElemTo() != getComponentId()), which is a reference
    // comparison and is effectively always true for parsed stanzas, so
    // packets addressed to the component itself could still bounce a
    // SERVICE_UNAVAILABLE error. Compare by value instead.
    if (stop
        || (conn == null
            && packet.getElemFrom() != null && packet.getElemTo() != null
            && !getComponentId().equals(packet.getElemTo())
            && (packet.getElemName().equals("iq")
                || packet.getElemName().equals("message")))) {
      try {
        error = Authorization.SERVICE_UNAVAILABLE.getResponseMessage(packet,
            "Service not available.", true);
      } catch (PacketErrorTypeException e) {
        log.info("Packet processing exception: " + e
            + ", packet: " + packet.toString());
      }
    } else {
      if (packet.getElemFrom() != null || conn != null) {
        try {
          error = Authorization.FEATURE_NOT_IMPLEMENTED.getResponseMessage(packet,
              "Feature not supported yet.", true);
        } catch (PacketErrorTypeException e) {
          log.info("Packet processing exception: " + e
              + ", packet: " + packet.toString());
        }
      }
    }
    if (error != null) {
      // Route the error back through the sender's connection if one exists.
      if (error.getElemTo() != null) {
        conn = getResourceConnection(error.getElemTo());
      } // end of if (error.getElemTo() != null)
      if (conn != null) {
        error.setTo(conn.getConnectionId());
      } // end of if (conn != null)
      addOutPacket(error);
    }
  } else {
    if (log.isLoggable(Level.FINEST)) {
      log.finest("Packet processed by: " + packet.getProcessorsIds().toString());
    }
  } // end of else
}
/**
 * Stamps every result packet with the permission level derived from the
 * connection state: NONE (no connection / auth error), LOCAL
 * (unauthenticated), ANONYM, AUTH, TRUSTED or ADMIN.
 *
 * @param conn    connection whose state determines the permissions,
 *                may be {@code null}
 * @param results packets to stamp
 */
private void setPermissions(XMPPResourceConnection conn,
    Queue<Packet> results) {
  Permissions level = Permissions.NONE;
  if (conn != null) {
    if (!conn.isAuthorized()) {
      level = Permissions.LOCAL;
    } else if (conn.isAnonymous()) {
      level = Permissions.ANONYM;
    } else {
      level = Permissions.AUTH;
      try {
        String userId = conn.getUserId();
        if (isTrusted(userId)) {
          level = Permissions.TRUSTED;
        }
        if (isAdmin(userId)) {
          level = Permissions.ADMIN;
        }
      } catch (NotAuthorizedException e) {
        // Authorization state changed under us; fall back to no rights.
        level = Permissions.NONE;
      }
    }
  }
  for (Packet outgoing : results) {
    outgoing.setPermissions(level);
  }
}
// protected String[] getVHosts() {
// return hostnames;
// }
/**
 * Checks whether the given JID (reduced to its bare node ID) is one of
 * the configured administrators.
 *
 * @param jid full or bare JID to check
 * @return true when the bare JID matches a configured admin
 */
private boolean isAdmin(String jid) {
  // Compare against the bare JID — resource part is irrelevant.
  final String bareJid = JIDUtils.getNodeID(jid);
  for (String admin : admins) {
    if (admin.equals(bareJid)) {
      return true;
    }
  }
  return false;
}
/**
 * Checks whether the given JID is trusted: either explicitly listed in
 * the trusted set, or an administrator (admins are implicitly trusted).
 *
 * @param jid full or bare JID to check
 * @return true when the JID is trusted
 */
private boolean isTrusted(String jid) {
  final String bareJid = JIDUtils.getNodeID(jid);
  for (String trustedJid : trusted) {
    if (trustedJid.equals(bareJid)) {
      return true;
    }
  }
  // Every admin is implicitly trusted.
  return isAdmin(jid);
}
/**
 * Handles packets addressed directly to a local (virtual) domain:
 * messages are delivered to the administrators, other stanzas are
 * re-addressed to this component (saving the original addresses in
 * OLDTO/OLDFROM) and re-processed.
 *
 * @param packet packet to examine
 * @return true when the packet was addressed to a local domain and
 *         consumed here, false otherwise
 */
protected boolean processAdminsOrDomains(Packet packet) {
  String stanzaTo = packet.getElemTo();
  if (!isLocalDomain(stanzaTo)) {
    return false;
  }
  if (packet.getElemName().equals("message")) {
    // Yes this packet is for admin....
    if (log.isLoggable(Level.FINER)) {
      log.finer("Packet for admin: " + packet.getStringData());
    }
    sendToAdmins(packet);
    return true;
  }
  if (log.isLoggable(Level.FINER)) {
    log.finer("Packet for hostname: " + packet.getStringData());
  }
  // Re-address the stanza to this component, preserving the original
  // addresses so addOutPacket() can restore them on the way out.
  Packet rewritten = new Packet(packet.getElement().clone());
  rewritten.getElement().setAttribute("to", getComponentId());
  rewritten.getElement().setAttribute(Packet.OLDTO, packet.getElemTo());
  rewritten.getElement().setAttribute(Packet.OLDFROM, packet.getElemFrom());
  processPacket(rewritten);
  return true;
}
/**
 * Delivers a copy of the given packet to every configured administrator.
 *
 * @param packet packet to duplicate and deliver
 */
protected void sendToAdmins(Packet packet) {
  for (String adminJid : admins) {
    if (log.isLoggable(Level.FINER)) {
      log.finer("Sending packet to admin: " + adminJid);
    }
    // Clone per recipient so each copy can carry its own 'to' address.
    Packet copy = new Packet(packet.getElement().clone());
    copy.getElement().setAttribute("to", adminJid);
    processPacket(copy);
  }
}
/**
 * Returns the user session for the given JID, keyed by its bare node ID.
 *
 * @param jid full or bare JID
 * @return the session, or {@code null} when the user has no active session
 */
protected XMPPSession getSession(String jid) {
  return sessionsByNodeId.get(JIDUtils.getNodeID(jid));
}
/**
 * Resolves the resource connection for a full JID via the user's session.
 *
 * @param jid full JID to resolve
 * @return the resource connection, or {@code null} when the user has no
 *         session or no matching resource
 */
protected XMPPResourceConnection getResourceConnection(String jid) {
  XMPPSession session = getSession(jid);
  if (session == null) {
    return null;
  }
  if (log.isLoggable(Level.FINEST)) {
    log.finest("Session not null, getting resource for jid: " + jid);
  }
  return session.getResourceConnection(jid);
}
/**
 * Recursively offers the packet to every plugin which declares support
 * for the given element (by name and XMLNS), then descends into the
 * element's children. Supporting plugins receive the packet on their
 * internal queue; a full queue counts as a dropped packet.
 *
 * @param packet     packet being processed
 * @param connection user connection context, may be {@code null}
 * @param elem       element to match plugins against (initially the
 *                   packet's root element)
 * @param results    result queue (passed through the recursion)
 */
private void walk(final Packet packet,
    final XMPPResourceConnection connection, final Element elem,
    final Queue<Packet> results) {
  for (ProcessorThreads proc_t: processors.values()) {
    String xmlns = elem.getXMLNS();
    // Elements without an explicit namespace default to jabber:client.
    if (xmlns == null) { xmlns = "jabber:client"; }
    if (proc_t.processor.isSupporting(elem.getName(), xmlns)) {
      if (log.isLoggable(Level.FINEST)) {
        log.finest("XMPPProcessorIfc: "+proc_t.processor.getClass().getSimpleName()+
            " ("+proc_t.processor.id()+")"+"\n Request: "+elem.toString()
            + (connection != null ? ", " + connection.getConnectionId() : " null"));
      }
      if (proc_t.addItem(packet, connection)) {
        packet.processedBy(proc_t.processor.id());
      } else {
        // Queue full — record the drop instead of blocking.
        // proc_t.debugQueue();
        proc_t.packetDroped();
        if (log.isLoggable(Level.FINE)) {
          log.fine("Can not add packet: " + packet.toString() +
              " to processor: " + proc_t.getName() +
              " internal queue full.");
        }
      }
    } // end of if (proc.isSupporting(elem.getName(), elem.getXMLNS()))
  } // end of for ()
  // Recurse into child elements so plugins can match nested payloads.
  Collection<Element> children = elem.getChildren();
  if (children != null) {
    for (Element child: children) {
      walk(packet, connection, child, results);
    } // end of for (Element child: children)
  } // end of if (children != null)
}
/**
 * Creates and registers a new user connection/session for the given
 * connection ID, domain and user JID. A placeholder session ID is set;
 * the connection is registered both by connection ID and in the user's
 * session map.
 *
 * @param conn_id  connection ID to register the connection under
 * @param domain   domain the connection belongs to
 * @param user_jid full user JID (resource is applied to the connection)
 * @return the newly created connection
 */
protected XMPPResourceConnection createUserSession(String conn_id,
    String domain, String user_jid) {
  XMPPResourceConnection connection = new XMPPResourceConnection(conn_id,
      user_repository, auth_repository, this, false);
  connection.setDomain(domain);
  // Dummy session ID, we might decide later to set real thing here
  connection.setSessionId("session-id-"+JIDUtils.getNodeNick(user_jid));
  //connection.setAnonymousPeers(anon_peers);
  connectionsByFrom.put(conn_id, connection);
  registerNewSession(JIDUtils.getNodeID(user_jid), connection);
  try {
    connection.setResource(JIDUtils.getNodeResource(user_jid));
  } catch (NotAuthorizedException e) {
    // Setting the resource should not fail here; log and continue with
    // the connection as-is.
    log.warning("Something wrong with authorization: " + e
        + ", for user: " + user_jid);
  }
  return connection;
}
/**
 * The session manager handles traffic for the whole server, so its
 * internal queue is ten times the default component queue size.
 *
 * @param def default queue size suggested by the framework
 * @return the enlarged queue size
 */
@Override
protected Integer getMaxQueueSize(int def) {
  final int multiplier = 10;
  return multiplier * def;
}
/**
 * Checks with the vhost manager whether anonymous login is enabled for
 * the given domain; without a vhost manager the answer is always false.
 *
 * @param domain domain to check
 * @return true when anonymous login is enabled for the domain
 */
private boolean isAnonymousEnabled(String domain) {
  if (vHostManager == null) {
    // No vhost manager to ask — be conservative and deny.
    return false;
  }
  return vHostManager.isAnonymousEnabled(domain);
}
/**
 * Handles internal command packets addressed to this component or a local
 * domain: stream lifecycle (STREAM_OPENED/STREAM_CLOSED), stream feature
 * queries, trusted broadcasts, user status manipulation, connection
 * redirects and ad-hoc admin command scripts.
 *
 * @param pc command packet to process
 * @return true when the command was recognized and consumed here, false
 *         when the packet should continue through normal processing
 */
protected boolean processCommand(Packet pc) {
  // Only handle commands with no 'to' address, addressed to this
  // component, or addressed to a local domain.
  if (!(pc.getElemTo() == null) &&
      !getComponentId().equals(pc.getElemTo()) &&
      !isLocalDomain(pc.getElemTo())) {
    return false;
  }
  boolean processing_result = false;
  if (log.isLoggable(Level.FINER)) {
    log.finer(pc.getCommand().toString() + " command from: " + pc.getFrom());
  }
  //Element command = pc.getElement();
  XMPPResourceConnection connection = connectionsByFrom.get(pc.getFrom());
  switch (pc.getCommand()) {
  case STREAM_OPENED:
    // It might be existing opened stream after TLS/SASL authorization
    // If not, it means this is new stream
    if (connection == null) {
      if (log.isLoggable(Level.FINER)) {
        log.finer("Adding resource connection for: " + pc.getFrom());
      }
      final String hostname = Command.getFieldValue(pc, "hostname");
      connection = new XMPPResourceConnection(pc.getFrom(),
          user_repository, auth_repository, this,
          isAnonymousEnabled(hostname));
      if (hostname != null) {
        if (log.isLoggable(Level.FINEST)) {
          log.finest("Setting hostname " + hostname
              + " for connection: " + connection.getConnectionId());
        }
        connection.setDomain(hostname);
      } // end of if (hostname != null)
      else {
        connection.setDomain(getDefHostName());
      } // end of if (hostname != null) else
      //connection.setAnonymousPeers(anon_peers);
      connectionsByFrom.put(pc.getFrom(), connection);
      // The connection must authenticate within a minute or it is closed.
      authenticationWatchdog.schedule(new AuthenticationTimer(pc.getFrom()),
          MINUTE);
    } else {
      if (log.isLoggable(Level.FINEST)) {
        log.finest("Stream opened for existing session, authorized: "
            + connection.isAuthorized());
      }
    } // end of else
    connection.setSessionId(Command.getFieldValue(pc, "session-id"));
    connection.setDefLang(Command.getFieldValue(pc, "xml:lang"));
    if (log.isLoggable(Level.FINEST)) {
      log.finest("Setting session-id " + connection.getSessionId()
          + " for connection: " + connection.getConnectionId());
    }
    fastAddOutPacket(pc.okResult((String) null, 0));
    processing_result = true;
    break;
  case GETFEATURES:
    // Answer with the stream features collected from all plugins.
    if (pc.getType() == StanzaType.get) {
      List<Element> features =
          getFeatures(connectionsByFrom.get(pc.getFrom()));
      Packet result = pc.commandResult(null);
      Command.setData(result, features);
      addOutPacket(result);
    } // end of if (pc.getType() == StanzaType.get)
    processing_result = true;
    break;
  case STREAM_CLOSED:
    fastAddOutPacket(pc.okResult((String)null, 0));
    closeConnection(pc.getFrom(), false);
    processing_result = true;
    break;
  case BROADCAST_TO_ONLINE:
    // Only trusted senders may broadcast to all online, authorized users.
    String from = pc.getFrom();
    boolean trusted = false;
    try {
      trusted = (from != null && isTrusted(from))
          || (connection != null && isTrusted(connection.getUserId()));
    } catch (NotAuthorizedException e) {
      trusted = false;
    }
    try {
      if (trusted) {
        List<Element> packets = Command.getData(pc);
        if (packets != null) {
          for (XMPPResourceConnection conn: connectionsByFrom.values()) {
            if (conn.isAuthorized()) {
              try {
                for (Element el_pack: packets) {
                  // Clone per recipient so each copy gets its own address.
                  Element el_copy = el_pack.clone();
                  el_copy.setAttribute("to", conn.getJID());
                  Packet out_packet = new Packet(el_copy);
                  out_packet.setTo(conn.getConnectionId());
                  addOutPacket(out_packet);
                }
              } catch (NotAuthorizedException e) {
                log.warning("Something wrong, connection is authenticated but "
                    + "NoAuthorizedException is thrown.");
              }
            }
          }
        } else {
          addOutPacket(Authorization.BAD_REQUEST.getResponseMessage(pc,
              "Missing packets for broadcast.", true));
        }
      } else {
        addOutPacket(Authorization.FORBIDDEN.getResponseMessage(pc,
            "You don't have enough permission to brodcast packet.", true));
      }
    } catch (PacketErrorTypeException e) {
      log.warning("Packet processing exception: " + e
          + ", packet: " + pc.toString());
    }
    processing_result = true;
    break;
  case USER_STATUS:
    // Externally-driven user presence (e.g. transports): create or tear
    // down a server-side session on behalf of a user. Trusted-only.
    try {
      if (isTrusted(pc.getElemFrom())
          || isTrusted(JIDUtils.getNodeHost(pc.getElemFrom()))) {
        String av = Command.getFieldValue(pc, "available");
        boolean available = !(av != null && av.equalsIgnoreCase("false"));
        if (available) {
          Packet presence = null;
          Element p = pc.getElement().getChild("command").getChild("presence");
          if (p != null) {
            // + // use this hack to break XMLNS
            // + Element el = new Element("presence");
            // + el.setChildren(p.getChildren());
            Element elem = p.clone();
            elem.setXMLNS("jabber:client");
            presence = new Packet(elem);
          }
          connection = connectionsByFrom.get(pc.getElemFrom());
          if (connection == null) {
            // No session yet — create one and log the user in with a
            // synthetic USER_STATUS login token.
            String user_jid = Command.getFieldValue(pc, "jid");
            String hostname = JIDUtils.getNodeHost(user_jid);
            connection = createUserSession(pc.getElemFrom(), hostname, user_jid);
            connection.setSessionId("USER_STATUS");
            user_repository.setData(JIDUtils.getNodeID(user_jid), "tokens",
                "USER_STATUS", "USER_STATUS");
            connection.loginToken("USER_STATUS", "USER_STATUS");
            handleLogin(JIDUtils.getNodeNick(user_jid), connection);
            connection.putSessionData("jingle", "active");
            fastAddOutPacket(pc.okResult((String)null, 0));
            if (presence == null) {
              // Default presence advertising voice capability.
              presence =
                  new Packet(new Element("presence",
                      new Element[] {
                        new Element("priority", "-1"),
                        new Element("c",
                            new String[] {"node", "ver", "ext", "xmlns"},
                            new String[] {"http://www.google.com/xmpp/client/caps",
                                          XMPPServer.getImplementationVersion(),
                                          "voice-v1",
                                          "http://jabber.org/protocol/caps"})},
                      null, null));
            }
          } else {
            // addOutPacket(Authorization.CONFLICT.getResponseMessage(pc,
            // "The user resource already exists.", true));
            if (log.isLoggable(Level.FINEST)) {
              log.finest("USER_STATUS set to true for user who is already available: "
                  + pc.toString());
            }
          }
          if (presence != null) {
            presence.setFrom(pc.getElemFrom());
            presence.setTo(getComponentId());
            addOutPacket(presence);
          }
        } else {
          // available == false: tear the session down.
          connection = connectionsByFrom.remove(pc.getElemFrom());
          if (connection != null) {
            closeSession(connection, false);
            addOutPacket(pc.okResult((String)null, 0));
          } else {
            addOutPacket(Authorization.ITEM_NOT_FOUND.getResponseMessage(pc,
                "The user resource you want to remove does not exist.", true));
            log.info("Can not find resource connection for packet: " +
                pc.toString());
          }
        }
      } else {
        try {
          addOutPacket(Authorization.FORBIDDEN.getResponseMessage(pc,
              "Only trusted entity can do it.", true));
        } catch (PacketErrorTypeException e) {
          log.warning("Packet error type when not expected: " + pc.toString());
        }
      }
    } catch (Exception e) {
      try {
        addOutPacket(Authorization.UNDEFINED_CONDITION.getResponseMessage(pc,
            "Unexpected error occured during the request: " + e, true));
      } catch (Exception ex) { ex.printStackTrace(); }
      log.log(Level.WARNING, "USER_STATUS session creation error: ", e);
    }
    processing_result = true;
    break;
  case REDIRECT:
    // Another cluster node took over this user's connection.
    if (connection != null) {
      String action = Command.getFieldValue(pc, "action");
      if (action.equals("close")) {
        if (log.isLoggable(Level.FINE)) {
          log.fine("Closing redirected connections: " + pc.getFrom());
        }
        sendAllOnHold(connection);
        closeConnection(pc.getFrom(), true);
      } else {
        if (log.isLoggable(Level.FINE)) {
          log.fine("Activating redirected connections: " + pc.getFrom());
        }
      }
    } else {
      if (log.isLoggable(Level.FINE)) {
        log.fine("Redirect for non-existen connection: " + pc.toString());
      }
    }
    processing_result = true;
    break;
  case OTHER:
    // Ad-hoc admin command scripts (admin-only, cancel-aware).
    String strCommand = pc.getStrCommand();
    if (strCommand != null && strCommand.contains(ADMIN_COMMAND_NODE)) {
      Command.Action action = Command.getAction(pc);
      if (action != Command.Action.cancel) {
        boolean admin = false;
        try {
          admin = connection != null && connection.isAuthorized() &&
              isAdmin(connection.getUserId());
          if (admin) {
            if (log.isLoggable(Level.FINER)) {
              log.finer("Processing admin command: " + pc.toString());
            }
            // The script ID is the fragment after '#' in the command node.
            int hashIdx = strCommand.indexOf('#');
            String scriptId = strCommand.substring(hashIdx + 1);
            AdminCommandIfc com = adminCommands.get(scriptId);
            if (com == null) {
              Packet result = pc.commandResult(Command.DataType.result);
              Command.addTextField(result, "Error", "The command: " + scriptId +
                  " is not available yet.");
              fastAddOutPacket(result);
            } else {
              Bindings binds = scriptEngineManager.getBindings();
              initBindings(binds);
              Queue<Packet> results = new LinkedList<Packet>();
              com.runCommand(pc, binds, results);
              addOutPackets(results);
            }
          }
        } catch (NotAuthorizedException e) {
          admin = false;
        } catch (Exception e) {
          log.log(Level.WARNING,
              "Unknown admin command processing exception: " +
              pc.toString(), e);
        }
        if (!admin) {
          try {
            if (log.isLoggable(Level.FINER)) {
              log.finer("Command rejected non-admin detected: " +
                  (connection != null ? (connection.isAuthorized() + ": " +
                      connection.getUserId())
                  : "null"));
            }
            addOutPacket(Authorization.FORBIDDEN.getResponseMessage(pc,
                "Only Administrator can call the command.", true));
          } catch (Exception e) {
            log.info("Problem sending FORBIDDEN error: " + e +
                ", packet: " + pc.toString());
          }
        }
      } else {
        Packet result = pc.commandResult(Command.DataType.result);
        Command.addTextField(result, "Note", "Command canceled.");
        fastAddOutPacket(result);
      }
      processing_result = true;
    } else {
      log.info("Other command found: " + pc.getStrCommand());
    }
    break;
  default:
    break;
  } // end of switch (pc.getCommand())
  return processing_result;
}
/**
 * Exposes the session manager internals to the admin scripting engine:
 * repositories, live connection/session maps, the command registry,
 * service discovery entity and the script engine manager itself.
 *
 * @param binds bindings object handed to executed admin scripts
 */
private void initBindings(Bindings binds) {
  // Repositories.
  binds.put(AdminCommandIfc.AUTH_REPO, auth_repository);
  binds.put(AdminCommandIfc.USER_REPO, user_repository);
  // Live server state.
  binds.put(AdminCommandIfc.USER_CONN, connectionsByFrom);
  binds.put(AdminCommandIfc.USER_SESS, sessionsByNodeId);
  // Command registry, discovery and scripting services.
  binds.put(AdminCommandIfc.ADMN_CMDS, adminCommands);
  binds.put(AdminCommandIfc.ADMN_DISC, serviceEntity);
  binds.put(AdminCommandIfc.SCRI_MANA, scriptEngineManager);
}
/**
 * Flushes packets queued while a connection was on hold. When the session
 * has been redirected to another session manager ("redirect-to" session
 * data), the packets are forwarded there and the connection is marked
 * REDIRECT; otherwise they are re-processed locally.
 *
 * @param conn connection whose held packets should be flushed
 */
@SuppressWarnings("unchecked")
protected void sendAllOnHold(XMPPResourceConnection conn) {
  String remote_smId = (String)conn.getSessionData("redirect-to");
  LinkedList<Packet> packets =
      (LinkedList<Packet>)conn.getSessionData(SESSION_PACKETS);
  if (remote_smId == null) {
    if (log.isLoggable(Level.FINEST)) {
      log.finest("No address for remote SM to redirect packets, processing locally.");
    }
    if (packets != null) {
      Packet sess_pack = null;
      while (((sess_pack = packets.poll()) != null) &&
          // Temporarily fix, need a better solution. For some reason
          // the mode has been sent back from normal to on_hold during
          // loop execution leading to infinite loop.
          // Possibly buggy client sent a second authentication packet
          // executing a second handleLogin call....
          (conn.getConnectionStatus() != ConnectionStatus.ON_HOLD)) {
        processPacket(sess_pack);
      }
    }
    return;
  }
  // Redirect case: hand every held packet over to the remote SM.
  conn.setConnectionStatus(ConnectionStatus.REDIRECT);
  if (packets != null) {
    Packet sess_pack = null;
    while ((sess_pack = packets.poll()) != null) {
      sess_pack.setTo(remote_smId);
      fastAddOutPacket(sess_pack);
    }
  }
}
/**
 * Removes the connection registered under the given connection ID and
 * closes its session.
 *
 * @param connectionId routing connection ID of the closed stream
 * @param closeOnly    when true, skip the stop-listener notifications
 *                     (see {@code closeSession})
 */
protected void closeConnection(String connectionId, boolean closeOnly) {
  if (log.isLoggable(Level.FINER)) {
    log.finer("Stream closed from: " + connectionId);
  }
  ++closedConnections;
  XMPPResourceConnection removed = connectionsByFrom.remove(connectionId);
  if (removed == null) {
    log.fine("Can not find resource connection for packet: " + connectionId);
    return;
  }
  closeSession(removed, closeOnly);
}
/**
 * Tears down a user connection: notifies stop listeners (unless
 * {@code closeOnly}), removes the user's session entry and logs the user
 * out of the auth repository when this was the last active resource, and
 * finally marks the stream closed.
 *
 * @param conn      connection to close
 * @param closeOnly when true, skip stop-listener processing (used for
 *                  redirected connections which continue elsewhere)
 */
private void closeSession(XMPPResourceConnection conn, boolean closeOnly) {
  if (!closeOnly) {
    // Give each stop listener a chance to emit final packets
    // (e.g. offline presence).
    Queue<Packet> results = new LinkedList<Packet>();
    for (XMPPStopListenerIfc stopProc: stopListeners.values()) {
      stopProc.stopped(conn, results, plugin_config.get(stopProc.id()));
    } // end of for ()
    addOutPackets(null, conn, results);
  }
  try {
    if (conn.isAuthorized()
        || (conn.getConnectionStatus() == ConnectionStatus.TEMP)) {
      String userId = conn.getUserId();
      if (log.isLoggable(Level.FINE)) {
        log.fine("Closing connection for: " + userId);
      }
      XMPPSession session = conn.getParentSession();
      if (session != null) {
        if (log.isLoggable(Level.FINE)) {
          log.fine("Found parent session for: " + userId);
        }
        // Last resource: drop the whole session and log out of the
        // auth repository.
        if (session.getActiveResourcesSize() <= 1) {
          session = sessionsByNodeId.remove(userId);
          if (session == null) {
            log.info("UPS can't remove session, not found in map: " + userId);
          } else {
            if (log.isLoggable(Level.FINER)) {
              log.finer("Number of user sessions: " + sessionsByNodeId.size());
            }
          } // end of else
          auth_repository.logout(userId);
        } else {
          if (log.isLoggable(Level.FINER)) {
            StringBuilder sb = new StringBuilder();
            for (XMPPResourceConnection res_con: session.getActiveResources()) {
              sb.append(", res=" + res_con.getResource() + " ("
                  + res_con.getConnectionStatus() + ")");
            }
            log.finer("Number of connections is "
                + session.getActiveResourcesSize() + " for the user: " + userId
                + sb.toString());
          }
        } // end of else
      } // end of if (session.getActiveResourcesSize() == 0)
    }
  } catch (NotAuthorizedException e) {
    log.info("Closed not authorized session: " + e);
  } catch (Exception e) {
    log.log(Level.WARNING, "Exception closing session... ", e);
  }
  // Always mark the stream closed, even if cleanup above failed.
  conn.streamClosed();
}
/**
 * Queues a packet for delivery, first restoring any original addresses
 * saved in the OLDTO/OLDFROM attributes (set when a stanza addressed to a
 * local domain was re-addressed to this component).
 *
 * @param packet packet to send
 * @return whatever the superclass delivery reports
 */
@Override
protected boolean addOutPacket(Packet packet) {
  String savedTo = packet.getAttribute(Packet.OLDTO);
  if (savedTo != null) {
    // Restore the original destination as the visible sender.
    packet.getElement().setAttribute("from", savedTo);
    packet.getElement().removeAttribute(Packet.OLDTO);
  }
  String savedFrom = packet.getAttribute(Packet.OLDFROM);
  if (savedFrom != null) {
    // Restore the original sender as the visible recipient.
    packet.getElement().setAttribute("to", savedFrom);
    packet.getElement().removeAttribute(Packet.OLDFROM);
  }
  return super.addOutPacket(packet);
}
/**
 * Queues a packet for delivery bypassing this class's
 * {@code addOutPacket} override, i.e. without the OLDTO/OLDFROM address
 * restoration.
 *
 * @param packet packet to send as-is
 * @return whatever the superclass delivery reports
 */
protected boolean fastAddOutPacket(Packet packet) {
  return super.addOutPacket(packet);
}
/**
 * Runs every registered output filter over the result packets, then
 * queues them all for delivery.
 *
 * @param packet  the packet that produced the results, may be {@code null}
 * @param conn    connection context, may be {@code null}
 * @param results packets to filter and send
 */
protected void addOutPackets(Packet packet, XMPPResourceConnection conn,
    Queue<Packet> results) {
  for (XMPPPacketFilterIfc outputFilter : outFilters.values()) {
    outputFilter.filter(packet, conn, naUserRepository, results);
  }
  addOutPackets(results);
}
// private XMPPSession getXMPPSession(Packet p) {
// return connectionsByFrom.get(p.getFrom()).getParentSession();
// }
/**
 * Collects the stream features advertised by every loaded plugin for the
 * given session.
 *
 * @param session session to compute features for, may be {@code null}
 * @return list of feature elements (possibly empty, never {@code null})
 */
private List<Element> getFeatures(XMPPResourceConnection session) {
  List<Element> featureList = new LinkedList<Element>();
  for (ProcessorThreads procThread : processors.values()) {
    Element[] supported = procThread.processor.supStreamFeatures(session);
    if (supported == null) {
      continue;
    }
    featureList.addAll(Arrays.asList(supported));
  }
  return featureList;
}
/**
 * Builds the default configuration: generic component defaults layered
 * with the session-manager specific settings.
 *
 * @param params initialization parameters
 * @return the combined defaults map
 */
@Override
public Map<String, Object> getDefaults(Map<String, Object> params) {
  Map<String, Object> defaults = super.getDefaults(params);
  SessionManagerConfig.getDefaults(defaults, params);
  return defaults;
}
/**
 * Loads all processing roles registered under a plugin ID: main
 * processor, preprocessor, postprocessor, stop listener and packet
 * filter. A single plugin ID may provide any subset of these roles;
 * a warning is logged when it provides none.
 *
 * @param comp_id plugin identifier to load
 */
private void addPlugin(String comp_id) {
  // NOTE(review): progress goes to stdout rather than the logger —
  // presumably intentional for startup visibility; confirm before changing.
  System.out.println("Loading plugin: " + comp_id + " ...");
  XMPPProcessorIfc proc = ProcessorFactory.getProcessor(comp_id);
  boolean loaded = false;
  if (proc != null) {
    // Main processors run on their own thread pool.
    ProcessorThreads pt = new ProcessorThreads(proc);
    processors.put(comp_id, pt);
    log.config("Added processor: " + proc.getClass().getSimpleName()
        + " for plugin id: " + comp_id);
    loaded = true;
  }
  XMPPPreprocessorIfc preproc = ProcessorFactory.getPreprocessor(comp_id);
  if (preproc != null) {
    preProcessors.put(comp_id, preproc);
    log.config("Added preprocessor: " + preproc.getClass().getSimpleName()
        + " for plugin id: " + comp_id);
    loaded = true;
  }
  XMPPPostprocessorIfc postproc = ProcessorFactory.getPostprocessor(comp_id);
  if (postproc != null) {
    postProcessors.put(comp_id, postproc);
    log.config("Added postprocessor: " + postproc.getClass().getSimpleName()
        + " for plugin id: " + comp_id);
    loaded = true;
  }
  XMPPStopListenerIfc stoplist = ProcessorFactory.getStopListener(comp_id);
  if (stoplist != null) {
    stopListeners.put(comp_id, stoplist);
    log.config("Added stopped processor: " + stoplist.getClass().getSimpleName()
        + " for plugin id: " + comp_id);
    loaded = true;
  }
  XMPPPacketFilterIfc filterproc = ProcessorFactory.getPacketFilter(comp_id);
  if (filterproc != null) {
    outFilters.put(comp_id, filterproc);
    log.config("Added packet filter: " + filterproc.getClass().getSimpleName()
        + " for plugin id: " + comp_id);
    loaded = true;
  }
  if (!loaded) {
    log.warning("No implementation found for plugin id: " + comp_id);
  } // end of if (!loaded)
}
/**
 * Applies configuration: sets up repositories (shared or freshly created
 * from configured class/URI/params), loads and configures plugins,
 * creates the internal server session, reads admin/trusted lists, and
 * loads admin command scripts from the configured scripts directory.
 *
 * @param props configuration properties
 */
@Override
public void setProperties(Map<String, Object> props) {
  super.setProperties(props);
  // SASL provider position 6 — NOTE(review): assumes a specific provider
  // ordering in this JVM; verify against the deployment's security config.
  Security.insertProviderAt(new TigaseSaslProvider(), 6);
  filter = new PacketFilter();
  // Is there a shared user repository pool? If so I want to use it:
  user_repository = (UserRepository) props.get(SHARED_USER_REPO_POOL_PROP_KEY);
  if (user_repository == null) {
    // Is there shared user repository instance? If so I want to use it:
    user_repository = (UserRepository) props.get(SHARED_USER_REPO_PROP_KEY);
  } else {
    log.config("Using shared repository pool.");
  }
  auth_repository = (UserAuthRepository) props.get(SHARED_AUTH_REPO_PROP_KEY);
  if (user_repository != null) {
    log.config("Using shared repository instance.");
  } else {
    // No shared repositories: build our own from the configured
    // class names, URIs and per-repository parameters.
    Map<String, String> user_repo_params = new LinkedHashMap<String, String>();
    Map<String, String> auth_repo_params = new LinkedHashMap<String, String>();
    for (Map.Entry<String, Object> entry : props.entrySet()) {
      if (entry.getKey().startsWith(USER_REPO_PARAMS_NODE)) {
        // Split the key to configuration nodes separated with '/'
        String[] nodes = entry.getKey().split("/");
        // The plugin ID part may contain many IDs separated with comma ','
        if (nodes.length > 1) {
          user_repo_params.put(nodes[1], entry.getValue().toString());
        }
      }
      if (entry.getKey().startsWith(AUTH_REPO_PARAMS_NODE)) {
        // Split the key to configuration nodes separated with '/'
        String[] nodes = entry.getKey().split("/");
        // The plugin ID part may contain many IDs separated with comma ','
        if (nodes.length > 1) {
          auth_repo_params.put(nodes[1], entry.getValue().toString());
        }
      }
    }
    try {
      String cls_name = (String) props.get(USER_REPO_CLASS_PROP_KEY);
      String res_uri = (String) props.get(USER_REPO_URL_PROP_KEY);
      user_repository = RepositoryFactory.getUserRepository(getName(),
          cls_name, res_uri, user_repo_params);
      log.config("Initialized " + cls_name + " as user repository: " + res_uri);
    } catch (Exception e) {
      log.log(Level.SEVERE, "Can't initialize user repository: ", e);
    } // end of try-catch
    try {
      String cls_name = (String) props.get(AUTH_REPO_CLASS_PROP_KEY);
      String res_uri = (String) props.get(AUTH_REPO_URL_PROP_KEY);
      auth_repository = RepositoryFactory.getAuthRepository(getName(),
          cls_name, res_uri, auth_repo_params);
      log.config("Initialized " + cls_name + " as auth repository: " + res_uri);
    } catch (Exception e) {
      log.log(Level.SEVERE, "Can't initialize auth repository: ", e);
    } // end of try-catch
  }
  naUserRepository = new NARepository(user_repository);
  // Load all configured plugins and their per-plugin settings.
  String[] plugins = (String[])props.get(PLUGINS_PROP_KEY);
  maxPluginsNo = plugins.length;
  processors.clear();
  for (String comp_id: plugins) {
    if (comp_id.equals("presence")) {
      log.warning("Your configuration is outdated!"
          + " Note 'presence' and 'jaber:iq:roster' plugins are no longer exist."
          + " Use 'roster-presence' plugin instead, loading automaticly...");
      comp_id = "roster-presence";
    }
    addPlugin(comp_id);
    Map<String, Object> plugin_settings =
        new ConcurrentSkipListMap<String, Object>();
    for (Map.Entry<String, Object> entry: props.entrySet()) {
      if (entry.getKey().startsWith(PLUGINS_CONF_PROP_KEY)) {
        // Split the key to configuration nodes separated with '/'
        String[] nodes = entry.getKey().split("/");
        // The plugin ID part may contain many IDs separated with comma ','
        if (nodes.length > 2) {
          String[] ids = nodes[1].split(",");
          Arrays.sort(ids);
          if (Arrays.binarySearch(ids, comp_id) >= 0) {
            plugin_settings.put(nodes[2], entry.getValue());
          }
        }
      }
      // if (entry.getKey().startsWith(PLUGINS_CONF_PROP_KEY + "/" + comp_id)) {
      // plugin_settings.put(
      // entry.getKey().substring((PLUGINS_CONF_PROP_KEY +
      // "/" + comp_id + "/").length()), entry.getValue());
      // }
    }
    if (plugin_settings.size() > 0) {
      if (log.isLoggable(Level.FINEST)) {
        log.finest(plugin_settings.toString());
      }
      plugin_config.put(comp_id, plugin_settings);
    }
  } // end of for (String comp_id: plugins)
  // hostnames = (String[])props.get(HOSTNAMES_PROP_KEY);
  // clearRoutings();
  // for (String host: hostnames) {
  // addRouting(host);
  //// XMPPResourceConnection conn = createUserSession(NULL_ROUTING, host, host);
  //// conn.setDummy(true);
  // } // end of for ()
  // addRouting(getComponentId());
  // anonymous_domains.clear();
  // anonymous_domains.addAll(
  // Arrays.asList((String[])props.get(ANONYMOUS_DOMAINS_PROP_KEY)));
  // serverSession =
  // Internal session used for server-generated traffic.
  createUserSession(NULL_ROUTING, getDefHostName(),
      getComponentId() + "/server");
  admins = (String[])props.get(ADMINS_PROP_KEY);
  trusted = (String[])props.get(TRUSTED_PROP_KEY);
  //anon_peers = (String[])props.get(ANONYMOUS_PEERS_PROP_KEY);
  //Arrays.sort(anon_peers);
  // Loading admin scripts....
  // Each script declares its ID and description via "AS:" header lines.
  String descrStr = "AS:Description: ";
  String cmdIdStr = "AS:CommandId: ";
  String scriptsPath = (String) props.get(ADMIN_SCRIPTS_PROP_KEY);
  File file = null;
  AddScriptCommand addCommand = new AddScriptCommand();
  Bindings binds = scriptEngineManager.getBindings();
  initBindings(binds);
  try {
    File adminDir = new File(scriptsPath);
    // NOTE(review): listFiles() returns null for a missing/unreadable
    // directory — the resulting NPE is swallowed by the catch below.
    for (File f : adminDir.listFiles()) {
      String cmdId = null;
      String cmdDescr = null;
      file = f;
      StringBuilder sb = new StringBuilder();
      // NOTE(review): the reader is not closed in a finally block, so an
      // I/O error mid-read leaks the file handle — consider try/finally.
      BufferedReader buffr = new BufferedReader(new FileReader(file));
      String line = null;
      while ((line = buffr.readLine()) != null) {
        sb.append(line + "\n");
        int idx = line.indexOf(descrStr);
        if (idx >= 0) {
          cmdDescr = line.substring(idx + descrStr.length());
        }
        idx = line.indexOf(cmdIdStr);
        if (idx >= 0) {
          cmdId = line.substring(idx + cmdIdStr.length());
        }
      }
      buffr.close();
      if (cmdId == null || cmdDescr == null) {
        log.warning("Admin script found but it has no command ID or command description: " + file);
        continue;
      }
      // The script language is derived from the file extension.
      int idx = file.toString().lastIndexOf(".");
      String ext = file.toString().substring(idx + 1);
      addCommand.addAdminScript(cmdId, cmdDescr, sb.toString(), null,
          ext, binds);
      log.config("Loaded admin command from file: " + file +
          ", id: " + cmdId + ", ext: " + ext + ", descr: " + cmdDescr);
    }
  } catch (Exception e) {
    log.log(Level.WARNING, "Can't load the admin script file: " + file, e);
  }
}
/**
 * The session manager receives packets addressed to local domains
 * (not only to its own component address).
 *
 * @return always {@code true}
 */
@Override
public boolean handlesLocalDomains() {
  return true;
}
/**
 * Adds a resource connection to the user's session, creating the session
 * when it does not exist yet. When a session already exists, every other
 * active resource is pinged with a CHECK_USER_CONNECTION command (7 s
 * timeout) to weed out dead connections.
 *
 * @param userId bare user ID (node@domain)
 * @param conn   resource connection to register
 */
protected void registerNewSession(String userId, XMPPResourceConnection conn) {
  XMPPSession session = sessionsByNodeId.get(userId);
  if (session == null) {
    session = new XMPPSession(JIDUtils.getNodeNick(userId));
    sessionsByNodeId.put(userId, session);
    if (log.isLoggable(Level.FINEST)) {
      log.finest("Created new XMPPSession for: " + userId);
    }
  } else {
    // Check all other connections whether they are still alive....
    List<XMPPResourceConnection> connections = session.getActiveResources();
    if (connections != null) {
      for (XMPPResourceConnection connection : connections) {
        addOutPacketWithTimeout(Command.CHECK_USER_CONNECTION.getPacket(
            getComponentId(), connection.getConnectionId(),
            StanzaType.get, UUID.randomUUID().toString()),
            connectionCheckCommandHandler, 7l, TimeUnit.SECONDS);
      }
    }
  }
  session.addResourceConnection(conn);
}
/**
 * Callback invoked after a user successfully authenticates on a connection;
 * registers the connection under the user's bare JID session.
 *
 * @param userName authenticated user node name (without domain)
 * @param conn     the resource connection that just logged in
 */
@Override
public void handleLogin(String userName, XMPPResourceConnection conn) {
    if (log.isLoggable(Level.FINEST)) {
        log.finest("handleLogin called for: " + userName + ", conn_id: " +
            conn.getConnectionId());
    }
    registerNewSession(JIDUtils.getNodeID(userName, conn.getDomain()), conn);
}
/**
 * Callback invoked when a resource connection logs out: removes the user's
 * {@code XMPPSession} when this is its last active resource, drops the
 * connection from the by-connection-id map and tells the connection manager
 * to close the underlying network connection.
 *
 * @param userName user node name (without domain)
 * @param conn     the resource connection being logged out
 */
@Override
public void handleLogout(String userName, XMPPResourceConnection conn) {
    String domain = conn.getDomain();
    String userId = JIDUtils.getNodeID(userName, domain);
    XMPPSession session = sessionsByNodeId.get(userId);
    // Remove the whole session only when this was the last (or only) resource.
    if (session != null && session.getActiveResourcesSize() <= 1) {
        sessionsByNodeId.remove(userId);
    } // end of if (session.getActiveResourcesSize() == 0)
    connectionsByFrom.remove(conn.getConnectionId());
    // Ask the connection manager to close the physical connection.
    fastAddOutPacket(Command.CLOSE.getPacket(getComponentId(),
        conn.getConnectionId(), StanzaType.set, conn.nextStanzaId()));
}
/**
 * Returns service discovery (XEP-0030 disco#info) data for this component.
 * Responds only when the request is addressed to this component or to a
 * locally hosted domain; for the root node (null) the features advertised by
 * all loaded plugins are merged into the reply.
 *
 * @param node disco node being queried, or null for the root node
 * @param jid  JID the query was addressed to
 * @return the disco#info query element, or null if the query is not for us
 */
@Override
public Element getDiscoInfo(String node, String jid) {
    if (jid != null && (getName().equals(JIDUtils.getNodeNick(jid)) ||
            isLocalDomain(jid))) {
        Element query = serviceEntity.getDiscoInfo(node);
        if (node == null) {
            // Merge in features supported by every plugin.
            // NOTE(review): assumes getDiscoInfo(null) never returns null here,
            // otherwise addChildren would throw NPE -- confirm.
            for (ProcessorThreads proc_t : processors.values()) {
                Element[] discoFeatures = proc_t.processor.supDiscoFeatures(null);
                if (discoFeatures != null) {
                    query.addChildren(Arrays.asList(discoFeatures));
                } // end of if (discoFeatures != null)
            }
        }
        if (log.isLoggable(Level.FINEST)) {
            log.finest("Found disco info: " +
                (query != null ? query.toString() : null));
        }
        return query;
    }
    if (log.isLoggable(Level.FINEST)) {
        log.finest("Not found disco info for node: " + node + ", jid: " + jid);
    }
    return null;
}
/**
 * Collects the service discovery features advertised by all loaded plugins.
 *
 * @return list of disco feature elements, possibly empty
 */
@Override
public List<Element> getDiscoFeatures() {
    List<Element> allFeatures = new LinkedList<Element>();
    for (ProcessorThreads procThreads : processors.values()) {
        Element[] pluginFeatures = procThreads.processor.supDiscoFeatures(null);
        if (pluginFeatures == null) {
            continue;
        }
        for (Element feature : pluginFeatures) {
            allFeatures.add(feature);
        }
    }
    return allFeatures;
}
/**
 * Returns service discovery (XEP-0030 disco#items) data for the given node,
 * delegating entirely to the registered service entity.
 *
 * @param node disco node being queried, or null for the root node
 * @param jid  JID the query was addressed to
 * @return list of disco item elements, or null when none are available
 */
@Override
public List<Element> getDiscoItems(String node, String jid) {
    final List<Element> result = serviceEntity.getDiscoItems(node, jid);
    if (log.isLoggable(Level.FINEST)) {
        log.finest("Found disco items: " +
            (result != null ? result.toString() : null));
    }
    return result;
}
/**
 * Collects runtime statistics for this session manager: connection and
 * session counts plus, per loaded plugin, its queue depth, average packet
 * processing time, run count and dropped-packet count.
 *
 * The per-plugin records are reported at FINEST, except the busiest plugin
 * ("roster-presence") which is surfaced at INFO.
 *
 * @return the statistics records, appended to the superclass's records
 */
@Override
public List<StatRecord> getStatistics() {
    List<StatRecord> stats = super.getStatistics();
    stats.add(new StatRecord(getName(), "Open connections", "int",
        connectionsByFrom.size(), Level.FINE));
    stats.add(new StatRecord(getName(), "Registered accounts", "long",
        user_repository.getUsersCount(), Level.INFO));
    stats.add(new StatRecord(getName(), "Open authorized sessions", "int",
        sessionsByNodeId.size(), Level.INFO));
    stats.add(new StatRecord(getName(), "Closed connections", "long",
        closedConnections, Level.FINER));
    // NOTE: "timouts" typo kept intact -- the stat name may be consumed by
    // external monitoring keyed on the exact string.
    stats.add(new StatRecord(getName(), "Authentication timouts", "long",
        authTimeouts, Level.FINEST));
    for (Map.Entry<String, ProcessorThreads> procent : processors.entrySet()) {
        ProcessorThreads proc = procent.getValue();
        // The two former branches built identical records differing only in
        // the log level; select the level once instead of duplicating the code.
        Level level = proc.getName().equals("roster-presence")
            ? Level.INFO : Level.FINEST;
        stats.add(new StatRecord(getName(), "Processor: " + procent.getKey(),
            "String", "Queue: " + proc.getTotalQueueSize() +
            ", AvTime: " + proc.cntAverageTime +
            ", Runs: " + proc.cntRuns + ", Lost: " + proc.dropedPackets,
            level));
    }
    return stats;
}
/**
 * Checks whether the given domain is served locally.
 *
 * @param includeComponents when true, component subdomains also count as local
 */
@Override
public boolean isLocalDomain(String domain, boolean includeComponents) {
    return includeComponents
        ? isLocalDomainOrComponent(domain)
        : isLocalDomain(domain);
}
/**
 * Simple pairing of a packet with the resource connection it belongs to
 * (null for packets without an associated user session), used as the unit
 * of work on the plugin processing queues.
 */
private class QueueItem {
    Packet packet;
    XMPPResourceConnection conn;
}
/**
 * Runs one plugin ({@code XMPPProcessorIfc}) on its own set of packet queues
 * and worker threads. Packets for the same user are hashed to the same queue
 * so that per-user processing order is preserved; load can be spread by
 * increasing the number of queues.
 */
private class ProcessorThreads {

    // Shutdown flag observed by all worker threads (never reset once true).
    private boolean stopped = false;
    private XMPPProcessorIfc processor = null;
    private LinkedList<ProcessorWorkerThread> workerThreads =
        new LinkedList<ProcessorWorkerThread>();
    // private PriorityQueue<QueueItem> nullQueue = new PriorityQueue<QueueItem>(
    //     Priority.values().length, maxQueueSize / maxPluginsNo);
    private ArrayList<PriorityQueue<QueueItem>> queues =
        new ArrayList<PriorityQueue<QueueItem>>();
    // Packets are put in queues in such a way that all packets for the same
    // user end-up in the same queue. This is important in some cases as
    // packet processing order does matter in some cases, especially for
    // roster processing.
    // Therefore it is also recommended that there is a single thread for
    // each queue but we can distribute load increasing number of queues.
    private int numQueues = 2;
    private int numWorkerThreads = 1;
    private int maxQueueSizeDef = maxQueueSize / maxPluginsNo;
    // NOTE(review): these counters are read by getStatistics() and updated by
    // multiple worker threads without synchronization; values are best-effort
    // and individual updates may be lost -- confirm this is acceptable.
    private long cntRuns = 0;
    private long cntAverageTime = 0;
    private long dropedPackets = 0;

    /**
     * Creates the queues and worker threads for the given plugin. Queue and
     * thread counts come from the plugin itself; high-traffic plugins
     * (roster-presence, auth, SASL) get a 10x larger per-queue capacity.
     */
    public ProcessorThreads(XMPPProcessorIfc processor) {
        this.processor = processor;
        numQueues = processor.concurrentQueuesNo();
        numWorkerThreads = processor.concurrentThreadsPerQueue();
        if ("roster-presence".equals(processor.id()) ||
            "jabber:iq:auth".equals(processor.id()) ||
            "urn:ietf:params:xml:ns:xmpp-sasl".equals(processor.id())) {
            maxQueueSizeDef = maxQueueSize * 10;
        }
        for (int i = 0; i < numQueues; i++) {
            queues.add(new PriorityQueue<QueueItem>(Priority.values().length,
                maxQueueSizeDef));
            for (int j = 0; j < numWorkerThreads; j++) {
                ProcessorWorkerThread t = new ProcessorWorkerThread(queues.get(i));
                t.setDaemon(true);
                t.setName(processor.id() + " Queue " + i + " Worker " + j);
                t.start();
                workerThreads.add(t);
            }
        }
        // ProcessorWorkerThread t = new ProcessorWorkerThread(nullQueue);
        // t.setDaemon(true);
        // t.setName(processor.id() + " Null Queue Worker");
        // t.start();
        // workerThreads.add(t);
    }

    /** Returns the plugin id this thread pool serves. */
    public String getName() {
        return processor.id();
    }

    /** Sums the current sizes of all queues (used for statistics). */
    private int getTotalQueueSize() {
        int ret = 0;
        for (PriorityQueue<QueueItem> pq : queues) {
            ret += pq.totalSize();
        }
        // ret += nullQueue.totalSize();
        return ret;
    }

    /**
     * Enqueues a packet for processing, routing by the user id when the
     * connection is usable, otherwise by the packet's destination address,
     * so related packets keep landing on the same queue.
     *
     * @return false when the chosen queue is full and the item was rejected
     */
    public boolean addItem(Packet packet, XMPPResourceConnection conn) {
        boolean ret = false;
        QueueItem item = new QueueItem();
        item.conn = conn;
        item.packet = packet;
        try {
            // Queueing packets per user...
            ret = queues.get(Math.abs(conn.getUserId().hashCode() %
                numQueues)).offer(item, packet.getPriority().ordinal());
        } catch (Exception e) {
            // Otherwise per destination address
            // (conn may be null or not yet authorized -> getUserId() throws).
            ret = queues.get(Math.abs(packet.getTo().hashCode() %
                numQueues)).offer(item, packet.getPriority().ordinal());
            //ret = nullQueue.offer(item, packet.getPriority().ordinal());
        }
        return ret;
    }

    /** Records that a packet was dropped because the queue was full. */
    private void packetDroped() {
        ++dropedPackets;
    }

    /**
     * Worker loop: takes items off one queue and hands them to the plugin,
     * forwarding any result packets produced by the processing.
     */
    private class ProcessorWorkerThread
        extends Thread {

        // Reused between iterations; the plugin appends result packets here.
        private LinkedList<Packet> local_results = new LinkedList<Packet>();
        private PriorityQueue<QueueItem> queue = null;

        private ProcessorWorkerThread(PriorityQueue<QueueItem> queue) {
            this.queue = queue;
        }

        @Override
        public void run() {
            while (!stopped) {
                QueueItem item = null;
                try {
                    //XXX - not very nice, getting the current time can be slooooooow
                    long start = System.currentTimeMillis();
                    item = queue.take();
                    if (item.conn != null) {
                        // Not sure if this synchronization is needed at all
                        synchronized (item.conn) {
                            processor.process(item.packet, item.conn, naUserRepository,
                                local_results, plugin_config.get(processor.id()));
                            setPermissions(item.conn, local_results);
                        }
                    } else {
                        processor.process(item.packet, null, naUserRepository,
                            local_results, plugin_config.get(processor.id()));
                    }
                    addOutPackets(item.packet, item.conn, local_results);
                    ++cntRuns;
                    // Running average biased toward recent samples (each new
                    // sample gets 50% weight).
                    cntAverageTime =
                        (cntAverageTime + (System.currentTimeMillis() - start)) / 2;
                } catch (PacketErrorTypeException e) {
                    log.info("Already error packet, ignoring: " + item.packet.toString());
                } catch (Exception e) {
                    // NOTE(review): if queue.take() itself threw, item is still
                    // null here and item.packet.toString() would NPE -- confirm.
                    log.log(Level.SEVERE, "Exception during packet processing: " +
                        item.packet.toString(), e);
                }
            }
        }
    }
}
/**
 * Timer task scheduled when a connection opens: if the connection has not
 * authenticated by the time the task fires, the connection is dropped from
 * the tracking map and the connection manager is told to close it.
 */
private class AuthenticationTimer extends TimerTask {

    // Connection id the timer watches.
    private String connId = null;

    private AuthenticationTimer(String connId) {
        this.connId = connId;
    }

    @Override
    public void run() {
        XMPPResourceConnection conn = connectionsByFrom.get(connId);
        // Only act on connections that still exist and never authenticated.
        if (conn != null && !conn.isAuthorized()) {
            connectionsByFrom.remove(connId);
            ++authTimeouts;
            log.info("Authentication timeout expired, closing connection: " + connId);
            fastAddOutPacket(Command.CLOSE.getPacket(getComponentId(),
                connId, StanzaType.set, conn.nextStanzaId()));
        }
    }
}
private class ConnectionCheckCommandHandler implements ReceiverEventHandler {
@Override
public void timeOutExpired(Packet packet) {
if (log.isLoggable(Level.FINER)) {
log.finer("Connection checker timeout expired, closing connection: " +
packet.getTo());
}
closeConnection(packet.getTo(), false);
}
@Override
public void responseReceived(Packet packet, Packet response) {
if (response.getType() == StanzaType.error) {
if (log.isLoggable(Level.FINER)) {
log.finer("Connection checker error received, closing connection: " +
packet.getTo());
}
// The connection is not longer active, closing the user session here.
closeConnection(packet.getTo(), false);
}
}
}
/**
 * Restricted repository view handed to plugins processing traffic from
 * non-authorized (e.g. anonymous or pre-login) users: only public data can
 * be read and only the offline-data node can be written, and writes never
 * overwrite existing values.
 */
private static class NARepository implements NonAuthUserRepository {

    UserRepository rep = null;

    NARepository(UserRepository userRep) {
        rep = userRep;
    }

    /** Appends the optional subnode to the given base node path. */
    private String calcNode(String base, String subnode) {
        if (subnode == null) {
            return base;
        } // end of if (subnode == null)
        return base + "/" + subnode;
    }

    /**
     * Reads a single public-data value for the user, or returns null when the
     * user does not exist or the repository is unavailable.
     */
    @Override
    public String getPublicData(String user, String subnode, String key,
            String def) throws UserNotFoundException {
        try {
            return (rep.userExists(user) ?
                rep.getData(user, calcNode(PUBLIC_DATA_NODE, subnode), key, def) :
                null);
        } catch (TigaseDBException e) {
            log.log(Level.SEVERE, "Problem accessing repository data.", e);
            return null;
        } // end of try-catch
    }

    /**
     * Reads a public-data value list for the user, or returns null when the
     * user does not exist or the repository is unavailable.
     */
    @Override
    public String[] getPublicDataList(String user, String subnode, String key)
            throws UserNotFoundException {
        try {
            return (rep.userExists(user) ?
                rep.getDataList(user, calcNode(PUBLIC_DATA_NODE, subnode), key) :
                null);
        } catch (TigaseDBException e) {
            log.log(Level.SEVERE, "Problem accessing repository data.", e);
            return null;
        } // end of try-catch
    }

    /**
     * Appends a value list under the user's offline-data node. Unknown users
     * are logged and ignored (normal for anonymous senders), so despite the
     * declared throws clause this method never propagates
     * UserNotFoundException.
     */
    @Override
    public void addOfflineDataList(String user, String subnode, String key,
            String[] list) throws UserNotFoundException {
        try {
            if (rep.userExists(user)) {
                rep.addDataList(user, calcNode(OFFLINE_DATA_NODE, subnode), key, list);
            } else {
                // Fixed message typo: "inthe" -> "in the".
                throw new UserNotFoundException("User: " + user
                    + " has not been found in the repository.");
            }
        } catch (UserNotFoundException e) {
            // This is quite normal for anonymous users.
            log.info("User not found in repository: " + user);
        } catch (TigaseDBException e) {
            log.log(Level.SEVERE, "Problem accessing repository data.", e);
        } // end of try-catch
    }

    /**
     * Stores a single value under the user's offline-data node, refusing to
     * overwrite an existing value.
     *
     * @throws DataOverwriteException when a value already exists for the key
     */
    @Override
    public void addOfflineData(String user, String subnode, String key,
            String value) throws UserNotFoundException, DataOverwriteException {
        String node = calcNode(OFFLINE_DATA_NODE, subnode);
        try {
            String data = rep.getData(user, node, key);
            if (data == null) {
                rep.setData(user, node, key, value);
            } else {
                throw new
                    DataOverwriteException("Not authorized attempt to overwrite data.");
            } // end of if (data == null) else
        } catch (TigaseDBException e) {
            log.log(Level.SEVERE, "Problem accessing repository data.", e);
        } // end of try-catch
    }
}
}
| Processor average processing time calculating fixed for multi-threaded processors.
git-svn-id: 4a0daf30c0bbd291b3bc5fe8f058bf11ee523347@1551 7d282ba1-3ae6-0310-8f9b-c9008a0864d2
| src/main/java/tigase/server/xmppsession/SessionManager.java | Processor average processing time calculating fixed for multi-threaded processors. |
|
Java | agpl-3.0 | error: pathspec 'src/markehme/factionsplus/Cmds/req/ReqBansEnabled.java' did not match any file(s) known to git
| 49a7a37835fb970170bf952c1bf71affc906ef43 | 1 | MarkehMe/FactionsPlus | package markehme.factionsplus.Cmds.req;
import markehme.factionsplus.MCore.FPUConf;
import org.bukkit.command.CommandSender;
import com.massivecraft.factions.entity.UPlayer;
import com.massivecraft.mcore.cmd.MCommand;
import com.massivecraft.mcore.cmd.req.ReqAbstract;
/**
 * Command requirement that passes only when bans are enabled in the
 * FactionsPlus universe configuration of the command sender.
 */
public class ReqBansEnabled extends ReqAbstract {
    private static final long serialVersionUID = 1L;

    // -------------------------------------------- //
    // INSTANCE & CONSTRUCT
    // -------------------------------------------- //

    // Fix: the singleton was copy-pasted from ReqWarpsEnabled and instantiated
    // (and returned) the wrong requirement class, so get() handed out a
    // warps-enabled check instead of a bans-enabled check.
    private static ReqBansEnabled i = new ReqBansEnabled();
    public static ReqBansEnabled get() { return i; }

    // -------------------------------------------- //
    // OVERRIDE
    // -------------------------------------------- //

    /** True when bans are enabled for the sender's universe. */
    @Override
    public boolean apply(CommandSender sender, MCommand command) {
        return FPUConf.get(UPlayer.get(sender).getUniverse()).bansEnabled;
    }

    /** Message shown to the sender when the requirement fails. */
    @Override
    public String createErrorMessage(CommandSender sender, MCommand command) {
        return "Bans are not enabled.";
    }
}
| src/markehme/factionsplus/Cmds/req/ReqBansEnabled.java | New Req: Bans Enabled
| src/markehme/factionsplus/Cmds/req/ReqBansEnabled.java | New Req: Bans Enabled |
|
Java | agpl-3.0 | error: pathspec 'src/experimentalcode/erich/utilities/InspectionUtil.java' did not match any file(s) known to git
| fc3a0556e394eeb17db7f4051d053524cf9c7fe9 | 1 | elki-project/elki,elki-project/elki,elki-project/elki | package experimentalcode.erich.utilities;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.Stack;
import java.util.Vector;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import de.lmu.ifi.dbs.elki.logging.LoggingUtil;
/**
 * A collection of inspection-related utility functions for scanning the
 * classpath (directories and JAR files) for implementations of a given class.
 *
 * @author Erich Schubert
 *
 */
public class InspectionUtil {
    /**
     * Default package ignores: well-known third-party and JDK packages that
     * never contain project implementations, skipped to speed up scanning.
     */
    private static final String[] DEFAULT_IGNORES = {
        // Sun Java
        "java.", "com.sun.",
        // Batik classes
        "org.apache.",
        // W3C / SVG / XML classes
        "org.w3c.", "org.xml.",
        // JUnit
        "org.junit.", "junit.", "org.hamcrest."
        //
    };

    /**
     * Find all implementations of a given class in the classpath.
     *
     * Note: returned classes may be abstract.
     *
     * Uses the JVM's java.class.path property and the default package ignores.
     *
     * @param c Class restriction
     * @return List of found classes.
     */
    public static List<Class<?>> findAllImplementations(Class<?> c) {
        String[] classpath = System.getProperty("java.class.path").split(System.getProperty("path.separator"));
        return findAllImplementations(classpath, c, DEFAULT_IGNORES);
    }

    /**
     * Find all implementations of a given class.
     *
     * Scans every classpath entry (JARs and directories), loads each candidate
     * class through the system class loader and keeps those assignable to c.
     * Classes that fail to load for any reason are silently skipped, which is
     * deliberate: a broken optional dependency must not abort the scan.
     *
     * @param classpath Classpath to use (JARs and folders supported)
     * @param c Class restriction
     * @param ignorepackages List of packages to ignore
     * @return List of found classes.
     */
    public static List<Class<?>> findAllImplementations(String[] classpath, Class<?> c, String[] ignorepackages) {
        // Collect iterators
        Vector<Iterable<String>> iters = new Vector<Iterable<String>>(classpath.length);
        for(String path : classpath) {
            File p = new File(path);
            if(path.endsWith(".jar")) {
                iters.add(new JarClassIterator(path));
            }
            else if(p.isDirectory()) {
                iters.add(new DirClassIterator(p));
            }
        }
        ArrayList<Class<?>> res = new ArrayList<Class<?>>();
        ClassLoader cl = ClassLoader.getSystemClassLoader();
        for(Iterable<String> iter : iters) {
            for(String classname : iter) {
                // Skip classes in ignored package prefixes.
                boolean ignore = false;
                for(String pkg : ignorepackages) {
                    if(classname.startsWith(pkg)) {
                        ignore = true;
                        break;
                    }
                }
                if(ignore) {
                    continue;
                }
                try {
                    Class<?> cls = cl.loadClass(classname);
                    if(c.isAssignableFrom(cls)) {
                        res.add(cls);
                    }
                }
                catch(ClassNotFoundException e) {
                    continue;
                }
                catch(NoClassDefFoundError e) {
                    // Class references a missing dependency; skip it.
                    continue;
                }
                catch(Exception e) {
                    // Any other load-time failure (e.g. static initializer).
                    continue;
                }
            }
        }
        return res;
    }

    /**
     * Iterates over the ".class" entries of a JAR file, yielding fully
     * qualified class names.
     *
     * NOTE(review): the JarFile opened in the constructor is never closed;
     * the file handle lives until GC -- confirm this is acceptable for a
     * one-shot scan.
     */
    static class JarClassIterator implements Iterator<String>, Iterable<String> {
        private Enumeration<JarEntry> jarentries;

        // Next class name to return, or null when exhausted (one-item lookahead).
        private String ne;

        /**
         * Constructor from Jar file.
         *
         * @param path Path of the JAR file whose entries to iterate over.
         */
        public JarClassIterator(String path) {
            try {
                JarFile jf = new JarFile(path);
                this.jarentries = jf.entries();
                this.ne = findNext();
            }
            catch(IOException e) {
                // Unreadable JAR: behave as an empty iterator.
                LoggingUtil.exception(e);
                this.jarentries = null;
                this.ne = null;
            }
        }

        @Override
        public boolean hasNext() {
            // Do we have a next entry?
            return (ne != null);
        }

        /**
         * Find the next entry, since we need to skip some jar file entries.
         *
         * @return next entry or null
         */
        private String findNext() {
            while(jarentries.hasMoreElements()) {
                JarEntry je = jarentries.nextElement();
                String name = je.getName();
                if(name.endsWith(".class")) {
                    // Convert "pkg/Name.class" to "pkg.Name".
                    String classname = name.substring(0, name.length() - ".class".length());
                    return classname.replace("/", ".");
                }
            }
            return null;
        }

        @Override
        public String next() {
            // Return the previously stored entry.
            String ret = ne;
            ne = findNext();
            return ret;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }

        @Override
        public Iterator<String> iterator() {
            return this;
        }
    }

    /**
     * Recursively walks a directory tree, yielding fully qualified class
     * names for every ".class" file found under the starting directory.
     */
    static class DirClassIterator implements Iterator<String>, Iterable<String> {
        // Absolute path of the scan root, stripped from every file name.
        private String prefix;

        // Work stack of files/directories still to visit (depth-first).
        private Stack<File> set = new Stack<File>();

        // Next class name to return, or null when exhausted (one-item lookahead).
        private String cur;

        /**
         * Constructor from Directory
         *
         * @param path Directory to iterate over
         */
        public DirClassIterator(File path) {
            this.prefix = path.getAbsolutePath();
            this.set.push(path);
            this.cur = findNext();
        }

        @Override
        public boolean hasNext() {
            // Do we have a next entry?
            return (cur != null);
        }

        /**
         * Find the next entry, since we need to skip some jar file entries.
         *
         * @return next entry or null
         */
        private String findNext() {
            while(set.size() > 0) {
                File f = set.pop();
                // recurse into directories
                if(f.isDirectory()) {
                    for(File newf : f.listFiles()) {
                        set.push(newf);
                    }
                    continue;
                }
                String name = f.getAbsolutePath();
                if(name.startsWith(prefix)) {
                    // Strip the root prefix (and the following separator).
                    int l = prefix.length();
                    if(name.charAt(l) == File.separatorChar) {
                        l += 1;
                    }
                    name = name.substring(l);
                }
                else {
                    LoggingUtil.warning("I was expecting all directories to start with '" + prefix + "' but '" + name + "' did not.");
                }
                if(name.endsWith(".class")) {
                    // Convert a relative "pkg/Name.class" path to "pkg.Name".
                    String classname = name.substring(0, name.length() - ".class".length());
                    return classname.replace(File.separator, ".");
                }
            }
            return null;
        }

        @Override
        public String next() {
            // Return the previously stored entry.
            String ret = this.cur;
            this.cur = findNext();
            return ret;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException();
        }

        @Override
        public Iterator<String> iterator() {
            return this;
        }
    }

    /*
    public static void main(String[] args) {
        for(Class<?> cls : findAllImplementations(Object.class)) {
            System.err.println(cls.getCanonicalName());
        }
    } */
}
| src/experimentalcode/erich/utilities/InspectionUtil.java | Inspection utilities
| src/experimentalcode/erich/utilities/InspectionUtil.java | Inspection utilities |
|
Java | agpl-3.0 | error: pathspec 'src/org/usfirst/frc/team1923/robot/commands/RawDriveCommand.java' did not match any file(s) known to git
| dd8a57acacc0e57fc2b128dae213fe2e01af883e | 1 | Team1923/Stronghold_2016 | package org.usfirst.frc.team1923.robot.commands;
import org.usfirst.frc.team1923.robot.Robot;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.command.Command;
/**
 * This command is the MOST low level command in this project.
 *
 * It directly controls the output of the talons controlling drive, with no
 * scaling or coalescing -- only a clamp to the valid [-1, 1] motor range and
 * a hard timeout after which the drive is stopped.
 */
public class RawDriveCommand extends Command {

    // Motor outputs in [-1, 1] (clamped in initialize) and timeout in seconds.
    double left, right, timeOut = 0;
    Timer t = new Timer();

    /**
     * @param left    left-side motor output, clamped to [-1, 1]
     * @param right   right-side motor output, clamped to [-1, 1]
     * @param timeOut seconds to drive before the command finishes
     */
    public RawDriveCommand(double left, double right, double timeOut) {
        requires(Robot.driveSubsystem);
        this.left = left;
        this.right = right;
        this.timeOut = timeOut;
    }

    // Called just before this Command runs the first time
    protected void initialize() {
        // Clamp outputs to the talons' valid range instead of the former
        // four-branch if/else chain.
        left = Math.max(-1, Math.min(1, left));
        right = Math.max(-1, Math.min(1, right));
        t.reset();
        t.start();
    }

    // Called repeatedly when this Command is scheduled to run
    protected void execute() {
        Robot.driveSubsystem.rawDrive(left, right);
    }

    // Make this return true when this Command no longer needs to run execute()
    protected boolean isFinished() {
        return (t.get() > timeOut);
    }

    // Called once after isFinished returns true: stop the motors.
    protected void end() {
        Robot.driveSubsystem.stop();
    }

    // Called when another command which requires one or more of the same
    // subsystems is scheduled to run
    protected void interrupted() {
        end();
    }
}
| src/org/usfirst/frc/team1923/robot/commands/RawDriveCommand.java | Added rawdrive command for experimental code
| src/org/usfirst/frc/team1923/robot/commands/RawDriveCommand.java | Added rawdrive command for experimental code |
|
Java | lgpl-2.1 | 9a33d6aeffe3bcc5b2ccbd2d8ac4b0ed66d1dcf3 | 0 | svartika/ccnx,ebollens/ccnmp,svartika/ccnx,cawka/ndnx,cawka/ndnx,ebollens/ccnmp,ebollens/ccnmp,svartika/ccnx,svartika/ccnx,svartika/ccnx,ebollens/ccnmp,cawka/ndnx,svartika/ccnx,svartika/ccnx,cawka/ndnx,cawka/ndnx | /**
* Part of the CCNx Java Library.
*
* Copyright (C) 2008, 2009 Palo Alto Research Center, Inc.
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License version 2.1
* as published by the Free Software Foundation.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details. You should have received
* a copy of the GNU Lesser General Public License along with this library;
* if not, write to the Free Software Foundation, Inc., 51 Franklin Street,
* Fifth Floor, Boston, MA 02110-1301 USA.
*/
package org.ccnx.ccn.io;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.util.Arrays;
import java.util.EnumSet;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import org.ccnx.ccn.CCNHandle;
import org.ccnx.ccn.ContentVerifier;
import org.ccnx.ccn.config.SystemConfiguration;
import org.ccnx.ccn.impl.security.crypto.ContentKeys;
import org.ccnx.ccn.impl.support.DataUtils;
import org.ccnx.ccn.impl.support.Log;
import org.ccnx.ccn.io.content.Link.LinkObject;
import org.ccnx.ccn.profiles.SegmentationProfile;
import org.ccnx.ccn.profiles.VersioningProfile;
import org.ccnx.ccn.profiles.security.access.group.GroupAccessControlManager;
import org.ccnx.ccn.protocol.CCNTime;
import org.ccnx.ccn.protocol.ContentName;
import org.ccnx.ccn.protocol.ContentObject;
import org.ccnx.ccn.protocol.KeyLocator;
import org.ccnx.ccn.protocol.PublisherPublicKeyDigest;
import org.ccnx.ccn.protocol.SignedInfo.ContentType;
/**
* This abstract class is the superclass of all classes representing an input stream of
* bytes segmented and stored in CCN.
*
* @see SegmentationProfile for description of CCN segmentation
*/
public abstract class CCNAbstractInputStream extends InputStream implements ContentVerifier {
/**
* Flags:
* DONT_DEREFERENCE to prevent dereferencing in case we are attempting to read a link.
*/
protected CCNHandle _handle;
/**
* The Link we dereferenced to get here, if any. This may contain
* a link dereferenced to get to it, and so on.
*/
protected LinkObject _dereferencedLink = null;
public enum FlagTypes { DONT_DEREFERENCE };
protected EnumSet<FlagTypes> _flags = EnumSet.noneOf(FlagTypes.class);
/**
* The segment we are currently reading from.
*/
protected ContentObject _currentSegment = null;
/**
* information if the stream we are reading is marked GONE (see ContentType).
*/
protected ContentObject _goneSegment = null;
/**
* Internal stream used for buffering reads. May include filters.
*/
protected InputStream _segmentReadStream = null;
/**
* The name prefix of the segmented stream we are reading, up to (but not including)
* a segment number.
*/
protected ContentName _baseName = null;
/**
* The publisher we are looking for, either specified by querier on initial
* read, or read from previous blocks (for now, we assume that all segments in a
* stream are created by the same publisher).
*/
protected PublisherPublicKeyDigest _publisher = null;
/**
* The segment number to start with. If not specified, is SegmentationProfile#baseSegment().
*/
protected Long _startingSegmentNumber = null;
/**
* The timeout to use for segment retrieval.
*/
protected int _timeout = SystemConfiguration.getDefaultTimeout();
/**
* Encryption/decryption handler.
*/
protected Cipher _cipher;
protected ContentKeys _keys;
/**
* If this content uses Merkle Hash Trees or other bulk signatures to amortize
* signature cost, we can amortize verification cost as well by caching verification
* data as follows: store the currently-verified root signature, so we don't have to re-verify it;
* and the verified root hash. For each piece of incoming content, see if it aggregates
* to the same root, if so don't reverify signature. If not, assume it's part of
* a new tree and change the root.
*/
protected byte [] _verifiedRootSignature = null;
protected byte [] _verifiedProxy = null;
/**
* The key locator of the content publisher as we read it.
*/
protected KeyLocator _publisherKeyLocator;
protected boolean _atEOF = false;
/**
* Used for mark(int) and reset().
*/
protected int _readlimit = 0;
protected int _markOffset = 0;
protected long _markBlock = 0;
/**
 * Set up an input stream to read segmented CCN content under a given name.
 * Note that this constructor does not currently retrieve any
 * data; data is not retrieved until read() is called. This will change in the future, and
 * this constructor will retrieve the first block.
 *
 * @param baseName Name to read from. If contains a segment number, will start to read from that
 * segment.
 * @param startingSegmentNumber Alternative specification of starting segment number. If
 * unspecified, will be SegmentationProfile#baseSegment().
 * @param publisher The key we require to have signed this content. If null, will accept any publisher
 * (subject to higher-level verification).
 * @param keys The keys to use to decrypt this content. Null if content unencrypted, or another
 * process will be used to retrieve the keys.
 * @param flags Processing flags for this stream (e.g. DONT_DEREFERENCE), may be null.
 * @param handle The CCN handle to use for data retrieval. If null, the default handle
 * given by CCNHandle#getHandle() will be used.
 * @throws IOException Not currently thrown, will be thrown when constructors retrieve first block.
 */
public CCNAbstractInputStream(
        ContentName baseName, Long startingSegmentNumber,
        PublisherPublicKeyDigest publisher,
        ContentKeys keys,
        EnumSet<FlagTypes> flags,
        CCNHandle handle) throws IOException {
    super();

    if (null == baseName) {
        throw new IllegalArgumentException("baseName cannot be null!");
    }
    _handle = handle;
    if (null == _handle) {
        _handle = CCNHandle.getHandle();
    }
    _publisher = publisher;

    if (null != keys) {
        keys.requireDefaultAlgorithm();
        _keys = keys;
    }

    if (null != flags) {
        _flags = flags;
    }

    // So, we assume the name we get in is up to but not including the sequence
    // numbers, whatever they happen to be. If a starting segment is given, we
    // open from there, otherwise we open from the leftmost number available.
    // We assume by the time you've called this, you have a specific version or
    // whatever you want to open -- this doesn't crawl versions. If you don't
    // offer a starting segment index, but instead offer the name of a specific
    // segment, this will use that segment as the starting segment.
    _baseName = baseName;
    if (startingSegmentNumber != null) {
        _startingSegmentNumber = startingSegmentNumber;
    } else {
        if (SegmentationProfile.isSegment(baseName)) {
            _startingSegmentNumber = SegmentationProfile.getSegmentNumber(baseName);
            // Fix: the original assigned the stripped name to the local
            // parameter ("baseName = _baseName.parent()"), a dead store that
            // left _baseName containing the segment component -- contradicting
            // the _baseName field contract and the ContentObject constructor,
            // which strips the segment via SegmentationProfile.segmentRoot().
            _baseName = _baseName.parent();
        } else {
            _startingSegmentNumber = SegmentationProfile.baseSegment();
        }
    }
}
/**
 * Set up an input stream to read segmented CCN content starting with a given
 * ContentObject that has already been retrieved.
 * @param startingSegment The first segment to read from. If this is not the
 * first segment of the stream, reading will begin from this point.
 * We assume that the signature on this segment was verified by our caller.
 * @param keys The keys to use to decrypt this content. Null if content unencrypted, or another
 * process will be used to retrieve the keys.
 * @param flags any flags necessary for processing this stream; have to hand in in constructor in case
 * first segment provided, so can apply to that segment
 * @param handle The CCN handle to use for data retrieval. If null, the default handle
 * given by CCNHandle#getHandle() will be used.
 * @throws IOException if the starting segment's name carries no valid segment number
 */
public CCNAbstractInputStream(ContentObject startingSegment,
        ContentKeys keys,
        EnumSet<FlagTypes> flags,
        CCNHandle handle) throws IOException {
    super();
    _handle = handle;
    if (null == _handle) {
        _handle = CCNHandle.getHandle();
    }

    if (null != keys) {
        keys.requireDefaultAlgorithm();
        _keys = keys;
    }

    if (null != flags) {
        _flags = flags;
    }

    // Strip the segment component so _baseName is the stream prefix only.
    _baseName = SegmentationProfile.segmentRoot(startingSegment.name());
    try {
        _startingSegmentNumber = SegmentationProfile.getSegmentNumber(startingSegment.name());
    } catch (NumberFormatException nfe) {
        throw new IOException("Stream starter segment name does not contain a valid segment number, so the stream does not know what content to start with.");
    }

    // Installs the segment as current content (handles GONE/LINK processing).
    setFirstSegment(startingSegment);
}
/**
 * Set the timeout that will be used for all content retrievals on this stream.
 * Default is SystemConfiguration.getDefaultTimeout() (the former "5 seconds"
 * note did not match the code).
 * @param timeout Milliseconds
 */
public void setTimeout(int timeout) {
    _timeout = timeout;
}
/**
 * Add flags to this stream. Adds to existing flags.
 * @param additionalFlags flags to merge into the current flag set
 */
public void addFlags(EnumSet<FlagTypes> additionalFlags) {
    _flags.addAll(additionalFlags);
}
/**
 * Add a flag to this stream. Adds to existing flags.
 * @param additionalFlag flag to add to the current flag set
 */
public void addFlag(FlagTypes additionalFlag) {
    _flags.add(additionalFlag);
}
/**
 * Set flags on this stream. Replaces existing flags; passing null clears
 * all flags instead of installing a null set.
 * @param flags the new flag set, or null to clear
 */
public void setFlags(EnumSet<FlagTypes> flags) {
    if (flags != null) {
        _flags = flags;
    } else {
        _flags.clear();
    }
}
/**
 * Clear the flags on this stream (the set is emptied, not replaced).
 */
public void clearFlags() {
    _flags.clear();
}
/**
 * Remove a flag from this stream.
 * @param flag flag to remove; no effect if not currently set
 */
public void removeFlag(FlagTypes flag) {
    _flags.remove(flag);
}
/**
 * Check whether this stream has a particular flag set.
 * @param flag flag to test
 * @return true if the flag is currently set on this stream
 */
public boolean hasFlag(FlagTypes flag) {
    return _flags.contains(flag);
}
/**
 * @return The name used to retrieve segments of this stream (not including the segment number).
 */
public ContentName getBaseName() {
    return _baseName;
}
/**
 * Returns the version timestamp of the stream being read, or null if the
 * base name is unset or carries no terminal version component.
 */
public CCNTime getVersion() {
    return (null == _baseName)
        ? null
        : VersioningProfile.getTerminalVersionAsTimestampIfVersioned(_baseName);
}
/**
 * Reads a single byte from the stream.
 *
 * @return the byte as an unsigned value in [0, 255], or -1 at end of stream
 */
@Override
public int read() throws IOException {
    byte[] single = new byte[1];
    int count = read(single, 0, 1);
    return (count < 0) ? -1 : (0x000000FF & single[0]);
}
/**
 * Reads up to b.length bytes into b; delegates to read(byte[], int, int).
 *
 * @return number of bytes read, or -1 at end of stream
 */
@Override
public int read(byte[] b) throws IOException {
    return read(b, 0, b.length);
}
/**
 * Reads up to len bytes into buf starting at offset; validates the buffer
 * and delegates the actual work to the subclass's readInternal.
 *
 * @throws NullPointerException if buf is null
 */
@Override
public int read(byte[] buf, int offset, int len) throws IOException {
    if (null == buf)
        throw new NullPointerException("Buffer cannot be null!");

    return readInternal(buf, offset, len);
}
/**
 * Actual mechanism used to trigger segment retrieval and perform content reads.
 * Subclasses define different schemes for retrieving content across segments.
 * Implementations may block up to the stream timeout while fetching a segment.
 * @param buf As in read(byte[], int, int).
 * @param offset As in read(byte[], int, int).
 * @param len As in read(byte[], int, int).
 * @return As in read(byte[], int, int).
 * @throws IOException if a segment cannot be retrieved, or there is an error in lower-level
 *   segment retrieval mechanisms. Uses subclasses of IOException to help provide
 *   more information. In particular, throws NoMatchingContentFoundException when
 *   no content found within the timeout given.
 */
protected abstract int readInternal(byte [] buf, int offset, int len) throws IOException;
	/**
	 * Called to set the first segment when opening a stream. This does initialization
	 * and setup particular to the first segment of a stream. Subclasses should not override
	 * unless they really know what they are doing. Calls #setCurrentSegment(ContentObject)
	 * for the first segment. If the content is encrypted, and keys are not provided
	 * for this stream, they are looked up according to the namespace. Note that this
	 * assumes that all segments of a given piece of content are either encrypted or not.
	 * If the segment is a LINK (and DONT_DEREFERENCE is not set), the link chain is
	 * followed until non-link content is found, updating _baseName as it goes.
	 * @param newSegment Must not be null
	 * @throws IOException If newSegment is null or decryption keys set up incorrectly
	 */
	protected void setFirstSegment(ContentObject newSegment) throws IOException {
		if (null == newSegment) {
			throw new NoMatchingContentFoundException("Cannot find first segment of " + getBaseName());
		}
		LinkObject theLink = null;
		while (newSegment.isType(ContentType.LINK) && (!hasFlag(FlagTypes.DONT_DEREFERENCE))) {
			// Automated dereferencing. Want to make a link object to read in this link, then
			// dereference it to get the segment we really want. We then fix up the _baseName,
			// and continue like nothing ever happened.
			theLink = new LinkObject(newSegment, _handle);
			pushDereferencedLink(theLink); // set _dereferencedLink to point to the new link, pushing
										   // old ones down the stack if necessary
			// dereference will check for link cycles
			newSegment = _dereferencedLink.dereference(_timeout);
			Log.info("CCNAbstractInputStream: dereferencing link {0} to {1}, resulting data {2}", theLink.getVersionedName(),
					theLink.link(), ((null == newSegment) ? "null" : newSegment.name()));
			if (newSegment == null) {
				// TODO -- catch error states. Do we throw exception or return null?
				// Set error states -- when do we find link cycle and set the error on the link?
				// Clear error state when update is successful.
				// Two cases -- link loop or data not found.
				if (_dereferencedLink.hasError()) {
					if (_dereferencedLink.getError() instanceof LinkCycleException) {
						// Leave the link set on the input stream, so that caller can explore errors.
						Log.warning("Hit link cycle on link {0} pointing to {1}, cannot dereference. See this.dereferencedLink() for more information!",
								_dereferencedLink.getVersionedName(), _dereferencedLink.link().targetName());
					}
					// Might also cover NoMatchingContentFoundException here...for now, just return null
					// so can call it more than once.
					throw _dereferencedLink.getError();
				} else {
					throw new NoMatchingContentFoundException("Cannot find first segment of " + getBaseName() + ", which is a link pointing to " + _dereferencedLink.link().targetName());
				}
			}
			// Re-anchor the stream at the link target's name (minus the segment component).
			_baseName = SegmentationProfile.segmentRoot(newSegment.name());
			// go around again,
		}
		if (newSegment.isType(ContentType.GONE)) {
			// Deleted content: remember the GONE segment so isGone() can report it.
			_goneSegment = newSegment;
			Log.info("getFirstSegment: got gone segment: " + _goneSegment.name());
		} else if (newSegment.isType(ContentType.ENCR) && (null == _keys)) {
			// The block is encrypted and we don't have keys
			// Get the content name without the segment parent
			ContentName contentName = SegmentationProfile.segmentRoot(newSegment.name());
			// Attempt to retrieve the keys for this namespace
			_keys = GroupAccessControlManager.keysForInput(contentName, _handle);
		}
		setCurrentSegment(newSegment);
	}
	/**
	 * Set up current segment for reading, including preparation for decryption if necessary.
	 * Called after getSegment/getFirstSegment/getNextSegment, which take care of verifying
	 * the segment for us. Assumes newSegment has been verified.
	 * Also records the segment publisher and key locator for later queries, and builds
	 * _segmentReadStream over the (possibly decrypted) segment content.
	 * @param newSegment the verified segment to install; null clears current state.
	 * @throws IOException If decryption keys set up incorrectly
	 */
	protected void setCurrentSegment(ContentObject newSegment) throws IOException {
		_currentSegment = null;
		_segmentReadStream = null;
		if (null == newSegment) {
			Log.info("FINDME: Setting current segment to null! Did a segment fail to verify?");
			return;
		}
		_currentSegment = newSegment;
		// Should we only set these on the first retrieval?
		// getSegment will ensure we get a requested publisher (if we have one) for the
		// first segment; once we have a publisher, it will ensure that future segments match it.
		_publisher = newSegment.signedInfo().getPublisherKeyID();
		_publisherKeyLocator = newSegment.signedInfo().getKeyLocator();
		if (_goneSegment != newSegment) { // want pointer ==, not equals() here
			// if we're decrypting, then set it up now
			if (_keys != null) {
				// We only do automated lookup of keys on first segment. Otherwise
				// we assume we must have the keys or don't try to decrypt.
				try {
					// Reuse of current segment OK. Don't expect to have two separate readers
					// independently use this stream without state confusion anyway.
					// Assume getBaseName() returns name without segment information.
					// Log verification only on highest log level (won't execute on lower logging level).
					Log.finest("Assert check: does getBaseName() match segmentless part of _currentSegment.name()? {0}",
							(SegmentationProfile.segmentRoot(_currentSegment.name()).equals(getBaseName())));
					_cipher = _keys.getSegmentDecryptionCipher(getBaseName(), _publisher,
							SegmentationProfile.getSegmentNumber(_currentSegment.name()));
				} catch (InvalidKeyException e) {
					Log.warning("InvalidKeyException: " + e.getMessage());
					throw new IOException("InvalidKeyException: " + e.getMessage());
				} catch (InvalidAlgorithmParameterException e) {
					Log.warning("InvalidAlgorithmParameterException: " + e.getMessage());
					throw new IOException("InvalidAlgorithmParameterException: " + e.getMessage());
				}
				// Let's optimize random access to this buffer (e.g. as used by the decoders) by
				// decrypting a whole ContentObject at a time. It's not a huge security risk,
				// and right now we can't rewind the buffers so if we do try to decode out of
				// an encrypted block we constantly restart from the beginning and redecrypt
				// the content.
				// Previously we used our own UnbufferedCipherInputStream class directly as
				// our _segmentReadStream for encrypted data, as Java's CipherInputStreams
				// assume block-oriented boundaries for decryption, and buffer incorrectly as a result.
				// If we want to go back to incremental decryption, putting a small cache into that
				// class to optimize going backwards would help.
				// Unless we use a compressing cipher, the maximum data length for decrypted data
				// is _currentSegment.content().length. But we might as well make something
				// general that will handle all cases. There may be a more efficient way to
				// do this; want to minimize copies.
				byte [] bodyData = _cipher.update(_currentSegment.content());
				byte[] tailData;
				try {
					// doFinal flushes any remaining buffered/padded bytes from the cipher.
					tailData = _cipher.doFinal();
				} catch (IllegalBlockSizeException e) {
					Log.warning("IllegalBlockSizeException: " + e.getMessage());
					throw new IOException("IllegalBlockSizeException: " + e.getMessage());
				} catch (BadPaddingException e) {
					Log.warning("BadPaddingException: " + e.getMessage());
					throw new IOException("BadPaddingException: " + e.getMessage());
				}
				if ((null == tailData) || (0 == tailData.length)) {
					_segmentReadStream = new ByteArrayInputStream(bodyData);
				} else {
					// Concatenate update() output and doFinal() output into one buffer.
					byte [] allData = new byte[bodyData.length + tailData.length];
					// Still avoid 1.6 array ops
					System.arraycopy(bodyData, 0, allData, 0, bodyData.length);
					System.arraycopy(tailData, 0, allData, bodyData.length, tailData.length);
					_segmentReadStream = new ByteArrayInputStream(allData);
				}
			} else {
				if (_currentSegment.signedInfo().getType().equals(ContentType.ENCR)) {
					// We only do automated lookup of keys on first segment.
					Log.warning("Asked to read encrypted content, but not given a key to decrypt it. Decryption happening at higher level?");
				}
				// Unencrypted (or externally-decrypted) content: read it directly.
				_segmentReadStream = new ByteArrayInputStream(_currentSegment.content());
			}
		}
	}
/**
* Rewinds read buffers for current segment to beginning of the segment.
* @throws IOException
*/
protected void rewindSegment() throws IOException {
if (null == _currentSegment) {
Log.info("Cannot reqind null segment.");
}
if (null == _segmentReadStream) {
setCurrentSegment(_currentSegment);
}
_segmentReadStream.reset(); // will reset to 0 if mark not caled
}
/**
* Retrieves a specific segment of this stream, indicated by segment number.
* Three navigation options: get first (leftmost) segment, get next segment,
* or get a specific segment.
* Have to assume that everyone is using our segment number encoding. Probably
* easier to ask raw streams to use that encoding (e.g. for packet numbers)
* than to flag streams as to whether they are using integers or segments.
* @param number Segment number to retrieve. See SegmentationProfile for numbering.
* If we already have this segment as #currentSegmentNumber(), will just
* return the current segment, and will not re-retrieve it from the network.
* @throws IOException If no matching content found (actually throws NoMatchingContentFoundException)
* or if there is an error at lower layers.
**/
protected ContentObject getSegment(long number) throws IOException {
if (_currentSegment != null) {
// what segment do we have right now? maybe we already have it
if (currentSegmentNumber() == number){
// we already have this segment... just use it
return _currentSegment;
}
}
// If no publisher specified a priori, _publisher will be null and we will get whoever is
// available that verifies for first segment. If _publisher specified a priori, or once we have
// retrieved a segment and set _publisher to the publisher of that segment, we will continue to
// retrieve segments by the same publisher.
return SegmentationProfile.getSegment(_baseName, number, _publisher, _timeout, this, _handle);
}
/**
* Checks whether we might have a next segment.
* @return Returns false if this content is marked as GONE (see ContentType), or if we have
* retrieved the segment marked as the last one, or, in a very rare case, if we're
* reading content that does not have segment markers.
*/
protected boolean hasNextSegment() throws IOException {
// We're looking at content marked GONE
if (null != _goneSegment) {
Log.info("getNextSegment: We have a gone segment, no next segment. Gone segment: " + _goneSegment.name());
return false;
}
if (null == _currentSegment) {
Log.severe("hasNextSegment() called when we have no current segment!");
throw new IOException("hasNextSegment() called when we have no current segment!");
}
// Check to see if finalBlockID is the current segment. If so, there should
// be no next segment. (If the writer makes a mistake and guesses the wrong
// value for finalBlockID, they won't put that wrong value in the segment they're
// guessing itself -- unless they want to try to extend a "closed" stream.
// Normally by the time they write that segment, they either know they're done or not.
if (null != _currentSegment.signedInfo().getFinalBlockID()) {
if (Arrays.equals(_currentSegment.signedInfo().getFinalBlockID(), _currentSegment.name().lastComponent())) {
Log.info("getNextSegment: there is no next segment. We have segment: " +
DataUtils.printHexBytes(_currentSegment.name().lastComponent()) + " which is marked as the final segment.");
return false;
}
}
if (!SegmentationProfile.isSegment(_currentSegment.name())) {
Log.info("Unsegmented content: {0}. No next segment.", _currentSegment.name());
return false;
}
return true;
}
/**
* Retrieve the next segment of the stream. Convenience method, uses #getSegment(long).
* @return the next segment, if found.
* @throws IOException
*/
protected ContentObject getNextSegment() throws IOException {
if (null == _currentSegment) {
Log.info("getNextSegment: no current segment, getting first segment.");
return getFirstSegment();
}
Log.info("getNextSegment: getting segment after " + _currentSegment.name());
return getSegment(nextSegmentNumber());
}
/**
* Retrieves the first segment of the stream, based on specified startingSegmentNumber
* (see #CCNAbstractInputStream(ContentName, Long, PublisherPublicKeyDigest, ContentKeys, CCNHandle)).
* Convenience method, uses #getSegment(long).
* @return the first segment, if found.
* @throws IOException If can't get a valid starting segment number
*/
protected ContentObject getFirstSegment() throws IOException {
if (null != _startingSegmentNumber) {
ContentObject firstSegment = getSegment(_startingSegmentNumber);
Log.info("getFirstSegment: segment number: " + _startingSegmentNumber + " got segment? " +
((null == firstSegment) ? "no " : firstSegment.name()));
return firstSegment;
} else {
throw new IOException("Stream does not have a valid starting segment number.");
}
}
/**
* Method to determine whether a retrieved block is the first segment of this stream (as
* specified by startingSegmentNumber, (see #CCNAbstractInputStream(ContentName, Long, PublisherPublicKeyDigest, ContentKeys, CCNHandle)).
* Overridden by subclasses to implement narrower constraints on names. Once first
* segment is retrieved, further segments can be identified just by segment-naming
* conventions (see SegmentationProfile).
*
* @param desiredName The expected name prefix for the stream.
* For CCNAbstractInputStream, assume that desiredName contains the name up to but not including
* segmentation information.
* @param segment The potential first segment.
* @return True if it is the first segment, false otherwise.
*/
protected boolean isFirstSegment(ContentName desiredName, ContentObject segment) {
if ((null != segment) && (SegmentationProfile.isSegment(segment.name()))) {
Log.info("is " + segment.name() + " a first segment of " + desiredName);
// In theory, the segment should be at most a versioning component different from desiredName.
// In the case of complex segmented objects (e.g. a KeyDirectory), where there is a version,
// then some name components, then a segment, desiredName should contain all of those other
// name components -- you can't use the usual versioning mechanisms to pull first segment anyway.
if (!desiredName.equals(SegmentationProfile.segmentRoot(segment.name()))) {
Log.info("Desired name :" + desiredName + " is not a prefix of segment: " + segment.name());
return false;
}
if (null != _startingSegmentNumber) {
return (_startingSegmentNumber.equals(SegmentationProfile.getSegmentNumber(segment.name())));
} else {
return SegmentationProfile.isFirstSegment(segment.name());
}
}
return false;
}
	/**
	 * If we traversed a link to get this object, make it available.
	 * @return the most recently dereferenced link, or null if no link was traversed.
	 */
	public synchronized LinkObject getDereferencedLink() { return _dereferencedLink; }
	/**
	 * Directly overwrite the dereferenced-link pointer, bypassing the stack
	 * maintenance done by pushDereferencedLink. Use only if you know what you are doing.
	 */
	protected synchronized void setDereferencedLink(LinkObject dereferencedLink) { _dereferencedLink = dereferencedLink; }
	/**
	 * Add a LinkObject to the stack we had to dereference to get here.
	 * The previous top of stack is pushed down inside the new link; a null
	 * argument is ignored.
	 */
	protected synchronized void pushDereferencedLink(LinkObject dereferencedLink) {
		if (null == dereferencedLink) {
			return;
		}
		if (null != _dereferencedLink) {
			if (null != dereferencedLink.getDereferencedLink()) {
				// The incoming link already carries its own stack; merging two stacks
				// loses ordering guarantees, so warn.
				Log.warning("Merging two link stacks -- {0} already has a dereferenced link from {1}. Behavior unpredictable.",
						dereferencedLink.getVersionedName(), dereferencedLink.getDereferencedLink().getVersionedName());
			}
			dereferencedLink.pushDereferencedLink(_dereferencedLink);
		}
		setDereferencedLink(dereferencedLink);
	}
	/**
	 * Verifies the signature on a segment using cached bulk signature data (from Merkle Hash Trees)
	 * if it is available. Caches the last verified root signature and proxy so repeated
	 * segments under the same bulk signature are checked by digest comparison only.
	 * TODO -- check to see if it matches desired publisher.
	 * @param segment the segment whose signature to verify in the context of this stream.
	 * @return true if the segment verifies; false on any verification or digest mismatch,
	 *   or if verification throws (treated as failure).
	 */
	public boolean verify(ContentObject segment) {
		// First we verify.
		// Low-level verify just checks that signer actually signed.
		// High-level verify checks trust.
		try {
			// We could have several options here. This segment could be simply signed.
			// or this could be part of a Merkle Hash Tree. If the latter, we could
			// already have its signing information.
			if (null == segment.signature().witness()) {
				// Simply-signed segment: a direct verification, no caching possible.
				return segment.verify(_handle.keyManager());
			}
			// Compare to see whether this segment matches the root signature we previously verified, if
			// not, verify and store the current signature.
			// We need to compute the proxy regardless.
			byte [] proxy = segment.computeProxy();
			// OK, if we have an existing verified signature, and it matches this segment's
			// signature, the proxy ought to match as well.
			if ((null != _verifiedRootSignature) && (Arrays.equals(_verifiedRootSignature, segment.signature().signature()))) {
				if ((null == proxy) || (null == _verifiedProxy) || (!Arrays.equals(_verifiedProxy, proxy))) {
					Log.warning("Found segment: " + segment.name() + " whose digest fails to verify; segment length: " + segment.contentLength());
					Log.info("Verification failure: " + segment.name() + " timestamp: " + segment.signedInfo().getTimestamp() + " content length: " + segment.contentLength() + 
							" proxy: " + DataUtils.printBytes(proxy) +
							" expected proxy: " + DataUtils.printBytes(_verifiedProxy));
	 				return false;
				}
			} else {
				// Verifying a new segment. See if the signature verifies, otherwise store the signature
				// and proxy.
				if (!ContentObject.verify(proxy, segment.signature().signature(), segment.signedInfo(), segment.signature().digestAlgorithm(), _handle.keyManager())) {
					Log.warning("Found segment: " + segment.name().toString() + " whose signature fails to verify; segment length: " + segment.contentLength() + ".");
					return false;
				} else {
					// Remember current verifiers
					_verifiedRootSignature = segment.signature().signature();
					_verifiedProxy = proxy;
				}
			} 
			Log.info("Got segment: " + segment.name().toString() + ", verified.");
		} catch (Exception e) {
			// Any exception during verification is conservatively treated as a failure.
			Log.warning("Got an " + e.getClass().getName() + " exception attempting to verify segment: " + segment.name().toString() + ", treat as failure to verify.");
			Log.warningStackTrace(e);
			return false;
		}
		return true;
	}
/**
* Returns the segment number for the next segment.
* Default segmentation generates sequentially-numbered stream
* segments but this method may be overridden in subclasses to
* perform re-assembly on streams that have been segmented differently.
* @return The index of the next segment of stream data.
*/
public long nextSegmentNumber() {
if (null == _currentSegment) {
return _startingSegmentNumber.longValue();
} else {
return segmentNumber() + 1;
}
}
/**
* @return Returns the segment number of the current segment if we have one, otherwise
* the expected startingSegmentNumber.
*/
public long segmentNumber() {
if (null == _currentSegment) {
return _startingSegmentNumber;
} else {
// This needs to work on streaming content that is not traditional fragments.
// The segmentation profile tries to do that, though it is seeming like the
// new segment representation means we will have to assume that representation
// even for stream content.
return SegmentationProfile.getSegmentNumber(_currentSegment.name());
}
}
/**
* @return Returns the segment number of the current segment if we have one, otherwise -1.
*/
protected long currentSegmentNumber() {
if (null == _currentSegment) {
return -1; // make sure we don't match inappropriately
}
return segmentNumber();
}
/**
* Checks to see whether this content has been marked as GONE (deleted). Will retrieve the first
* segment if we do not already have it in order to make this determination.
* @return true if stream is GONE.
* @throws NoMatchingContentFound exception if no first segment found
* @throws IOException if there is other difficulty retrieving the first segment.
*/
public boolean isGone() throws NoMatchingContentFoundException, IOException {
// TODO: once first segment is always read in constructor this code will change
if (null == _currentSegment) {
ContentObject firstSegment = getFirstSegment();
setFirstSegment(firstSegment); // sets _goneSegment, does link dereferencing,
// throws NoMatchingContentFoundException if firstSegment is null.
// this way all retry behavior is localized in the various versions of getFirstSegment.
// Previously what would happen is getFirstSegment would be called by isGone, return null,
// and we'd have a second chance to catch it on the call to update if things were slow. But
// that means we would get a more general update on a gone object.
}
// We might have set first segment in constructor, in which case we will also have set _goneSegment
if (null != _goneSegment) {
return true;
}
return false;
}
	/**
	 * @return Return the single segment of a stream marked as GONE, or null if the
	 *   stream is not known to be GONE (see #isGone()).
	 */
	public ContentObject deletionInformation() {
		return _goneSegment;
	}
	/**
	 * Callers may need to access information about this stream's publisher.
	 * We eventually should (TODO) ensure that all the segments we're reading
	 * match in publisher information, and cache the verified publisher info.
	 * (In particular once we're doing trust calculations, to ensure we do them
	 * only once per stream.)
	 * But we do verify each segment, so start by pulling what's in the current segment.
	 * @return the publisher of the data in the stream (either as requested, or once we have
	 *   data, as observed); may be null before any segment is read if none was requested.
	 */
	public PublisherPublicKeyDigest publisher() {
		return _publisher;
	}
	/**
	 * @return the key locator for this stream's publisher, as recorded from the
	 *   most recently installed segment (see setCurrentSegment).
	 */
	public KeyLocator publisherKeyLocator() {
		return _publisherKeyLocator;		
	}
/**
* @return the name of the current segment held by this string, or "null". Used for debugging.
*/
public String currentSegmentName() {
return ((null == _currentSegment) ? "null" : _currentSegment.name().toString());
}
@Override
public int available() throws IOException {
if (null == _segmentReadStream)
return 0;
return _segmentReadStream.available();
}
	/**
	 * @return Whether this stream believes it is at eof (has read past the end of the 
	 *   last segment of the stream). Purely reports the cached _atEOF flag; does no I/O.
	 */
	public boolean eof() { 
		//Log.info("Checking eof: there yet? " + _atEOF);
		return _atEOF; 
	}
	@Override
	public void close() throws IOException {
		// don't have to do anything -- no underlying OS resources are held;
		// segment buffers are plain in-memory streams.
	}
	/**
	 * Record the current (segment number, intra-segment offset) pair so reset()
	 * can return here. Marks the underlying per-segment stream as well when supported.
	 */
	@Override
	public synchronized void mark(int readlimit) {
		_readlimit = readlimit;
		_markBlock = segmentNumber();
		if (null == _segmentReadStream) {
			_markOffset = 0;
		} else {
			try {
				// Offset = bytes already consumed from the current segment.
				_markOffset = _currentSegment.contentLength() - _segmentReadStream.available();
				if (_segmentReadStream.markSupported()) {
					_segmentReadStream.mark(readlimit);
				}
			} catch (IOException e) {
				// InputStream.mark may not throw IOException, so wrap it.
				throw new RuntimeException(e);
			}
		}
		Log.finer("mark: block: " + segmentNumber() + " offset: " + _markOffset);
	}
	@Override
	public boolean markSupported() {
		// mark/reset are implemented via segment number + offset bookkeeping above.
		return true;
	}
	/**
	 * Return the stream to the position recorded by mark(): re-fetch the marked
	 * segment if needed, then skip to the marked intra-segment offset.
	 */
	@Override
	public synchronized void reset() throws IOException {
		// TODO: when first block is read in constructor this check can be removed
		if (_currentSegment == null) {
			setFirstSegment(getSegment(_markBlock));
		} else if (currentSegmentNumber() == _markBlock) {
			//already have the correct segment
			if (tell() == _markOffset){
				//already have the correct offset
			} else {
				// Reset and skip.
				if (_segmentReadStream.markSupported()) {
					_segmentReadStream.reset();
					Log.finer("reset within block: block: " + segmentNumber() + " offset: " + _markOffset + " eof? " + _atEOF);
					return;
				} else {
					// No mark support on the underlying stream: rebuild it from segment content.
					setCurrentSegment(_currentSegment);
				}
			}
		} else {
			// getSegment doesn't pull segment if we already have the right one
			setCurrentSegment(getSegment(_markBlock));
		}
		_segmentReadStream.skip(_markOffset);
		_atEOF = false;
		Log.finer("reset: block: " + segmentNumber() + " offset: " + _markOffset + " eof? " + _atEOF);
	}
@Override
public long skip(long n) throws IOException {
Log.info("in skip("+n+")");
if (n < 0) {
return 0;
}
return readInternal(null, 0, (int)n);
}
	/**
	 * @return Currently returns 0. Can be optionally overridden by subclasses that
	 *   can cheaply determine how many segments make up the stream.
	 * @throws IOException
	 */
	protected int segmentCount() throws IOException {
		return 0;
	}
	/**
	 * Seek a stream to a specific byte offset from the start. Tries to avoid retrieving
	 * extra segments: if already on the first segment, either skips forward or rewinds
	 * within it instead of re-fetching.
	 * @param position absolute byte offset from the start of the stream.
	 * @throws IOException
	 */
	public void seek(long position) throws IOException {
		Log.info("Seeking stream to " + position);
		// TODO: when first block is read in constructor this check can be removed
		if ((_currentSegment == null) || (!SegmentationProfile.isFirstSegment(_currentSegment.name()))) {
			// Not positioned on the first segment yet: start from it and skip forward.
			setFirstSegment(getFirstSegment());
			skip(position);
		} else if (position > tell()) {
			// we are on the first segment already, just move forward
			skip(position - tell());
		} else {
			// we are on the first segment already, just rewind back to the beginning
			rewindSegment();
			skip(position);
		}
	}
	/**
	 * @return Returns position in byte offset. For CCNAbstractInputStream, provide an inadequate
	 *   base implementation that returns the offset into the current segment (not the stream as
	 *   a whole).
	 *   NOTE(review): dereferences _currentSegment/_segmentReadStream without null checks --
	 *   presumably callers only invoke this after a segment is loaded; confirm before relying on it.
	 * @throws IOException
	 */
	public long tell() throws IOException {
		return _currentSegment.contentLength() - _segmentReadStream.available();
	}
	/**
	 * @return Total length of the stream, if known, otherwise -1. This base
	 *   implementation never knows the length; subclasses may override.
	 * @throws IOException
	 */
	public long length() throws IOException {
		return -1;
	}
} | javasrc/src/org/ccnx/ccn/io/CCNAbstractInputStream.java | /**
* Part of the CCNx Java Library.
*
* Copyright (C) 2008, 2009 Palo Alto Research Center, Inc.
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License version 2.1
* as published by the Free Software Foundation.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details. You should have received
* a copy of the GNU Lesser General Public License along with this library;
* if not, write to the Free Software Foundation, Inc., 51 Franklin Street,
* Fifth Floor, Boston, MA 02110-1301 USA.
*/
package org.ccnx.ccn.io;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.util.Arrays;
import java.util.EnumSet;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import org.ccnx.ccn.CCNHandle;
import org.ccnx.ccn.ContentVerifier;
import org.ccnx.ccn.config.SystemConfiguration;
import org.ccnx.ccn.impl.security.crypto.ContentKeys;
import org.ccnx.ccn.impl.support.DataUtils;
import org.ccnx.ccn.impl.support.Log;
import org.ccnx.ccn.io.content.Link.LinkObject;
import org.ccnx.ccn.profiles.SegmentationProfile;
import org.ccnx.ccn.profiles.VersioningProfile;
import org.ccnx.ccn.profiles.security.access.group.GroupAccessControlManager;
import org.ccnx.ccn.protocol.CCNTime;
import org.ccnx.ccn.protocol.ContentName;
import org.ccnx.ccn.protocol.ContentObject;
import org.ccnx.ccn.protocol.KeyLocator;
import org.ccnx.ccn.protocol.PublisherPublicKeyDigest;
import org.ccnx.ccn.protocol.SignedInfo.ContentType;
/**
* This abstract class is the superclass of all classes representing an input stream of
* bytes segmented and stored in CCN.
*
* @see SegmentationProfile for description of CCN segmentation
*/
public abstract class CCNAbstractInputStream extends InputStream implements ContentVerifier {
	/**
	 * Handle used for all network retrievals on this stream.
	 */
	protected CCNHandle _handle;

	/**
	 * The Link we dereferenced to get here, if any. This may contain
	 * a link dereferenced to get to it, and so on.
	 */
	protected LinkObject _dereferencedLink = null;

	/**
	 * Flags:
	 * DONT_DEREFERENCE to prevent dereferencing in case we are attempting to read a link.
	 */
	public enum FlagTypes { DONT_DEREFERENCE };

	// Current flag set; mutated by addFlag/addFlags/removeFlag/clearFlags, replaced by setFlags.
	protected EnumSet<FlagTypes> _flags = EnumSet.noneOf(FlagTypes.class);

	/**
	 * The segment we are currently reading from.
	 */
	protected ContentObject _currentSegment = null;

	/**
	 * information if the stream we are reading is marked GONE (see ContentType).
	 */
	protected ContentObject _goneSegment = null;

	/**
	 * Internal stream used for buffering reads. May include filters.
	 */
	protected InputStream _segmentReadStream = null;

	/**
	 * The name prefix of the segmented stream we are reading, up to (but not including)
	 * a segment number.
	 */
	protected ContentName _baseName = null;

	/**
	 * The publisher we are looking for, either specified by querier on initial
	 * read, or read from previous blocks (for now, we assume that all segments in a
	 * stream are created by the same publisher).
	 */
	protected PublisherPublicKeyDigest _publisher = null;

	/**
	 * The segment number to start with. If not specified, is SegmentationProfile#baseSegment().
	 */
	protected Long _startingSegmentNumber = null;

	/**
	 * The timeout to use for segment retrieval.
	 */
	protected int _timeout = SystemConfiguration.getDefaultTimeout();

	/**
	 * Encryption/decryption handler.
	 */
	protected Cipher _cipher;
	// Keys used to build the decryption cipher; null means content is read as-is.
	protected ContentKeys _keys;

	/**
	 * If this content uses Merkle Hash Trees or other bulk signatures to amortize
	 * signature cost, we can amortize verification cost as well by caching verification
	 * data as follows: store the currently-verified root signature, so we don't have to re-verify it;
	 * and the verified root hash. For each piece of incoming content, see if it aggregates
	 * to the same root, if so don't reverify signature. If not, assume it's part of
	 * a new tree and change the root.
	 */
	protected byte [] _verifiedRootSignature = null;
	protected byte [] _verifiedProxy = null;

	/**
	 * The key locator of the content publisher as we read it.
	 */
	protected KeyLocator _publisherKeyLocator;

	// True once a read has run past the end of the last segment.
	protected boolean _atEOF = false;

	/**
	 * Used for mark(int) and reset().
	 */
	protected int _readlimit = 0;
	protected int _markOffset = 0;
	protected long _markBlock = 0;
/**
* Set up an input stream to read segmented CCN content under a given name.
* Note that this constructor does not currently retrieve any
* data; data is not retrieved until read() is called. This will change in the future, and
* this constructor will retrieve the first block.
*
* @param baseName Name to read from. If contains a segment number, will start to read from that
* segment.
* @param startingSegmentNumber Alternative specification of starting segment number. If
* unspecified, will be SegmentationProfile#baseSegment().
* @param publisher The key we require to have signed this content. If null, will accept any publisher
* (subject to higher-level verification).
* @param keys The keys to use to decrypt this content. Null if content unencrypted, or another
* process will be used to retrieve the keys.
* @param handle The CCN handle to use for data retrieval. If null, the default handle
* given by CCNHandle#getHandle() will be used.
* @throws IOException Not currently thrown, will be thrown when constructors retrieve first block.
*/
public CCNAbstractInputStream(
ContentName baseName, Long startingSegmentNumber,
PublisherPublicKeyDigest publisher,
ContentKeys keys,
EnumSet<FlagTypes> flags,
CCNHandle handle) throws IOException {
super();
if (null == baseName) {
throw new IllegalArgumentException("baseName cannot be null!");
}
_handle = handle;
if (null == _handle) {
_handle = CCNHandle.getHandle();
}
_publisher = publisher;
if (null != keys) {
keys.requireDefaultAlgorithm();
_keys = keys;
}
if (null != flags) {
_flags = flags;
}
// So, we assume the name we get in is up to but not including the sequence
// numbers, whatever they happen to be. If a starting segment is given, we
// open from there, otherwise we open from the leftmost number available.
// We assume by the time you've called this, you have a specific version or
// whatever you want to open -- this doesn't crawl versions. If you don't
// offer a starting segment index, but instead offer the name of a specific
// segment, this will use that segment as the starting segment.
_baseName = baseName;
if (startingSegmentNumber != null) {
_startingSegmentNumber = startingSegmentNumber;
} else {
if (SegmentationProfile.isSegment(baseName)) {
_startingSegmentNumber = SegmentationProfile.getSegmentNumber(baseName);
baseName = _baseName.parent();
} else {
_startingSegmentNumber = SegmentationProfile.baseSegment();
}
}
}
	/**
	 * Set up an input stream to read segmented CCN content starting with a given
	 * ContentObject that has already been retrieved.  
	 * @param startingSegment The first segment to read from. If this is not the
	 * 		first segment of the stream, reading will begin from this point.
	 * 		We assume that the signature on this segment was verified by our caller.
	 * @param keys The keys to use to decrypt this content. Null if content unencrypted, or another
	 * 				process will be used to retrieve the keys.
	 * @param flags any flags necessary for processing this stream; have to hand in in constructor in case
	 * 		first segment provided, so can apply to that segment
	 * @param handle The CCN handle to use for data retrieval. If null, the default handle
	 * 		given by CCNHandle#getHandle() will be used.
	 * @throws IOException if the starting segment's name carries no valid segment number,
	 * 		or if first-segment setup (link dereferencing, key setup) fails.
	 */
	public CCNAbstractInputStream(ContentObject startingSegment,
			ContentKeys keys,
			EnumSet<FlagTypes> flags,
			CCNHandle handle) throws IOException  {
		super();
		_handle = handle; 
		if (null == _handle) {
			_handle = CCNHandle.getHandle();
		}
		
		if (null != keys) {
			keys.requireDefaultAlgorithm();
			_keys = keys;
		}
		
		if (null != flags) {
			_flags = flags;
		}
		
		// Derive the base name (name minus segment component) from the provided segment.
		_baseName = SegmentationProfile.segmentRoot(startingSegment.name());
		try {
			_startingSegmentNumber = SegmentationProfile.getSegmentNumber(startingSegment.name());
		} catch (NumberFormatException nfe) {
			throw new IOException("Stream starter segment name does not contain a valid segment number, so the stream does not know what content to start with.");
		}
		
		setFirstSegment(startingSegment);
	}
/**
 * Set the timeout that will be used for all content retrievals on this stream.
 * Default is 5 seconds.
 * @param timeout Milliseconds
 */
public void setTimeout(int timeout) {
	this._timeout = timeout;
}
/**
 * Merge additional flags into this stream's flag set; existing flags are kept.
 */
public void addFlags(EnumSet<FlagTypes> additionalFlags) {
	this._flags.addAll(additionalFlags);
}
/**
 * Add a single flag to this stream; existing flags are kept.
 */
public void addFlag(FlagTypes additionalFlag) {
	this._flags.add(additionalFlag);
}
/**
 * Replace this stream's flags wholesale. Passing null empties the current set
 * instead of installing a null reference.
 */
public void setFlags(EnumSet<FlagTypes> flags) {
	if (flags != null) {
		_flags = flags;
	} else {
		_flags.clear();
	}
}
/**
 * Remove every flag currently set on this stream.
 */
public void clearFlags() {
	this._flags.clear();
}
/**
 * Remove a single flag from this stream, if present.
 */
public void removeFlag(FlagTypes flag) {
	this._flags.remove(flag);
}
/**
 * @return true if the given flag is currently set on this stream.
 */
public boolean hasFlag(FlagTypes flag) {
	return this._flags.contains(flag);
}
/**
 * @return The name used to retrieve segments of this stream (not including the segment number).
 */
public ContentName getBaseName() {
	return this._baseName;
}
/**
 * @return The version of the stream being read if its name is versioned,
 *         or null when there is no base name to inspect.
 */
public CCNTime getVersion() {
	return (_baseName == null)
			? null
			: VersioningProfile.getTerminalVersionAsTimestampIfVersioned(_baseName);
}
/**
 * Read a single byte, delegating to {@link #read(byte[], int, int)}.
 * @return the byte as an unsigned value, or -1 at end of stream.
 */
@Override
public int read() throws IOException {
	byte[] single = new byte[1];
	int count = read(single, 0, 1);
	return (count < 0) ? -1 : (single[0] & 0x000000FF);
}
/**
 * Fill as much of {@code b} as possible; delegates to {@link #read(byte[], int, int)}.
 */
@Override
public int read(byte[] b) throws IOException {
	return this.read(b, 0, b.length);
}
/**
 * Read up to {@code len} bytes into {@code buf} at {@code offset}.
 * Null-checks the buffer, then hands off to the subclass-defined
 * {@link #readInternal(byte[], int, int)}.
 */
@Override
public int read(byte[] buf, int offset, int len) throws IOException {
	if (buf == null) {
		throw new NullPointerException("Buffer cannot be null!");
	}
	return readInternal(buf, offset, len);
}
/**
 * Actual mechanism used to trigger segment retrieval and perform content reads.
 * Subclasses define different schemes for retrieving content across segments.
 * Note: callers such as skip(long) may pass a null buffer, meaning "consume
 * (skip) up to len bytes without storing them".
 * @param buf As in read(byte[], int, int).
 * @param offset As in read(byte[], int, int).
 * @param len As in read(byte[], int, int).
 * @return As in read(byte[], int, int).
 * @throws IOException if a segment cannot be retrieved, or there is an error in lower-level
 *   segment retrieval mechanisms. Uses subclasses of IOException to help provide
 *   more information. In particular, throws NoMatchingContentFoundException when
 *   no content found within the timeout given.
 */
protected abstract int readInternal(byte [] buf, int offset, int len) throws IOException;
/**
 * Called to set the first segment when opening a stream. This does initialization
 * and setup particular to the first segment of a stream: automated link dereferencing
 * (unless FlagTypes.DONT_DEREFERENCE is set), GONE-content detection, and decryption
 * key lookup for encrypted content when no keys were supplied. Subclasses should not
 * override unless they really know what they are doing. Calls
 * #setCurrentSegment(ContentObject) for the first segment. Note that this assumes
 * that all segments of a given piece of content are either encrypted or not.
 * @param newSegment Must not be null
 * @throws IOException If newSegment is null or decryption keys set up incorrectly
 */
protected void setFirstSegment(ContentObject newSegment) throws IOException {
	if (null == newSegment) {
		throw new NoMatchingContentFoundException("Cannot find first segment of " + getBaseName());
	}
	LinkObject theLink = null;
	while (newSegment.isType(ContentType.LINK) && (!hasFlag(FlagTypes.DONT_DEREFERENCE))) {
		// Automated dereferencing. Want to make a link object to read in this link, then
		// dereference it to get the segment we really want. We then fix up the _baseName,
		// and continue like nothing ever happened.
		theLink = new LinkObject(newSegment, _handle);
		pushDereferencedLink(theLink); // set _dereferencedLink to point to the new link, pushing
		// old ones down the stack if necessary
		// dereference will check for link cycles
		newSegment = _dereferencedLink.dereference(_timeout);
		Log.info("CCNAbstractInputStream: dereferencing link {0} to {1}, resulting data {2}", theLink.getVersionedName(),
				theLink.link(), ((null == newSegment) ? "null" : newSegment.name()));
		if (newSegment == null) {
			// TODO -- catch error states. Do we throw exception or return null?
			// Set error states -- when do we find link cycle and set the error on the link?
			// Clear error state when update is successful.
			// Two cases -- link loop or data not found.
			if (_dereferencedLink.hasError()) {
				if (_dereferencedLink.getError() instanceof LinkCycleException) {
					// Leave the link set on the input stream, so that caller can explore errors.
					Log.warning("Hit link cycle on link {0} pointing to {1}, cannot dereference. See this.dereferencedLink() for more information!",
							_dereferencedLink.getVersionedName(), _dereferencedLink.link().targetName());
				}
				// Might also cover NoMatchingContentFoundException here...for now, just return null
				// so can call it more than once.
				throw _dereferencedLink.getError();
			} else {
				throw new NoMatchingContentFoundException("Cannot find first segment of " + getBaseName() + ", which is a link pointing to " + _dereferencedLink.link().targetName());
			}
		}
		// Rebase the stream on the dereferenced target's name before looping again
		// (the target may itself be a link).
		_baseName = SegmentationProfile.segmentRoot(newSegment.name());
		// go around again,
	}
	if (newSegment.isType(ContentType.GONE)) {
		_goneSegment = newSegment;
		Log.info("getFirstSegment: got gone segment: " + _goneSegment.name());
	} else if (newSegment.isType(ContentType.ENCR) && (null == _keys)) {
		// The block is encrypted and we don't have keys
		// Get the content name without the segment parent
		ContentName contentName = SegmentationProfile.segmentRoot(newSegment.name());
		// Attempt to retrieve the keys for this namespace
		_keys = GroupAccessControlManager.keysForInput(contentName, newSegment.signedInfo().getPublisherKeyID(), _handle);
	}
	setCurrentSegment(newSegment);
}
/**
 * Set up current segment for reading, including preparation for decryption if necessary.
 * Called after getSegment/getFirstSegment/getNextSegment, which take care of verifying
 * the segment for us. Assumes newSegment has been verified. Records the segment's
 * publisher and key locator, and (when keys are available) decrypts the whole segment
 * body up front into an in-memory stream.
 * @param newSegment the verified segment to make current; null clears the current
 *   segment and read stream (logged, no exception).
 * @throws IOException If decryption keys set up incorrectly
 */
protected void setCurrentSegment(ContentObject newSegment) throws IOException {
	_currentSegment = null;
	_segmentReadStream = null;
	if (null == newSegment) {
		Log.info("FINDME: Setting current segment to null! Did a segment fail to verify?");
		return;
	}
	_currentSegment = newSegment;
	// Should we only set these on the first retrieval?
	// getSegment will ensure we get a requested publisher (if we have one) for the
	// first segment; once we have a publisher, it will ensure that future segments match it.
	_publisher = newSegment.signedInfo().getPublisherKeyID();
	_publisherKeyLocator = newSegment.signedInfo().getKeyLocator();
	if (_goneSegment != newSegment) { // want pointer ==, not equals() here
		// if we're decrypting, then set it up now
		if (_keys != null) {
			// We only do automated lookup of keys on first segment. Otherwise
			// we assume we must have the keys or don't try to decrypt.
			try {
				// Reuse of current segment OK. Don't expect to have two separate readers
				// independently use this stream without state confusion anyway.
				_cipher = _keys.getSegmentDecryptionCipher(
						SegmentationProfile.getSegmentNumber(_currentSegment.name()));
			} catch (InvalidKeyException e) {
				Log.warning("InvalidKeyException: " + e.getMessage());
				throw new IOException("InvalidKeyException: " + e.getMessage());
			} catch (InvalidAlgorithmParameterException e) {
				Log.warning("InvalidAlgorithmParameterException: " + e.getMessage());
				throw new IOException("InvalidAlgorithmParameterException: " + e.getMessage());
			}
			// Let's optimize random access to this buffer (e.g. as used by the decoders) by
			// decrypting a whole ContentObject at a time. It's not a huge security risk,
			// and right now we can't rewind the buffers so if we do try to decode out of
			// an encrypted block we constantly restart from the beginning and redecrypt
			// the content.
			// Previously we used our own UnbufferedCipherInputStream class directly as
			// our _segmentReadStream for encrypted data, as Java's CipherInputStreams
			// assume block-oriented boundaries for decryption, and buffer incorrectly as a result.
			// If we want to go back to incremental decryption, putting a small cache into that
			// class to optimize going backwards would help.
			// Unless we use a compressing cipher, the maximum data length for decrypted data
			// is _currentSegment.content().length. But we might as well make something
			// general that will handle all cases. There may be a more efficient way to
			// do this; want to minimize copies.
			byte [] bodyData = _cipher.update(_currentSegment.content());
			byte[] tailData;
			try {
				tailData = _cipher.doFinal();
			} catch (IllegalBlockSizeException e) {
				Log.warning("IllegalBlockSizeException: " + e.getMessage());
				throw new IOException("IllegalBlockSizeException: " + e.getMessage());
			} catch (BadPaddingException e) {
				Log.warning("BadPaddingException: " + e.getMessage());
				throw new IOException("BadPaddingException: " + e.getMessage());
			}
			if ((null == tailData) || (0 == tailData.length)) {
				_segmentReadStream = new ByteArrayInputStream(bodyData);
			} else {
				// Concatenate cipher body and final block into one buffer.
				byte [] allData = new byte[bodyData.length + tailData.length];
				// Still avoid 1.6 array ops
				System.arraycopy(bodyData, 0, allData, 0, bodyData.length);
				System.arraycopy(tailData, 0, allData, bodyData.length, tailData.length);
				_segmentReadStream = new ByteArrayInputStream(allData);
			}
		} else {
			if (_currentSegment.signedInfo().getType().equals(ContentType.ENCR)) {
				// We only do automated lookup of keys on first segment.
				Log.warning("Asked to read encrypted content, but not given a key to decrypt it. Decryption happening at higher level?");
			}
			_segmentReadStream = new ByteArrayInputStream(_currentSegment.content());
		}
	}
}
/**
 * Rewinds read buffers for current segment to beginning of the segment.
 * NOTE(review): if _currentSegment is null this logs and then NPEs on
 * _segmentReadStream.reset() (setCurrentSegment(null) leaves the stream null);
 * callers are expected to have a current segment — confirm before hardening.
 * @throws IOException
 */
protected void rewindSegment() throws IOException {
	if (null == _currentSegment) {
		// Fixed typo in log message ("reqind" -> "rewind").
		Log.info("Cannot rewind null segment.");
	}
	if (null == _segmentReadStream) {
		// Rebuild the per-segment read stream (re-decrypting if necessary).
		setCurrentSegment(_currentSegment);
	}
	_segmentReadStream.reset(); // will reset to 0 if mark not called
}
/**
 * Retrieves a specific segment of this stream, indicated by segment number.
 * Three navigation options: get first (leftmost) segment, get next segment,
 * or get a specific segment.
 * Have to assume that everyone is using our segment number encoding. Probably
 * easier to ask raw streams to use that encoding (e.g. for packet numbers)
 * than to flag streams as to whether they are using integers or segments.
 * @param number Segment number to retrieve. See SegmentationProfile for numbering.
 * 		If we already have this segment as #currentSegmentNumber(), will just
 * 		return the current segment, and will not re-retrieve it from the network.
 * @throws IOException If no matching content found (actually throws NoMatchingContentFoundException)
 *  	or if there is an error at lower layers.
 **/
protected ContentObject getSegment(long number) throws IOException {
	if (_currentSegment != null) {
		// what segment do we have right now?  maybe we already have it
		if (currentSegmentNumber() == number){
			// we already have this segment... just use it
			return _currentSegment;
		}
	}
	// If no publisher specified a priori, _publisher will be null and we will get whoever is
	// available that verifies for first segment. If _publisher specified a priori, or once we have
	// retrieved a segment and set _publisher to the publisher of that segment, we will continue to
	// retrieve segments by the same publisher.
	return SegmentationProfile.getSegment(_baseName, number, _publisher, _timeout, this, _handle);
}
/**
 * Checks whether we might have a next segment.
 * @return Returns false if this content is marked as GONE (see ContentType), or if we have
 * 		retrieved the segment marked as the last one (finalBlockID matches the current
 * 		segment's last name component), or, in a very rare case, if we're
 * 		reading content that does not have segment markers.
 * @throws IOException if called before any segment has been retrieved.
 */
protected boolean hasNextSegment() throws IOException {
	// We're looking at content marked GONE
	if (null != _goneSegment) {
		Log.info("getNextSegment: We have a gone segment, no next segment. Gone segment: " + _goneSegment.name());
		return false;
	}
	if (null == _currentSegment) {
		Log.severe("hasNextSegment() called when we have no current segment!");
		throw new IOException("hasNextSegment() called when we have no current segment!");
	}
	// Check to see if finalBlockID is the current segment. If so, there should
	// be no next segment. (If the writer makes a mistake and guesses the wrong
	// value for finalBlockID, they won't put that wrong value in the segment they're
	// guessing itself -- unless they want to try to extend a "closed" stream.
	// Normally by the time they write that segment, they either know they're done or not.
	if (null != _currentSegment.signedInfo().getFinalBlockID()) {
		if (Arrays.equals(_currentSegment.signedInfo().getFinalBlockID(), _currentSegment.name().lastComponent())) {
			Log.info("getNextSegment: there is no next segment. We have segment: " +
					DataUtils.printHexBytes(_currentSegment.name().lastComponent()) + " which is marked as the final segment.");
			return false;
		}
	}
	if (!SegmentationProfile.isSegment(_currentSegment.name())) {
		Log.info("Unsegmented content: {0}. No next segment.", _currentSegment.name());
		return false;
	}
	return true;
}
/**
 * Retrieve the next segment of the stream. Convenience method, uses #getSegment(long);
 * falls back to the first segment when nothing has been read yet.
 * @return the next segment, if found.
 * @throws IOException
 */
protected ContentObject getNextSegment() throws IOException {
	if (_currentSegment == null) {
		Log.info("getNextSegment: no current segment, getting first segment.");
		return getFirstSegment();
	}
	Log.info("getNextSegment: getting segment after " + _currentSegment.name());
	return getSegment(nextSegmentNumber());
}
/**
 * Retrieves the first segment of the stream, based on specified startingSegmentNumber
 * (see #CCNAbstractInputStream(ContentName, Long, PublisherPublicKeyDigest, ContentKeys, CCNHandle)).
 * Convenience method, uses #getSegment(long).
 * @return the first segment, if found.
 * @throws IOException If can't get a valid starting segment number
 */
protected ContentObject getFirstSegment() throws IOException {
	// Guard clause: no starting segment number means we cannot know what to fetch.
	if (null == _startingSegmentNumber) {
		throw new IOException("Stream does not have a valid starting segment number.");
	}
	ContentObject firstSegment = getSegment(_startingSegmentNumber);
	Log.info("getFirstSegment: segment number: " + _startingSegmentNumber + " got segment? " +
			((null == firstSegment) ? "no " : firstSegment.name()));
	return firstSegment;
}
/**
 * Method to determine whether a retrieved block is the first segment of this stream (as
 * specified by startingSegmentNumber, (see #CCNAbstractInputStream(ContentName, Long, PublisherPublicKeyDigest, ContentKeys, CCNHandle)).
 * Overridden by subclasses to implement narrower constraints on names. Once first
 * segment is retrieved, further segments can be identified just by segment-naming
 * conventions (see SegmentationProfile).
 *
 * @param desiredName The expected name prefix for the stream.
 * 	For CCNAbstractInputStream, assume that desiredName contains the name up to but not including
 * 	segmentation information.
 * @param segment The potential first segment. May be null, in which case this returns false.
 * @return True if it is the first segment, false otherwise.
 */
protected boolean isFirstSegment(ContentName desiredName, ContentObject segment) {
	if ((null != segment) && (SegmentationProfile.isSegment(segment.name()))) {
		Log.info("is " + segment.name() + " a first segment of " + desiredName);
		// In theory, the segment should be at most a versioning component different from desiredName.
		// In the case of complex segmented objects (e.g. a KeyDirectory), where there is a version,
		// then some name components, then a segment, desiredName should contain all of those other
		// name components -- you can't use the usual versioning mechanisms to pull first segment anyway.
		if (!desiredName.equals(SegmentationProfile.segmentRoot(segment.name()))) {
			Log.info("Desired name :" + desiredName + " is not a prefix of segment: " + segment.name());
			return false;
		}
		// With an explicit starting segment number, match it exactly; otherwise fall
		// back to the profile's notion of "first segment".
		if (null != _startingSegmentNumber) {
			return (_startingSegmentNumber.equals(SegmentationProfile.getSegmentNumber(segment.name())));
		} else {
			return SegmentationProfile.isFirstSegment(segment.name());
		}
	}
	return false;
}
/**
 * If we traversed a link to get this object, make it available.
 */
public synchronized LinkObject getDereferencedLink() {
	return _dereferencedLink;
}
/**
 * Use only if you know what you are doing; replaces the dereferenced-link record outright.
 */
protected synchronized void setDereferencedLink(LinkObject dereferencedLink) {
	_dereferencedLink = dereferencedLink;
}
/**
 * Add a LinkObject to the stack we had to dereference to get here.
 * The newest link becomes the head; any previously-recorded link is pushed
 * underneath it (chained via the link's own dereferenced-link slot).
 * A null argument is a no-op.
 */
protected synchronized void pushDereferencedLink(LinkObject dereferencedLink) {
	if (null == dereferencedLink) {
		return;
	}
	if (null != _dereferencedLink) {
		if (null != dereferencedLink.getDereferencedLink()) {
			// Incoming link already carries its own stack; merging two stacks loses ordering guarantees.
			Log.warning("Merging two link stacks -- {0} already has a dereferenced link from {1}. Behavior unpredictable.",
					dereferencedLink.getVersionedName(), dereferencedLink.getDereferencedLink().getVersionedName());
		}
		dereferencedLink.pushDereferencedLink(_dereferencedLink);
	}
	setDereferencedLink(dereferencedLink);
}
/**
 * Verifies the signature on a segment using cached bulk signature data (from Merkle Hash Trees)
 * if it is available. Caches the last verified root signature and proxy digest so that
 * subsequent segments covered by the same tree need only a digest comparison, not a
 * full public-key verification.
 * TODO -- check to see if it matches desired publisher.
 * @param segment the segment whose signature to verify in the context of this stream.
 * @return true if the segment verifies, false on any verification or unexpected failure.
 */
public boolean verify(ContentObject segment) {
	// First we verify.
	// Low-level verify just checks that signer actually signed.
	// High-level verify checks trust.
	try {
		// We could have several options here. This segment could be simply signed.
		// or this could be part of a Merkle Hash Tree. If the latter, we could
		// already have its signing information.
		if (null == segment.signature().witness()) {
			return segment.verify(_handle.keyManager());
		}
		// Compare to see whether this segment matches the root signature we previously verified, if
		// not, verify and store the current signature.
		// We need to compute the proxy regardless.
		byte [] proxy = segment.computeProxy();
		// OK, if we have an existing verified signature, and it matches this segment's
		// signature, the proxy ought to match as well.
		if ((null != _verifiedRootSignature) && (Arrays.equals(_verifiedRootSignature, segment.signature().signature()))) {
			if ((null == proxy) || (null == _verifiedProxy) || (!Arrays.equals(_verifiedProxy, proxy))) {
				Log.warning("Found segment: " + segment.name() + " whose digest fails to verify; segment length: " + segment.contentLength());
				Log.info("Verification failure: " + segment.name() + " timestamp: " + segment.signedInfo().getTimestamp() + " content length: " + segment.contentLength() +
						" proxy: " + DataUtils.printBytes(proxy) +
						" expected proxy: " + DataUtils.printBytes(_verifiedProxy));
				return false;
			}
		} else {
			// Verifying a new segment. See if the signature verifies, otherwise store the signature
			// and proxy.
			if (!ContentObject.verify(proxy, segment.signature().signature(), segment.signedInfo(), segment.signature().digestAlgorithm(), _handle.keyManager())) {
				Log.warning("Found segment: " + segment.name().toString() + " whose signature fails to verify; segment length: " + segment.contentLength() + ".");
				return false;
			} else {
				// Remember current verifiers
				_verifiedRootSignature = segment.signature().signature();
				_verifiedProxy = proxy;
			}
		}
		Log.info("Got segment: " + segment.name().toString() + ", verified.");
	} catch (Exception e) {
		// Treat any exception during verification as a verification failure.
		Log.warning("Got an " + e.getClass().getName() + " exception attempting to verify segment: " + segment.name().toString() + ", treat as failure to verify.");
		Log.warningStackTrace(e);
		return false;
	}
	return true;
}
/**
 * Returns the segment number for the next segment.
 * Default segmentation generates sequentially-numbered stream
 * segments but this method may be overridden in subclasses to
 * perform re-assembly on streams that have been segmented differently.
 * @return The index of the next segment of stream data.
 */
public long nextSegmentNumber() {
	return (_currentSegment == null)
			? _startingSegmentNumber.longValue()
			: segmentNumber() + 1;
}
/**
 * @return Returns the segment number of the current segment if we have one, otherwise
 * the expected startingSegmentNumber.
 */
public long segmentNumber() {
	if (_currentSegment == null) {
		return _startingSegmentNumber;
	}
	// This needs to work on streaming content that is not traditional fragments;
	// the segmentation profile handles extracting the number from the name.
	return SegmentationProfile.getSegmentNumber(_currentSegment.name());
}
/**
 * @return Returns the segment number of the current segment if we have one, otherwise -1
 * (a sentinel that never matches a real segment number).
 */
protected long currentSegmentNumber() {
	return (_currentSegment == null) ? -1 : segmentNumber();
}
/**
 * Checks to see whether this content has been marked as GONE (deleted). Will retrieve the first
 * segment if we do not already have it in order to make this determination.
 * @return true if stream is GONE.
 * @throws NoMatchingContentFoundException if no first segment found
 * @throws IOException if there is other difficulty retrieving the first segment.
 */
public boolean isGone() throws NoMatchingContentFoundException, IOException {
	// TODO: once first segment is always read in constructor this code will change
	if (_currentSegment == null) {
		// setFirstSegment sets _goneSegment, performs link dereferencing, and throws
		// NoMatchingContentFoundException when getFirstSegment returns null — so all
		// retry behavior stays localized in the getFirstSegment variants.
		setFirstSegment(getFirstSegment());
	}
	// We might have set first segment in constructor, in which case _goneSegment is already set.
	return (null != _goneSegment);
}
/**
 * @return the single segment of a stream marked as GONE, or null if the stream is not GONE.
 */
public ContentObject deletionInformation() {
	return this._goneSegment;
}
/**
 * Callers may need to access information about this stream's publisher.
 * We eventually should (TODO) ensure that all the segments we're reading
 * match in publisher information, and cache the verified publisher info.
 * But we do verify each segment, so start by pulling what's in the current segment.
 * @return the publisher of the data in the stream (either as requested, or once we have
 *   data, as observed).
 */
public PublisherPublicKeyDigest publisher() {
	return this._publisher;
}
/**
 * @return the key locator for this stream's publisher.
 */
public KeyLocator publisherKeyLocator() {
	return this._publisherKeyLocator;
}
/**
 * @return the name of the current segment held by this stream, or "null". Used for debugging.
 */
public String currentSegmentName() {
	return (_currentSegment == null) ? "null" : _currentSegment.name().toString();
}
/**
 * @return bytes remaining in the current segment's buffer, or 0 when no segment is loaded.
 */
@Override
public int available() throws IOException {
	return (_segmentReadStream == null) ? 0 : _segmentReadStream.available();
}
/**
 * @return Whether this stream believes it is at eof (has read past the end of the
 *   last segment of the stream).
 */
public boolean eof() {
	return this._atEOF;
}
/**
 * Closing is a no-op for this stream: segment data lives in in-memory buffers
 * and the shared CCNHandle is not owned by the stream.
 */
@Override
public void close() throws IOException {
	// don't have to do anything.
}
/**
 * Record the current position (segment number + byte offset within the segment)
 * so a later {@link #reset()} can return to it. Also marks the underlying
 * per-segment stream when it supports marking.
 * NOTE(review): an IOException from available() is rethrown as an unchecked
 * RuntimeException because mark(int) cannot throw — confirm callers expect this.
 */
@Override
public synchronized void mark(int readlimit) {
	_readlimit = readlimit;
	_markBlock = segmentNumber();
	if (null == _segmentReadStream) {
		_markOffset = 0;
	} else {
		try {
			// Offset = segment length minus what is still unread in the buffer.
			_markOffset = _currentSegment.contentLength() - _segmentReadStream.available();
			if (_segmentReadStream.markSupported()) {
				_segmentReadStream.mark(readlimit);
			}
		} catch (IOException e) {
			throw new RuntimeException(e);
		}
	}
	Log.finer("mark: block: " + segmentNumber() + " offset: " + _markOffset);
}
/**
 * Mark/reset is always supported by this stream (see {@link #mark(int)}).
 */
@Override
public boolean markSupported() {
	return true;
}
/**
 * Return to the position recorded by {@link #mark(int)}: re-acquire the marked
 * segment if necessary, rewind within it, then skip forward to the marked offset.
 * @throws IOException if the marked segment cannot be retrieved or rewound.
 */
@Override
public synchronized void reset() throws IOException {
	// TODO: when first block is read in constructor this check can be removed
	if (_currentSegment == null) {
		setFirstSegment(getSegment(_markBlock));
	} else if (currentSegmentNumber() == _markBlock) {
		//already have the correct segment
		if (tell() == _markOffset){
			//already have the correct offset
		} else {
			// Reset and skip.
			if (_segmentReadStream.markSupported()) {
				_segmentReadStream.reset();
				Log.finer("reset within block: block: " + segmentNumber() + " offset: " + _markOffset + " eof? " + _atEOF);
				return;
			} else {
				// Rebuild the segment's read stream from scratch (restarts at offset 0).
				setCurrentSegment(_currentSegment);
			}
		}
	} else {
		// getSegment doesn't pull segment if we already have the right one
		setCurrentSegment(getSegment(_markBlock));
	}
	_segmentReadStream.skip(_markOffset);
	_atEOF = false;
	Log.finer("reset: block: " + segmentNumber() + " offset: " + _markOffset + " eof? " + _atEOF);
}
/**
 * Skip forward up to {@code n} bytes by reading (and discarding) via
 * {@link #readInternal(byte[], int, int)} with a null buffer.
 * @param n number of bytes to skip; non-positive values skip nothing.
 * @return the number of bytes actually skipped.
 * @throws IOException on retrieval errors from readInternal.
 */
@Override
public long skip(long n) throws IOException {
	Log.info("in skip("+n+")");
	if (n < 0) {
		return 0;
	}
	// Fix: a plain (int) cast silently truncated (and could make negative) any
	// count larger than Integer.MAX_VALUE. Clamp instead; readInternal takes an int.
	int toSkip = (int) Math.min(n, Integer.MAX_VALUE);
	return readInternal(null, 0, toSkip);
}
/**
 * @return Currently returns 0. Can be optionally overridden by subclasses that
 *   know how many segments the stream contains.
 * @throws IOException declared for the benefit of overriding subclasses.
 */
protected int segmentCount() throws IOException {
	return 0;
}
/**
 * Seek a stream to a specific byte offset from the start. Tries to avoid retrieving
 * extra segments: if we are already on the first segment, it either skips forward
 * or rewinds within that segment rather than re-fetching.
 * NOTE(review): the offset is always applied relative to the first segment here,
 * consistent with the base-class tell() being segment-relative — confirm subclasses
 * override for whole-stream seeking.
 * @param position byte offset to seek to
 * @throws IOException
 */
public void seek(long position) throws IOException {
	Log.info("Seeking stream to " + position);
	// TODO: when first block is read in constructor this check can be removed
	if ((_currentSegment == null) || (!SegmentationProfile.isFirstSegment(_currentSegment.name()))) {
		setFirstSegment(getFirstSegment());
		skip(position);
	} else if (position > tell()) {
		// we are on the first segment already, just move forward
		skip(position - tell());
	} else {
		// we are on the first segment already, just rewind back to the beginning
		rewindSegment();
		skip(position);
	}
}
/**
 * @return Returns position in byte offset. For CCNAbstractInputStream, provide an inadequate
 * base implementation that returns the offset into the current segment (not the stream as
 * a whole).
 * NOTE(review): dereferences _currentSegment/_segmentReadStream without null checks;
 * calling before any segment is loaded will NPE — confirm callers guarantee a segment.
 * @throws IOException
 */
public long tell() throws IOException {
	return _currentSegment.contentLength() - _segmentReadStream.available();
}
/**
 * @return Total length of the stream, if known, otherwise -1. This base
 *   implementation never knows the length; subclasses may override.
 * @throws IOException declared for the benefit of overriding subclasses.
 */
public long length() throws IOException {
	return -1;
}
} | First reversion of new API for cipher management. Need to do ACM lookup.
| javasrc/src/org/ccnx/ccn/io/CCNAbstractInputStream.java | First reversion of new API for cipher management. Need to do ACM lookup. |
|
Java | lgpl-2.1 | 42c2efb00d4ef303b92c41d7639d1d2afe4316c8 | 0 | Barteks2x/ForgeGradle,simon816/ForgeGradle,PaperMC/PaperGradle,kenzierocks/ForgeGradle,killjoy1221/ForgeGradle,nallar/ForgeGradle,matthewprenger/ForgeGradle,RX14/ForgeGradle,clienthax/ForgeGradle,kashike/ForgeGradle | package net.minecraftforge.gradle.tasks;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import net.minecraftforge.gradle.common.Constants;
import net.minecraftforge.gradle.delayed.DelayedFile;
import net.minecraftforge.gradle.patching.ContextualPatch;
import net.minecraftforge.gradle.patching.ContextualPatch.PatchStatus;
import net.minecraftforge.gradle.tasks.abstractutil.EditJarTask;
import org.gradle.api.file.FileCollection;
import org.gradle.api.logging.LogLevel;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFiles;
import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
import com.google.common.io.Files;
public class ProcessSrcJarTask extends EditJarTask
{
// Ordered list of inject/patch stages applied to the jar, in insertion order.
private List<ResourceHolder> stages = new LinkedList<ResourceHolder>();

// Maximum fuzz factor tolerated when applying patches (0 = exact context match).
@Input
private int maxFuzz = 0;

// Patch context provider backed by sourceMap; built in doStuffBefore().
private ContextProvider PROVIDER;
// Identity transform: file contents are not modified on read; all editing
// happens in doStuffMiddle() via injects and patches.
@Override
public String asRead(String file)
{
    return file;
}
/**
 * Pre-processing hook: build the patch context provider over the source map
 * before any stage is applied.
 */
@Override
public void doStuffBefore() throws Throwable
{
    this.PROVIDER = new ContextProvider(sourceMap);
}
// Main processing pass: for each configured stage, first copy its inject files
// into the source/resource maps (skipping any path already present), then apply
// its patch directory (if one is configured).
@Override
public void doStuffMiddle() throws Throwable
{
    for (ResourceHolder stage : stages)
    {
        if (!stage.srcDirs.isEmpty())
        {
            getLogger().lifecycle("Injecting {} files", stage.name);
            for (RelFile rel : stage.getRelInjects())
            {
                String relative = rel.getRelative();

                if (sourceMap.containsKey(relative) || resourceMap.containsKey(relative))
                    continue; //ignore duplicates.

                // .java files go in the source map as text; everything else is a raw resource.
                if (relative.endsWith(".java"))
                {
                    sourceMap.put(relative, Files.toString(rel.file, Charset.defaultCharset()));
                }
                else
                {
                    resourceMap.put(relative, Files.asByteSource(rel.file).read());
                }
            }
        }

        if (stage.patchDir != null)
        {
            getLogger().lifecycle("Applying {} patches", stage.name);
            applyPatchStage(stage.name, stage.getPatchFiles());
        }
    }
}
/**
 * Apply all patch files for one stage against the in-memory source map.
 * Failed hunks are written to a .rej file next to the patch; fuzzed hunks are
 * logged. Note: 'failure' collects the first failure but is never rethrown here,
 * so patching continues past failures — presumably intentional to report all
 * rejects in one run (TODO confirm).
 * @param stage stage name, for logging only
 * @param patchFiles the stage's patch files
 */
public void applyPatchStage(String stage, FileCollection patchFiles) throws Throwable
{
    getLogger().info("Reading patches for stage {}", stage);
    ArrayList<PatchedFile> patches = readPatches(patchFiles);

    boolean fuzzed = false;

    getLogger().info("Applying patches for stage {}", stage);

    Throwable failure = null;
    for (PatchedFile patch : patches)
    {
        List<ContextualPatch.PatchReport> errors = patch.patch.patch(false);
        for (ContextualPatch.PatchReport report : errors)
        {
            // catch failed patches
            if (!report.getStatus().isSuccess())
            {
                File reject = patch.makeRejectFile();
                if (reject.exists())
                {
                    reject.delete();
                }
                getLogger().log(LogLevel.ERROR, "Patching failed: {} {}", PROVIDER.strip(report.getTarget()), report.getFailure().getMessage());

                // now spit the hunks
                int failed = 0;
                for (ContextualPatch.HunkReport hunk : report.getHunks())
                {
                    // catch the failed hunks
                    if (!hunk.getStatus().isSuccess())
                    {
                        failed++;
                        getLogger().error("  " + hunk.getHunkID() + ": " + (hunk.getFailure() != null ? hunk.getFailure().getMessage() : "") + " @ " + hunk.getIndex());
                        Files.append(String.format("++++ REJECTED PATCH %d\n", hunk.getHunkID()), reject, Charsets.UTF_8);
                        Files.append(Joiner.on('\n').join(hunk.hunk.lines), reject, Charsets.UTF_8);
                        Files.append(String.format("\n++++ END PATCH\n"), reject, Charsets.UTF_8);
                    }
                    else if (hunk.getStatus() == PatchStatus.Fuzzed)
                    {
                        getLogger().info("  " + hunk.getHunkID() + " fuzzed " + hunk.getFuzz() + "!");
                    }
                }
                getLogger().log(LogLevel.ERROR, "  {}/{} failed", failed, report.getHunks().size());
                getLogger().log(LogLevel.ERROR, "  Rejects written to {}", reject.getAbsolutePath());

                if (failure == null)
                    failure = report.getFailure();
            }
            // catch fuzzed patches
            else if (report.getStatus() == ContextualPatch.PatchStatus.Fuzzed)
            {
                getLogger().log(LogLevel.INFO, "Patching fuzzed: {}", PROVIDER.strip(report.getTarget()));

                // set the boolean for later use
                fuzzed = true;

                // now spit the hunks
                for (ContextualPatch.HunkReport hunk : report.getHunks())
                {
                    // catch the fuzzed hunks
                    if (hunk.getStatus() == PatchStatus.Fuzzed)
                    {
                        getLogger().info("  {} fuzzed {}!", hunk.getHunkID(), hunk.getFuzz());
                    }
                }

                if (failure == null)
                    failure = report.getFailure();
            }
            // successful patches
            else
            {
                getLogger().info("Patch succeeded: {}", PROVIDER.strip(report.getTarget()));
            }
        }
    }

    if (fuzzed)
    {
        getLogger().lifecycle("Patches Fuzzed!");
    }
}
/**
 * Collects every {@code *.patch} file from the given collection and parses it.
 *
 * @param patchFiles candidate files; entries without a .patch extension are ignored
 * @return the parsed patches, in iteration order of the collection
 * @throws IOException if a patch file cannot be read
 */
private ArrayList<PatchedFile> readPatches(FileCollection patchFiles) throws IOException
{
    final ArrayList<PatchedFile> result = new ArrayList<PatchedFile>();
    for (File candidate : patchFiles.getFiles())
    {
        // Only files with the .patch extension are treated as patches.
        if (!candidate.getPath().endsWith(".patch"))
        {
            continue;
        }
        result.add(readPatch(candidate));
    }
    return result;
}
/**
 * Parses a single patch file into a {@link PatchedFile} wrapper.
 *
 * @param file the .patch file to read
 * @throws IOException if the file cannot be read
 */
private PatchedFile readPatch(File file) throws IOException
{
    getLogger().debug("Reading patch file: {}", file);
    final PatchedFile parsed = new PatchedFile(file);
    return parsed;
}
/**
 * Gradle input: the union of the patch files of every stage.
 * Stages without a patch directory contribute nothing.
 */
@InputFiles
public FileCollection getAllPatches()
{
    FileCollection combined = null;
    for (ResourceHolder holder : stages)
    {
        if (holder.patchDir == null)
        {
            continue; // patch-less stage
        }
        combined = (combined == null)
                ? holder.getPatchFiles()
                : getProject().files(combined, holder.getPatchFiles());
    }
    return combined;
}
/**
 * Gradle input: the union of the inject source trees of every stage.
 */
@InputFiles
public FileCollection getAllInjects()
{
    FileCollection combined = null;
    for (ResourceHolder holder : stages)
    {
        if (combined == null)
        {
            combined = holder.getInjects();
        }
        else
        {
            combined = getProject().files(combined, holder.getInjects());
        }
    }
    return combined;
}
/**
 * Registers a patch stage with optional inject sources.
 *
 * @param name     stage name, used only for logging
 * @param patchDir file, directory, zip or jar holding the patches; may be null
 * @param injects  source trees whose files are copied into the jar before patching
 */
public void addStage(String name, DelayedFile patchDir, DelayedFile... injects)
{
    stages.add(new ResourceHolder(name, patchDir, Arrays.asList(injects)));
}

/**
 * Registers a patch-only stage (no inject sources).
 */
public void addStage(String name, DelayedFile patchDir)
{
    stages.add(new ResourceHolder(name, patchDir));
}
@Override
public void doStuffAfter() throws Throwable
{
    // No post-processing needed; all work happens in doStuffMiddle().
}

/**
 * @return the maximum fuzz factor tolerated when applying patch hunks
 */
public int getMaxFuzz()
{
    return maxFuzz;
}

/**
 * Sets the maximum fuzz factor tolerated when applying patch hunks.
 */
public void setMaxFuzz(int maxFuzz)
{
    this.maxFuzz = maxFuzz;
}
/**
 * Pairs a patch file on disk with its parsed {@link ContextualPatch},
 * and knows where to write rejected hunks for that file.
 */
private class PatchedFile
{
    // The .patch file this instance was parsed from.
    public final File fileToPatch;
    // Parsed patch, bound to this task's context provider and fuzz limit.
    public final ContextualPatch patch;

    public PatchedFile(File file) throws IOException
    {
        this.fileToPatch = file;
        this.patch = ContextualPatch.create(Files.toString(file, Charset.defaultCharset()), PROVIDER).setAccessC14N(true).setMaxFuzz(getMaxFuzz());
    }

    /**
     * @return the sibling {@code <name>.rej} file used to record rejected hunks
     */
    public File makeRejectFile()
    {
        return new File(fileToPatch.getParentFile(), fileToPatch.getName() + ".rej");
    }
}
/**
 * Supplies file contents to {@link ContextualPatch} from the in-memory source
 * map, translating patch target paths into map keys by stripping the first
 * {@link #STRIP} path components (patch targets carry repo-style prefixes).
 */
private class ContextProvider implements ContextualPatch.IContextProvider
{
    // Backing map of relative path -> file contents, shared with the task.
    private Map<String, String> fileMap;

    // Number of leading path components to strip from patch targets.
    private final int STRIP = 3;

    public ContextProvider(Map<String, String> fileMap)
    {
        this.fileMap = fileMap;
    }

    /**
     * Normalizes separators to '/' and drops the first {@code STRIP}
     * components of the given target path.
     */
    public String strip(String target)
    {
        target = target.replace('\\', '/');
        int index = 0;
        for (int x = 0; x < STRIP; x++)
        {
            index = target.indexOf('/', index) + 1;
        }
        return target.substring(index);
    }

    @Override
    public List<String> getData(String target)
    {
        target = strip(target);
        if (fileMap.containsKey(target))
        {
            // Split on any line-ending style; the patcher works line-by-line.
            // Arrays.asList + copy replaces the previous element-by-element loop.
            String[] lines = fileMap.get(target).split("\r\n|\r|\n");
            return new ArrayList<String>(Arrays.asList(lines));
        }
        return null; // unknown target: the patcher reports it as a failure
    }

    @Override
    public void setData(String target, List<String> data)
    {
        target = strip(target);
        fileMap.put(target, Joiner.on(Constants.NEWLINE).join(data));
    }
}
/**
 * Groups the inputs of a single patch stage: a display name, the location of
 * the patches (file, directory, or zip/jar), and optional inject source dirs.
 */
private final class ResourceHolder
{
    // Stage name, used only for logging.
    final String name;
    // Patch location; may be null for inject-only stages.
    final DelayedFile patchDir;
    // Source trees whose files are injected before patching; may be empty.
    final List<DelayedFile> srcDirs;

    public ResourceHolder(String name, DelayedFile patchDir, List<DelayedFile> srcDirs)
    {
        this.name = name;
        this.patchDir = patchDir;
        this.srcDirs = srcDirs;
    }

    public ResourceHolder(String name, DelayedFile patchDir)
    {
        this.name = name;
        this.patchDir = patchDir;
        this.srcDirs = new ArrayList<DelayedFile>(0);
    }

    /**
     * Resolves the patch location: a directory becomes a file tree, a zip/jar
     * is read lazily as a zip tree, anything else is wrapped as a single file.
     */
    public FileCollection getPatchFiles()
    {
        File patch = getProject().file(patchDir);
        if (patch.isDirectory())
            return getProject().fileTree(patch);
        else if (patch.getPath().endsWith("zip") || patch.getPath().endsWith("jar"))
            return getProject().zipTree(patch);
        else
            return getProject().files(patch);
    }

    /**
     * @return all inject sources combined into one (possibly empty) collection
     */
    public FileCollection getInjects()
    {
        ArrayList<FileCollection> trees = new ArrayList<FileCollection>(srcDirs.size());
        for (DelayedFile f : srcDirs)
            trees.add(getProject().fileTree(f.call()));
        return getProject().files(trees);
    }

    /**
     * @return every inject file paired with the root it is relative to; a
     *         plain (non-directory) entry is taken relative to its parent
     */
    public List<RelFile> getRelInjects()
    {
        LinkedList<RelFile> files = new LinkedList<RelFile>();
        for (DelayedFile df : srcDirs)
        {
            File dir = df.call();
            if (dir.isDirectory())
            {
                for (File f : getProject().fileTree(dir))
                {
                    files.add(new RelFile(f, dir));
                }
            }
            else
            {
                files.add(new RelFile(dir, dir.getParentFile()));
            }
        }
        return files;
    }
}
/**
 * A file together with the root directory it is considered relative to.
 */
private static final class RelFile
{
    public final File file;
    public final File root;

    public RelFile(File file, File root)
    {
        this.file = file;
        this.root = root;
    }

    /**
     * Computes the path of {@link #file} relative to {@link #root}, using
     * forward slashes regardless of platform.
     */
    public String getRelative() throws IOException
    {
        final String filePath = file.getCanonicalPath();
        final String rootPath = root.getCanonicalPath();
        // Drop the root prefix plus its trailing separator, then normalize slashes.
        final String relative = filePath.substring(rootPath.length() + 1);
        return relative.replace('\\', '/');
    }
}
}
| src/main/java/net/minecraftforge/gradle/tasks/ProcessSrcJarTask.java | package net.minecraftforge.gradle.tasks;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import net.minecraftforge.gradle.common.Constants;
import net.minecraftforge.gradle.delayed.DelayedFile;
import net.minecraftforge.gradle.patching.ContextualPatch;
import net.minecraftforge.gradle.patching.ContextualPatch.PatchStatus;
import net.minecraftforge.gradle.tasks.abstractutil.EditJarTask;
import org.gradle.api.file.FileCollection;
import org.gradle.api.logging.LogLevel;
import org.gradle.api.tasks.Input;
import org.gradle.api.tasks.InputFiles;
import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
import com.google.common.io.Files;
public class ProcessSrcJarTask extends EditJarTask
{
private List<ResourceHolder> stages = new LinkedList<ResourceHolder>();
@Input
private int maxFuzz = 0;
private ContextProvider PROVIDER;
/**
 * Hook from {@link EditJarTask}: sources are patched later via the shared
 * source map, so individual files are returned unmodified here.
 */
@Override
public String asRead(String file)
{
    return file;
}

@Override
public void doStuffBefore() throws Throwable
{
    // Bind the patch context provider to the freshly read source map so that
    // patch targets resolve against the jar contents.
    PROVIDER = new ContextProvider(sourceMap);
}
/**
 * For each stage, first copies its inject files into the source/resource
 * maps (skipping entries already present), then applies its patches.
 */
@Override
public void doStuffMiddle() throws Throwable
{
    for (ResourceHolder stage : stages)
    {
        if (!stage.srcDirs.isEmpty())
        {
            getLogger().lifecycle("Injecting {} files", stage.name);
            for (RelFile rel : stage.getRelInjects())
            {
                String relative = rel.getRelative();
                // BUG FIX: the resource check previously tested the literal
                // string "relative" instead of the variable, so duplicate
                // resources were silently re-injected.
                if (sourceMap.containsKey(relative) || resourceMap.containsKey(relative))
                    continue; //ignore duplicates.
                if (relative.endsWith(".java"))
                {
                    sourceMap.put(relative, Files.toString(rel.file, Charset.defaultCharset()));
                }
                else
                {
                    resourceMap.put(relative, Files.asByteSource(rel.file).read());
                }
            }
        }
        if (stage.patchDir != null)
        {
            getLogger().lifecycle("Applying {} patches", stage.name);
            applyPatchStage(stage.name, stage.getPatchFiles());
        }
    }
}
/**
 * Applies every patch of the given stage against the in-memory source map,
 * writing a {@code .rej} file per failed target and logging fuzzed hunks.
 *
 * @param stage      stage name, used only for logging
 * @param patchFiles the collection containing the stage's .patch files
 * @throws Throwable if reading a patch file fails
 */
public void applyPatchStage(String stage, FileCollection patchFiles) throws Throwable
{
    getLogger().info("Reading patches for stage {}", stage);
    ArrayList<PatchedFile> patches = readPatches(patchFiles);
    boolean fuzzed = false;
    getLogger().info("Applying patches for stage {}", stage);
    // NOTE(review): 'failure' is recorded below but never thrown or returned
    // from this method - confirm whether a hard failure is intended here.
    Throwable failure = null;
    for (PatchedFile patch : patches)
    {
        List<ContextualPatch.PatchReport> errors = patch.patch.patch(false);
        for (ContextualPatch.PatchReport report : errors)
        {
            // catch failed patches
            if (!report.getStatus().isSuccess())
            {
                // Start a fresh reject file for this target.
                File reject = patch.makeRejectFile();
                if (reject.exists())
                {
                    reject.delete();
                }
                getLogger().log(LogLevel.ERROR, "Patching failed: {} {}", PROVIDER.strip(report.getTarget()), report.getFailure().getMessage());
                // now spit the hunks
                int failed = 0;
                for (ContextualPatch.HunkReport hunk : report.getHunks())
                {
                    // catch the failed hunks
                    if (!hunk.getStatus().isSuccess())
                    {
                        failed++;
                        getLogger().error("  " + hunk.getHunkID() + ": " + (hunk.getFailure() != null ? hunk.getFailure().getMessage() : "") + " @ " + hunk.getIndex());
                        Files.append(String.format("++++ REJECTED PATCH %d\n", hunk.getHunkID()), reject, Charsets.UTF_8);
                        Files.append(Joiner.on('\n').join(hunk.hunk.lines), reject, Charsets.UTF_8);
                        Files.append(String.format("\n++++ END PATCH\n"), reject, Charsets.UTF_8);
                    }
                    else if (hunk.getStatus() == PatchStatus.Fuzzed)
                    {
                        getLogger().info("  " + hunk.getHunkID() + " fuzzed " + hunk.getFuzz() + "!");
                    }
                }
                getLogger().log(LogLevel.ERROR, "  {}/{} failed", failed, report.getHunks().size());
                getLogger().log(LogLevel.ERROR, "  Rejects written to {}", reject.getAbsolutePath());
                if (failure == null)
                    failure = report.getFailure();
            }
            // catch fuzzed patches (applied, but not at the expected lines)
            else if (report.getStatus() == ContextualPatch.PatchStatus.Fuzzed)
            {
                getLogger().log(LogLevel.INFO, "Patching fuzzed: {}", PROVIDER.strip(report.getTarget()));
                // set the boolean for later use
                fuzzed = true;
                // now spit the hunks
                for (ContextualPatch.HunkReport hunk : report.getHunks())
                {
                    // catch the fuzzed hunks
                    if (hunk.getStatus() == PatchStatus.Fuzzed)
                    {
                        getLogger().info("  {} fuzzed {}!", hunk.getHunkID(), hunk.getFuzz());
                    }
                }
                if (failure == null)
                    failure = report.getFailure();
            }
            // successful patches
            else
            {
                getLogger().info("Patch succeeded: {}", PROVIDER.strip(report.getTarget()));
            }
        }
    }
    if (fuzzed)
    {
        getLogger().lifecycle("Patches Fuzzed!");
    }
}
private ArrayList<PatchedFile> readPatches(FileCollection patchFiles) throws IOException
{
ArrayList<PatchedFile> patches = new ArrayList<PatchedFile>();
for (File file : patchFiles.getFiles())
{
if (file.getPath().endsWith(".patch"))
{
patches.add(readPatch(file));
}
}
return patches;
}
private PatchedFile readPatch(File file) throws IOException
{
getLogger().debug("Reading patch file: {}", file);
return new PatchedFile(file);
}
@InputFiles
public FileCollection getAllPatches()
{
FileCollection col = null;
for (ResourceHolder holder : stages)
{
if (holder.patchDir == null)
continue;
else if (col == null)
col = holder.getPatchFiles();
else
col = getProject().files(col, holder.getPatchFiles());
}
return col;
}
@InputFiles
public FileCollection getAllInjects()
{
FileCollection col = null;
for (ResourceHolder holder : stages)
if (col == null)
col = holder.getInjects();
else
col = getProject().files(col, holder.getInjects());
return col;
}
public void addStage(String name, DelayedFile patchDir, DelayedFile... injects)
{
stages.add(new ResourceHolder(name, patchDir, Arrays.asList(injects)));
}
public void addStage(String name, DelayedFile patchDir)
{
stages.add(new ResourceHolder(name, patchDir));
}
@Override
public void doStuffAfter() throws Throwable
{
}
public int getMaxFuzz()
{
return maxFuzz;
}
public void setMaxFuzz(int maxFuzz)
{
this.maxFuzz = maxFuzz;
}
private class PatchedFile
{
public final File fileToPatch;
public final ContextualPatch patch;
public PatchedFile(File file) throws IOException
{
this.fileToPatch = file;
this.patch = ContextualPatch.create(Files.toString(file, Charset.defaultCharset()), PROVIDER).setAccessC14N(true).setMaxFuzz(getMaxFuzz());
}
public File makeRejectFile()
{
return new File(fileToPatch.getParentFile(), fileToPatch.getName() + ".rej");
}
}
/**
* A private inner class to be used with the FmlPatches
*/
private class ContextProvider implements ContextualPatch.IContextProvider
{
private Map<String, String> fileMap;
private final int STRIP = 3;
public ContextProvider(Map<String, String> fileMap)
{
this.fileMap = fileMap;
}
public String strip(String target)
{
target = target.replace('\\', '/');
int index = 0;
for (int x = 0; x < STRIP; x++)
{
index = target.indexOf('/', index) + 1;
}
return target.substring(index);
}
@Override
public List<String> getData(String target)
{
target = strip(target);
if (fileMap.containsKey(target))
{
String[] lines = fileMap.get(target).split("\r\n|\r|\n");
List<String> ret = new ArrayList<String>();
for (String line : lines)
{
ret.add(line);
}
return ret;
}
return null;
}
@Override
public void setData(String target, List<String> data)
{
target = strip(target);
fileMap.put(target, Joiner.on(Constants.NEWLINE).join(data));
}
}
/**
* A little resource holder to make my life a teeny bit easier..
*/
private final class ResourceHolder
{
final String name;
final DelayedFile patchDir;
final List<DelayedFile> srcDirs;
public ResourceHolder(String name, DelayedFile patchDir, List<DelayedFile> srcDirs)
{
this.name = name;
this.patchDir = patchDir;
this.srcDirs = srcDirs;
}
public ResourceHolder(String name, DelayedFile patchDir)
{
this.name = name;
this.patchDir = patchDir;
this.srcDirs = new ArrayList<DelayedFile>(0);
}
public FileCollection getPatchFiles()
{
File patch = getProject().file(patchDir);
if (patch.isDirectory())
return getProject().fileTree(patch);
else if (patch.getPath().endsWith("zip") || patch.getPath().endsWith("jar"))
return getProject().zipTree(patch);
else
return getProject().files(patch);
}
public FileCollection getInjects()
{
ArrayList<FileCollection> trees = new ArrayList<FileCollection>(srcDirs.size());
for (DelayedFile f : srcDirs)
trees.add(getProject().fileTree(f.call()));
return getProject().files(trees);
}
public List<RelFile> getRelInjects()
{
LinkedList<RelFile> files = new LinkedList<RelFile>();
for (DelayedFile df : srcDirs)
{
File dir = df.call();
if (dir.isDirectory())
{
for (File f : getProject().fileTree(dir))
{
files.add(new RelFile(f, dir));
}
}
else
{
files.add(new RelFile(dir, dir.getParentFile()));
}
}
return files;
}
}
private static final class RelFile
{
public final File file;
public final File root;
public RelFile(File file, File root)
{
this.file = file;
this.root = root;
}
public String getRelative() throws IOException
{
return file.getCanonicalPath().substring(root.getCanonicalPath().length() + 1).replace('\\', '/');
}
}
}
| fixed potential duplicate resources problem
| src/main/java/net/minecraftforge/gradle/tasks/ProcessSrcJarTask.java | fixed potential duplicate resources problem |
|
Java | lgpl-2.1 | error: pathspec 'LGPL/CommonSoftware/acsGUIs/acsGUIutil/src/alma/acs/gui/widgets/ExtendedTextArea.java' did not match any file(s) known to git
| b64585668478871985af39abe0bd3ada57a389cf | 1 | jbarriosc/ACSUFRO,jbarriosc/ACSUFRO,ACS-Community/ACS,jbarriosc/ACSUFRO,csrg-utfsm/acscb,csrg-utfsm/acscb,ACS-Community/ACS,csrg-utfsm/acscb,csrg-utfsm/acscb,jbarriosc/ACSUFRO,csrg-utfsm/acscb,ACS-Community/ACS,jbarriosc/ACSUFRO,ACS-Community/ACS,ACS-Community/ACS,ACS-Community/ACS,csrg-utfsm/acscb,jbarriosc/ACSUFRO,jbarriosc/ACSUFRO,jbarriosc/ACSUFRO,csrg-utfsm/acscb,jbarriosc/ACSUFRO,ACS-Community/ACS,ACS-Community/ACS,csrg-utfsm/acscb | /*
ALMA - Atacama Large Millimiter Array
* Copyright (c) European Southern Observatory, 2013
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package alma.acs.gui.widgets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.io.File;
import java.io.FileWriter;
import javax.swing.JFileChooser;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPopupMenu;
import javax.swing.JTextArea;
import javax.swing.filechooser.FileNameExtensionFilter;
import javax.swing.text.Element;
import alma.acs.gui.util.threadsupport.EDTExecutor;
/**
* <CODE>ExtendedTextArea</CODE> is a {@link JTextArea} written to replace
* <CODE>com.cosylab.gui.components.r2.SmartTextArea</CODE>.
* It is a light component with a minimum set of functionalities compared to that of
* the abeans <CODE>SmartTextArea</CODE> because most of such functionalities have
* never been used in ALMA.
* <P>
* This widget allows to easily add messages controlling the number of messages
* displayed to avoid out of memory while running for long time.
* It allows to save the content of the widget in a file by means of a popup menu.
* <P>
* The widget displays at most {@link #maxNumOfMessages} (default is {@link #defaultNumOfMessages})
* at a given time: when a new message arrives, the oldest one is removed if it is the case.
*
* @author acaproni
* @since ACS 12.1
*/
/**
 * <CODE>ExtendedTextArea</CODE> is a {@link JTextArea} written to replace
 * <CODE>com.cosylab.gui.components.r2.SmartTextArea</CODE>.
 * It is a light component with a minimum set of functionalities compared to
 * the abeans <CODE>SmartTextArea</CODE>.
 * <P>
 * The widget displays at most {@link #maxNumOfMessages} messages (default
 * {@link #defaultNumOfMessages}) at a given time: when a new message arrives,
 * the oldest one is removed if it is the case. A popup menu allows saving the
 * content of the widget into a file.
 *
 * @author acaproni
 * @since ACS 12.1
 */
public class ExtendedTextArea extends JTextArea implements MouseListener {

    /**
     * The popup menu shown when the user presses the right mouse button
     * over the <CODE>ExtendedTextArea</CODE> component.
     */
    class PopupMenu extends JPopupMenu {

        /**
         * The menu item to save the content of the widget.
         */
        public final JMenuItem saveMI;

        /**
         * Constructor: builds the menu and wires the save action.
         */
        public PopupMenu() {
            saveMI = new JMenuItem("Save");
            add(saveMI);
            saveMI.addActionListener(new ActionListener() {
                @Override
                public void actionPerformed(ActionEvent e) {
                    final JFileChooser chooser = new JFileChooser();
                    FileNameExtensionFilter filter = new FileNameExtensionFilter("Text files", "txt");
                    chooser.setFileFilter(filter);
                    int returnVal = chooser.showSaveDialog(ExtendedTextArea.this);
                    if (returnVal != JFileChooser.APPROVE_OPTION) {
                        return;
                    }
                    // Write the file on a background thread to keep the EDT responsive.
                    Thread saveThread = new Thread(new Runnable() {
                        @Override
                        public void run() {
                            ExtendedTextArea.this.saveContent(chooser.getSelectedFile(), ExtendedTextArea.this.getText());
                        }
                    }, "ExtendedTextArea.saveThread");
                    saveThread.setDaemon(true);
                    saveThread.start();
                }
            });
        }
    }

    /**
     * The default of the max number of messages displayed by the widget.
     */
    public static final int defaultNumOfMessages = 500;

    // Upper bound on the number of lines kept in the document.
    private int maxNumOfMessages = ExtendedTextArea.defaultNumOfMessages;

    // The right-click popup menu; built on the EDT in initGUI().
    private PopupMenu menu;

    /**
     * Constructor.
     *
     * @param maxNumOfMessages the max number of messages displayed by the widget
     */
    public ExtendedTextArea(int maxNumOfMessages) {
        this.maxNumOfMessages = maxNumOfMessages;
        EDTExecutor.instance().execute(new Runnable() {
            @Override
            public void run() {
                initGUI();
            }
        });
    }

    /**
     * Constructor with the default max number of messages
     * ({@link #defaultNumOfMessages}).
     */
    public ExtendedTextArea() {
        this(defaultNumOfMessages);
    }

    /**
     * Initializes the widget; must run on the EDT.
     */
    private void initGUI() {
        setEditable(false);
        addMouseListener(this);
        menu = new PopupMenu();
    }

    /**
     * Appends the passed message, removing the oldest lines while the
     * document holds more than {@link #maxNumOfMessages} lines.
     * Safe to call from any thread: the update is dispatched to the EDT.
     */
    public void append(final String msg) {
        if (msg == null || msg.isEmpty()) {
            // Nothing to do
            return;
        }
        EDTExecutor.instance().execute(new Runnable() {
            @Override
            public void run() {
                // Normalize: strip a trailing newline, prepend one when the
                // document already has content, so each message is one line-run.
                String txtToAppend = (msg.endsWith("\n")) ? msg.substring(0, msg.length() - 1) : msg;
                if (getDocument().getDefaultRootElement().getElementCount() > 0) {
                    txtToAppend = "\n" + txtToAppend;
                }
                ExtendedTextArea.super.append(txtToAppend);
                while (getDocument().getDefaultRootElement().getElementCount() > maxNumOfMessages) {
                    Element root = getDocument().getDefaultRootElement();
                    Element first = root.getElement(0);
                    try {
                        // BUG FIX: Document.remove() takes (offset, length); the
                        // previous code passed the end offset as the length, which
                        // only worked because the first element starts at offset 0.
                        getDocument().remove(first.getStartOffset(), first.getEndOffset() - first.getStartOffset());
                    } catch (Throwable t) {
                        t.printStackTrace();
                    }
                }
            }
        });
    }

    /**
     * @see MouseListener
     */
    @Override
    public void mouseClicked(MouseEvent e) {}

    /**
     * @see MouseListener
     */
    @Override
    public void mousePressed(MouseEvent e) {
        // The popup trigger is platform dependent: check on press AND release.
        if (e.isPopupTrigger()) {
            menu.show(e.getComponent(), e.getX(), e.getY());
        }
    }

    /**
     * @see MouseListener
     */
    @Override
    public void mouseReleased(MouseEvent e) {
        if (e.isPopupTrigger()) {
            menu.show(e.getComponent(), e.getX(), e.getY());
        }
    }

    /**
     * @see MouseListener
     */
    @Override
    public void mouseEntered(MouseEvent e) {}

    /**
     * @see MouseListener
     */
    @Override
    public void mouseExited(MouseEvent e) {}

    /**
     * Saves the content of the text area in the passed file.
     * <P>
     * This method must not run in the EDT (it performs blocking file I/O).
     *
     * @param outFile the file to save the content of the text area into
     * @param content the content of the text area
     */
    private void saveContent(File outFile, String content) {
        if (outFile == null) {
            throw new IllegalArgumentException("The file can't be null!");
        }
        if (content == null) {
            throw new IllegalArgumentException("The string to be saved can't be null!");
        }
        if (content.isEmpty()) {
            return;
        }
        FileWriter writer = null;
        String errorMsg = null; // Set in case of error
        try {
            // NOTE: FileWriter uses the platform default charset (legacy API).
            writer = new FileWriter(outFile);
            writer.write(content);
        } catch (Throwable t) {
            errorMsg = "Error writing into " + outFile.getPath() + ": " + t.getMessage();
            System.err.println(errorMsg);
            t.printStackTrace(System.err);
        } finally {
            if (writer != null) {
                try {
                    writer.close();
                } catch (Throwable t) {
                    String msg = "Error closing " + outFile.getPath() + ": " + t.getMessage();
                    // BUG FIX: the previous code discarded the earlier write error
                    // when appending the close error.
                    errorMsg = (errorMsg == null) ? msg : errorMsg + "\n" + msg;
                    System.err.println(msg);
                    t.printStackTrace(System.err);
                }
            }
        }
        // Report the error, if any
        if (errorMsg != null) {
            // BUG FIX: this method runs on a background thread; Swing dialogs
            // must be shown on the EDT, so dispatch instead of calling directly.
            final String dialogMsg = errorMsg;
            EDTExecutor.instance().execute(new Runnable() {
                @Override
                public void run() {
                    JOptionPane.showMessageDialog(ExtendedTextArea.this, dialogMsg, "Error saving data", JOptionPane.ERROR_MESSAGE);
                }
            });
        }
    }
}
| LGPL/CommonSoftware/acsGUIs/acsGUIutil/src/alma/acs/gui/widgets/ExtendedTextArea.java | ExtendedTextArea has been written to replace com.cosylab.gui.components.r2.SmartTextArea for ICTJ:ICT-1111.
It is a JTextArea that shows messages. The number of lines displayed by the widget is bounded to avoid out of memory for long running applications (customizable in the constructor).
The widget has a popup menu that allows the user to save the content in a text file.
git-svn-id: afcf11d89342f630bd950d18a70234a9e277d909@193970 523d945c-050c-4681-91ec-863ad3bb968a
| LGPL/CommonSoftware/acsGUIs/acsGUIutil/src/alma/acs/gui/widgets/ExtendedTextArea.java | ExtendedTextArea has been written to replace com.cosylab.gui.components.r2.SmartTextArea for ICTJ:ICT-1111. It is a JTextArea that shows messages. The number of lines displayed by the widget is bounded to avoid out of memory for long running applications (customizable in the constructor). The widget has a popup menu that allows the user to save the content in a text file. |
|
Java | apache-2.0 | c3dd128b5d5404a173139a8ae7d33ded842f4042 | 0 | asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,semonte/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,FHannes/intellij-community,allotria/intellij-community,da1z/intellij-community,FHannes/intellij-community,signed/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,signed/intellij-community,allotria/intellij-community,da1z/intellij-community,da1z/intellij-community,asedunov/intellij-community,signed/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,xfournet/intellij-community,FHannes/intellij-community,FHannes/intellij-community,semonte/intellij-community,ibinti/intellij-community,apixandru/intellij-community,asedunov/intellij-community,da1z/intellij-community,da1z/intellij-community,signed/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,signed/intellij-community,allotria/intellij-community,xfournet/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,ibinti/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,FHannes/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,ibinti/intellij-community,ibinti/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,allotria/intellij-community,FHannes/intellij-community,xfournet/intellij-community,FHannes/intellij-community,FHannes/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,vvv1559/i
ntellij-community,xfournet/intellij-community,signed/intellij-community,xfournet/intellij-community,ibinti/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,ibinti/intellij-community,signed/intellij-community,apixandru/intellij-community,da1z/intellij-community,signed/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,allotria/intellij-community,da1z/intellij-community,apixandru/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,semonte/intellij-community,asedunov/intellij-community,semonte/intellij-community,da1z/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,allotria/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,allotria/intellij-community,semonte/intellij-community,asedunov/intellij-community,semonte/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,da1z/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,allotria/intellij-community,apixandru/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,FHannes/intellij-commu
nity,mglukhikh/intellij-community,signed/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,signed/intellij-community,suncycheng/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,semonte/intellij-community,signed/intellij-community,semonte/intellij-community,asedunov/intellij-community,apixandru/intellij-community,xfournet/intellij-community,da1z/intellij-community,ibinti/intellij-community,semonte/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,da1z/intellij-community,signed/intellij-community,asedunov/intellij-community,allotria/intellij-community,FHannes/intellij-community,vvv1559/intellij-community | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.memory.tracking;
import com.intellij.debugger.DebuggerManager;
import com.intellij.debugger.engine.DebugProcess;
import com.intellij.debugger.engine.DebugProcessImpl;
import com.intellij.debugger.engine.SuspendContextImpl;
import com.intellij.debugger.engine.evaluation.EvaluateException;
import com.intellij.debugger.engine.events.DebuggerCommandImpl;
import com.intellij.debugger.engine.events.SuspendContextCommandImpl;
import com.intellij.debugger.memory.component.InstancesTracker;
import com.intellij.debugger.memory.component.MemoryViewDebugProcessData;
import com.intellij.debugger.memory.event.InstancesTrackerListener;
import com.intellij.debugger.memory.utils.StackFrameItem;
import com.intellij.debugger.ui.breakpoints.JavaLineBreakpointType;
import com.intellij.debugger.ui.breakpoints.LineBreakpoint;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.xdebugger.XDebugSession;
import com.intellij.xdebugger.XDebuggerManager;
import com.intellij.xdebugger.breakpoints.XBreakpoint;
import com.intellij.xdebugger.impl.XDebuggerManagerImpl;
import com.intellij.xdebugger.impl.breakpoints.LineBreakpointState;
import com.intellij.xdebugger.impl.breakpoints.XLineBreakpointImpl;
import com.sun.jdi.Location;
import com.sun.jdi.Method;
import com.sun.jdi.ObjectReference;
import com.sun.jdi.ReferenceType;
import com.sun.jdi.event.LocatableEvent;
import com.sun.jdi.request.BreakpointRequest;
import com.sun.jdi.request.EventRequest;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.java.debugger.breakpoints.properties.JavaLineBreakpointProperties;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
public class ConstructorInstancesTracker implements TrackerForNewInstances, Disposable, BackgroundTracker {
private static final int TRACKED_INSTANCES_LIMIT = 2000;
private final String myClassName;
private final Project myProject;
private final MyConstructorBreakpoints myBreakpoint;
@Nullable
private HashSet<ObjectReference> myNewObjects = null;
@NotNull
private HashSet<ObjectReference> myTrackedObjects = new HashSet<>();
private volatile boolean myIsBackgroundMode;
private volatile boolean myIsBackgroundTrackingEnabled;
/**
 * Creates a tracker that places a hidden breakpoint in every constructor of
 * {@code ref} so newly created instances can be recorded.
 *
 * @param ref              the class whose constructors are instrumented
 * @param debugSession     the session the tracker belongs to
 * @param instancesTracker holds the background-tracking setting and listeners
 */
public ConstructorInstancesTracker(@NotNull ReferenceType ref,
                                   @NotNull XDebugSession debugSession,
                                   @NotNull InstancesTracker instancesTracker) {
  myProject = debugSession.getProject();
  myIsBackgroundTrackingEnabled = instancesTracker.isBackgroundTrackingEnabled();
  myClassName = ref.name();
  final DebugProcessImpl debugProcess = (DebugProcessImpl)DebuggerManager.getInstance(myProject)
    .getDebugProcess(debugSession.getDebugProcess().getProcessHandler());

  // React to the user toggling background tracking: breakpoints are switched
  // on the debugger manager thread (JDI requests must not be touched directly).
  instancesTracker.addTrackerListener(new InstancesTrackerListener() {
    @Override
    public void backgroundTrackingValueChanged(boolean newState) {
      if (myIsBackgroundTrackingEnabled != newState) {
        myIsBackgroundTrackingEnabled = newState;
        debugProcess.getManagerThread().schedule(new DebuggerCommandImpl() {
          @Override
          protected void action() throws Exception {
            if (newState) {
              myBreakpoint.enable();
            }
            else {
              myBreakpoint.disable();
            }
          }
        });
      }
    }
  }, this);

  // Back the invisible line breakpoint with a synthetic XBreakpoint that is
  // never registered with the breakpoint manager UI.
  final JavaLineBreakpointType breakPointType = new JavaLineBreakpointType();
  final XBreakpoint bpn = new XLineBreakpointImpl<>(breakPointType,
                                                    ((XDebuggerManagerImpl)XDebuggerManager.getInstance(myProject))
                                                      .getBreakpointManager(),
                                                    new JavaLineBreakpointProperties(),
                                                    new LineBreakpointState<>());

  myBreakpoint = new MyConstructorBreakpoints(myProject, bpn);
  myBreakpoint.createRequestForPreparedClass(debugProcess, ref);
  // On disposal: tear down the JDI requests, again on the manager thread.
  Disposer.register(myBreakpoint, () -> debugProcess.getManagerThread().schedule(new DebuggerCommandImpl() {
    @Override
    protected void action() throws Exception {
      disable();
      debugProcess.getRequestsManager().deleteRequest(myBreakpoint);
      myBreakpoint.delete();
    }
  }));
}
/**
 * Discards the committed "new instances" snapshot: re-enables GC for those
 * objects, re-arms the constructor breakpoints (unless suppressed by
 * background mode), and releases the stack data cached for the memory view.
 */
public void obsolete() {
  if (myNewObjects != null) {
    // Tracked objects had collection disabled; let the VM reclaim them now.
    myNewObjects.forEach(ObjectReference::enableCollection);
  }

  myNewObjects = null;
  if (!myIsBackgroundMode || myIsBackgroundTrackingEnabled) {
    myBreakpoint.enable();
  }

  final XDebugSession session = XDebuggerManager.getInstance(myProject).getCurrentSession();
  if (session != null) {
    final DebugProcess process = DebuggerManager.getInstance(myProject).getDebugProcess(session.getDebugProcess().getProcessHandler());
    final MemoryViewDebugProcessData data = process.getUserData(MemoryViewDebugProcessData.KEY);
    if (data != null) {
      data.getTrackedStacks().release();
    }
  }
}

/**
 * Publishes the instances tracked since the last commit as the "new
 * instances" set and starts a fresh tracking set.
 */
public void commitTracked() {
  myNewObjects = myTrackedObjects;
  myTrackedObjects = new HashSet<>();
}
/**
 * Returns the instances created during the previous tracking interval, or an
 * empty list when no snapshot has been committed yet.
 */
@NotNull
@Override
public List<ObjectReference> getNewInstances() {
  // Collections.emptyList() replaces the raw-typed EMPTY_LIST constant,
  // avoiding the unchecked-assignment warning while behaving identically.
  return myNewObjects == null ? Collections.emptyList() : new ArrayList<>(myNewObjects);
}
/**
 * @return the number of committed new instances, 0 before the first commit
 */
@Override
public int getCount() {
  return myNewObjects == null ? 0 : myNewObjects.size();
}

// Re-arms the constructor breakpoints so new instances are tracked again.
public void enable() {
  myBreakpoint.enable();
}

// Suspends tracking by disabling the constructor breakpoints.
public void disable() {
  myBreakpoint.disable();
}

/**
 * @return true once a snapshot has been committed via {@link #commitTracked()}
 */
@Override
public boolean isReady() {
  return myNewObjects != null;
}

@Override
public void dispose() {
  // Disposing the breakpoint also deletes its JDI requests (wired up in the
  // constructor through Disposer.register).
  Disposer.dispose(myBreakpoint);
}

@Override
public void setBackgroundMode(boolean isBackgroundMode) {
  if (myIsBackgroundMode == isBackgroundMode) {
    return; // no state change
  }

  myIsBackgroundMode = isBackgroundMode;
  if (isBackgroundMode) {
    doEnableBackgroundMode();
  }
  else {
    doDisableBackgroundMode();
  }
}

// Leaving background mode: always resume tracking.
private void doDisableBackgroundMode() {
  myBreakpoint.enable();
}

// Entering background mode: keep tracking only if background tracking is on.
private void doEnableBackgroundMode() {
  if (!myIsBackgroundTrackingEnabled) {
    myBreakpoint.disable();
  }
}
private final class MyConstructorBreakpoints extends LineBreakpoint<JavaLineBreakpointProperties> implements Disposable {
private final List<BreakpointRequest> myRequests = new ArrayList<>();
private volatile boolean myIsEnabled = false;
private volatile boolean myIsDeleted = false;
MyConstructorBreakpoints(Project project, XBreakpoint xBreakpoint) {
super(project, xBreakpoint);
setVisible(false);
}
@Override
protected void createRequestForPreparedClass(DebugProcessImpl debugProcess, ReferenceType classType) {
classType.methods().stream().filter(Method::isConstructor).forEach(cons -> {
Location loc = cons.location();
BreakpointRequest breakpointRequest = debugProcess.getRequestsManager().createBreakpointRequest(this, loc);
myRequests.add(breakpointRequest);
});
if (!myIsBackgroundMode || myIsBackgroundTrackingEnabled) {
enable();
}
}
@Override
public void reload() {
}
void delete() {
myIsDeleted = true;
}
@Override
public void dispose() {
}
@Override
public boolean processLocatableEvent(SuspendContextCommandImpl action, LocatableEvent event)
throws EventProcessingException {
try {
SuspendContextImpl suspendContext = action.getSuspendContext();
if (suspendContext != null) {
final MemoryViewDebugProcessData data = suspendContext.getDebugProcess().getUserData(MemoryViewDebugProcessData.KEY);
ObjectReference thisRef = getThisObject(suspendContext, event);
if (thisRef.referenceType().name().equals(myClassName) && data != null) {
thisRef.disableCollection();
myTrackedObjects.add(thisRef);
data.getTrackedStacks().addStack(thisRef, StackFrameItem.createFrames(suspendContext, false));
}
}
}
catch (EvaluateException e) {
return false;
}
if (myTrackedObjects.size() >= TRACKED_INSTANCES_LIMIT) {
disable();
}
return false;
}
void enable() {
if (!myIsEnabled && !myIsDeleted) {
myRequests.forEach(EventRequest::enable);
myIsEnabled = true;
}
}
void disable() {
if (myIsEnabled && !myIsDeleted) {
myRequests.forEach(EventRequest::disable);
myIsEnabled = false;
}
}
}
}
| java/debugger/impl/src/com/intellij/debugger/memory/tracking/ConstructorInstancesTracker.java | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.memory.tracking;
import com.intellij.debugger.DebuggerManager;
import com.intellij.debugger.engine.DebugProcess;
import com.intellij.debugger.engine.DebugProcessImpl;
import com.intellij.debugger.engine.SuspendContextImpl;
import com.intellij.debugger.engine.evaluation.EvaluateException;
import com.intellij.debugger.engine.events.DebuggerCommandImpl;
import com.intellij.debugger.engine.events.SuspendContextCommandImpl;
import com.intellij.debugger.memory.component.InstancesTracker;
import com.intellij.debugger.memory.component.MemoryViewDebugProcessData;
import com.intellij.debugger.memory.event.InstancesTrackerListener;
import com.intellij.debugger.memory.utils.StackFrameItem;
import com.intellij.debugger.ui.breakpoints.JavaLineBreakpointType;
import com.intellij.debugger.ui.breakpoints.LineBreakpoint;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.xdebugger.XDebugSession;
import com.intellij.xdebugger.XDebuggerManager;
import com.intellij.xdebugger.breakpoints.XBreakpoint;
import com.intellij.xdebugger.impl.XDebuggerManagerImpl;
import com.intellij.xdebugger.impl.breakpoints.LineBreakpointState;
import com.intellij.xdebugger.impl.breakpoints.XLineBreakpointImpl;
import com.sun.jdi.Location;
import com.sun.jdi.Method;
import com.sun.jdi.ObjectReference;
import com.sun.jdi.ReferenceType;
import com.sun.jdi.event.LocatableEvent;
import com.sun.jdi.request.BreakpointRequest;
import com.sun.jdi.request.EventRequest;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.java.debugger.breakpoints.properties.JavaLineBreakpointProperties;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
public class ConstructorInstancesTracker implements TrackerForNewInstances, Disposable, BackgroundTracker {
private static final int TRACKED_INSTANCES_LIMIT = 2000;
private final ReferenceType myReference;
private final Project myProject;
private final MyConstructorBreakpoints myBreakpoint;
@Nullable
private HashSet<ObjectReference> myNewObjects = null;
@NotNull
private HashSet<ObjectReference> myTrackedObjects = new HashSet<>();
private volatile boolean myIsBackgroundMode;
private volatile boolean myIsBackgroundTrackingEnabled;
public ConstructorInstancesTracker(@NotNull ReferenceType ref,
@NotNull XDebugSession debugSession,
@NotNull InstancesTracker instancesTracker) {
myReference = ref;
myProject = debugSession.getProject();
myIsBackgroundTrackingEnabled = instancesTracker.isBackgroundTrackingEnabled();
final DebugProcessImpl debugProcess = (DebugProcessImpl)DebuggerManager.getInstance(myProject)
.getDebugProcess(debugSession.getDebugProcess().getProcessHandler());
instancesTracker.addTrackerListener(new InstancesTrackerListener() {
@Override
public void backgroundTrackingValueChanged(boolean newState) {
if (myIsBackgroundTrackingEnabled != newState) {
myIsBackgroundTrackingEnabled = newState;
debugProcess.getManagerThread().schedule(new DebuggerCommandImpl() {
@Override
protected void action() throws Exception {
if (newState) {
myBreakpoint.enable();
}
else {
myBreakpoint.disable();
}
}
});
}
}
}, this);
final JavaLineBreakpointType breakPointType = new JavaLineBreakpointType();
final XBreakpoint bpn = new XLineBreakpointImpl<>(breakPointType,
((XDebuggerManagerImpl)XDebuggerManager.getInstance(myProject))
.getBreakpointManager(),
new JavaLineBreakpointProperties(),
new LineBreakpointState<>());
myBreakpoint = new MyConstructorBreakpoints(myProject, bpn);
myBreakpoint.createRequestForPreparedClass(debugProcess, myReference);
Disposer.register(myBreakpoint, () -> debugProcess.getManagerThread().schedule(new DebuggerCommandImpl() {
@Override
protected void action() throws Exception {
disable();
debugProcess.getRequestsManager().deleteRequest(myBreakpoint);
myBreakpoint.delete();
}
}));
}
public void obsolete() {
if (myNewObjects != null) {
myNewObjects.forEach(ObjectReference::enableCollection);
}
myNewObjects = null;
if (!myIsBackgroundMode || myIsBackgroundTrackingEnabled) {
myBreakpoint.enable();
}
final XDebugSession session = XDebuggerManager.getInstance(myProject).getCurrentSession();
if (session != null) {
final DebugProcess process = DebuggerManager.getInstance(myProject).getDebugProcess(session.getDebugProcess().getProcessHandler());
final MemoryViewDebugProcessData data = process.getUserData(MemoryViewDebugProcessData.KEY);
if (data != null) {
data.getTrackedStacks().release();
}
}
}
public void commitTracked() {
myNewObjects = myTrackedObjects;
myTrackedObjects = new HashSet<>();
}
@NotNull
@Override
public List<ObjectReference> getNewInstances() {
return myNewObjects == null ? Collections.EMPTY_LIST : new ArrayList<>(myNewObjects);
}
@Override
public int getCount() {
return myNewObjects == null ? 0 : myNewObjects.size();
}
public void enable() {
myBreakpoint.enable();
}
public void disable() {
myBreakpoint.disable();
}
@Override
public boolean isReady() {
return myNewObjects != null;
}
@Override
public void dispose() {
Disposer.dispose(myBreakpoint);
}
@Override
public void setBackgroundMode(boolean isBackgroundMode) {
if (myIsBackgroundMode == isBackgroundMode) {
return;
}
myIsBackgroundMode = isBackgroundMode;
if (isBackgroundMode) {
doEnableBackgroundMode();
}
else {
doDisableBackgroundMode();
}
}
private void doDisableBackgroundMode() {
myBreakpoint.enable();
}
private void doEnableBackgroundMode() {
if (!myIsBackgroundTrackingEnabled) {
myBreakpoint.disable();
}
}
private final class MyConstructorBreakpoints extends LineBreakpoint<JavaLineBreakpointProperties> implements Disposable {
private final List<BreakpointRequest> myRequests = new ArrayList<>();
private volatile boolean myIsEnabled = false;
private volatile boolean myIsDeleted = false;
MyConstructorBreakpoints(Project project, XBreakpoint xBreakpoint) {
super(project, xBreakpoint);
setVisible(false);
}
@Override
protected void createRequestForPreparedClass(DebugProcessImpl debugProcess, ReferenceType classType) {
classType.methods().stream().filter(Method::isConstructor).forEach(cons -> {
Location loc = cons.location();
BreakpointRequest breakpointRequest = debugProcess.getRequestsManager().createBreakpointRequest(this, loc);
myRequests.add(breakpointRequest);
});
if (!myIsBackgroundMode || myIsBackgroundTrackingEnabled) {
enable();
}
}
@Override
public void reload() {
}
void delete() {
myIsDeleted = true;
}
@Override
public void dispose() {
}
@Override
public boolean processLocatableEvent(SuspendContextCommandImpl action, LocatableEvent event)
throws EventProcessingException {
try {
SuspendContextImpl suspendContext = action.getSuspendContext();
if (suspendContext != null) {
final MemoryViewDebugProcessData data = suspendContext.getDebugProcess().getUserData(MemoryViewDebugProcessData.KEY);
ObjectReference thisRef = getThisObject(suspendContext, event);
if (myReference.equals(thisRef.referenceType()) && data != null) {
thisRef.disableCollection();
myTrackedObjects.add(thisRef);
data.getTrackedStacks().addStack(thisRef, StackFrameItem.createFrames(suspendContext, false));
}
}
}
catch (EvaluateException e) {
return false;
}
if (myTrackedObjects.size() >= TRACKED_INSTANCES_LIMIT) {
disable();
}
return false;
}
void enable() {
if (!myIsEnabled && !myIsDeleted) {
myRequests.forEach(EventRequest::enable);
myIsEnabled = true;
}
}
void disable() {
if (myIsEnabled && !myIsDeleted) {
myRequests.forEach(EventRequest::disable);
myIsEnabled = false;
}
}
}
}
| IDEA-170771 Avoid storing of a ReferenceType in the tracker
| java/debugger/impl/src/com/intellij/debugger/memory/tracking/ConstructorInstancesTracker.java | IDEA-170771 Avoid storing of a ReferenceType in the tracker |
|
Java | apache-2.0 | 161f172ce5467bc5cddd0f73136dab4bb287d9d6 | 0 | jimma/wss4j,jimma/wss4j,apache/wss4j,clibois/wss4j,asoldano/wss4j,apache/wss4j,clibois/wss4j,asoldano/wss4j | test/wssec/TestMSFTWebService.java | /*
* Copyright 2003-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package wssec;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.apache.ws.axis.samples.wssec.msft.MSFTGetVersion;
/**
* MSFT Web service Test Case
* <p/>
*
* @author Davanum Srinivas ([email protected])
*/
public class TestMSFTWebService extends TestCase {
/**
* TestMSFTWebService constructor
* <p/>
*
* @param name name of the test
*/
public TestMSFTWebService(String name) {
super(name);
}
/**
* JUnit suite
* <p/>
*
* @return a junit test suite
*/
public static Test suite() {
return new TestSuite(TestMSFTWebService.class);
}
/**
* Main method
* <p/>
*
* @param args command line args
*/
public static void main(String[] args) {
junit.textui.TestRunner.run(suite());
}
public void testUserNameToken() throws Exception {
MSFTGetVersion.main(new String[]{});
}
}
| Delete outdated example. Pls look at interop and test directories
for working examples.
git-svn-id: 10bc45916fe30ae642aa5037c9a4b05727bba413@240697 13f79535-47bb-0310-9956-ffa450edef68
| test/wssec/TestMSFTWebService.java | Delete outdated example. Pls look at interop and test directories for working examples. |
||
Java | apache-2.0 | ed0324b0fa4265806bc7a2b950fbb3c2a1e403de | 0 | ModernMT/MMT,letconex/MMT,letconex/MMT,ModernMT/MMT,letconex/MMT,letconex/MMT,letconex/MMT,letconex/MMT,ModernMT/MMT,ModernMT/MMT,ModernMT/MMT | package eu.modernmt.facade;
import eu.modernmt.engine.Engine;
import eu.modernmt.model.corpus.BilingualCorpus;
import eu.modernmt.model.corpus.Corpus;
import eu.modernmt.model.corpus.impl.parallel.ParallelFileCorpus;
import eu.modernmt.processing.ProcessingException;
import eu.modernmt.training.CleaningPipeline;
import eu.modernmt.training.PreprocessingPipeline;
import eu.modernmt.training.partitioning.FilesCorporaPartition;
import eu.modernmt.training.preprocessing.CorpusWriter;
import eu.modernmt.training.preprocessing.PlainTextWriter;
import eu.modernmt.training.preprocessing.VocabularyEncoderWriter;
import org.apache.commons.io.FileUtils;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Locale;
/**
* Created by davide on 17/08/16.
*/
public class TrainingFacade {
static {
Engine.class.getClass(); // Load engine class for its static initialization
}
public static final int DEFAULT_PARTITION_SIZE = 1200;
public static class TrainingOptions {
public int partitionSize = DEFAULT_PARTITION_SIZE;
public File developmentPartition = null;
public File testPartition = null;
public File vocabulary = null;
}
public void clean(List<BilingualCorpus> bilingualCorpora, File outputDirectory) throws IOException {
BilingualCorpus sample = bilingualCorpora.get(0);
final Locale sourceLanguage = sample.getSourceLanguage();
final Locale targetLanguage = sample.getTargetLanguage();
CleaningPipeline cleaningPipeline = new CleaningPipeline(corpus ->
new ParallelFileCorpus(outputDirectory, corpus.getName(), sourceLanguage, targetLanguage),
sourceLanguage, targetLanguage);
bilingualCorpora.forEach(cleaningPipeline::add);
FileUtils.deleteDirectory(outputDirectory);
FileUtils.forceMkdir(outputDirectory);
cleaningPipeline.process();
}
public void preprocess(List<BilingualCorpus> bilingualCorpora, List<Corpus> monolingualCorpora, Locale sourceLanguage,
Locale targetLanguage, File destFolder) throws ProcessingException, IOException {
preprocess(bilingualCorpora, monolingualCorpora, sourceLanguage, targetLanguage, destFolder, new TrainingOptions());
}
public void preprocess(List<BilingualCorpus> bilingualCorpora, List<Corpus> monolingualCorpora, Locale sourceLanguage,
Locale targetLanguage, File destFolder, TrainingOptions options) throws ProcessingException, IOException {
FilesCorporaPartition mainPartition = new FilesCorporaPartition(destFolder);
CorpusWriter writer;
if (options.vocabulary == null)
writer = new PlainTextWriter();
else
writer = new VocabularyEncoderWriter(options.vocabulary);
PreprocessingPipeline pipeline = new PreprocessingPipeline(mainPartition, sourceLanguage, targetLanguage, writer);
FileUtils.deleteDirectory(destFolder);
if (options.developmentPartition != null) {
FileUtils.deleteDirectory(options.developmentPartition);
pipeline.addExtraPartition(new FilesCorporaPartition(options.developmentPartition, options.partitionSize));
}
if (options.testPartition != null) {
FileUtils.deleteDirectory(options.testPartition);
pipeline.addExtraPartition(new FilesCorporaPartition(options.testPartition, options.partitionSize));
}
pipeline.process(bilingualCorpora, monolingualCorpora);
}
}
| src/core/src/main/java/eu/modernmt/facade/TrainingFacade.java | package eu.modernmt.facade;
import eu.modernmt.model.corpus.BilingualCorpus;
import eu.modernmt.model.corpus.Corpus;
import eu.modernmt.model.corpus.impl.parallel.ParallelFileCorpus;
import eu.modernmt.processing.ProcessingException;
import eu.modernmt.training.CleaningPipeline;
import eu.modernmt.training.PreprocessingPipeline;
import eu.modernmt.training.partitioning.FilesCorporaPartition;
import eu.modernmt.training.preprocessing.CorpusWriter;
import eu.modernmt.training.preprocessing.PlainTextWriter;
import eu.modernmt.training.preprocessing.VocabularyEncoderWriter;
import org.apache.commons.io.FileUtils;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Locale;
/**
* Created by davide on 17/08/16.
*/
public class TrainingFacade {
public static final int DEFAULT_PARTITION_SIZE = 1200;
public static class TrainingOptions {
public int partitionSize = DEFAULT_PARTITION_SIZE;
public File developmentPartition = null;
public File testPartition = null;
public File vocabulary = null;
}
public void clean(List<BilingualCorpus> bilingualCorpora, File outputDirectory) throws IOException {
BilingualCorpus sample = bilingualCorpora.get(0);
final Locale sourceLanguage = sample.getSourceLanguage();
final Locale targetLanguage = sample.getTargetLanguage();
CleaningPipeline cleaningPipeline = new CleaningPipeline(corpus ->
new ParallelFileCorpus(outputDirectory, corpus.getName(), sourceLanguage, targetLanguage),
sourceLanguage, targetLanguage);
bilingualCorpora.forEach(cleaningPipeline::add);
FileUtils.deleteDirectory(outputDirectory);
FileUtils.forceMkdir(outputDirectory);
cleaningPipeline.process();
}
public void preprocess(List<BilingualCorpus> bilingualCorpora, List<Corpus> monolingualCorpora, Locale sourceLanguage,
Locale targetLanguage, File destFolder) throws ProcessingException, IOException {
preprocess(bilingualCorpora, monolingualCorpora, sourceLanguage, targetLanguage, destFolder, new TrainingOptions());
}
public void preprocess(List<BilingualCorpus> bilingualCorpora, List<Corpus> monolingualCorpora, Locale sourceLanguage,
Locale targetLanguage, File destFolder, TrainingOptions options) throws ProcessingException, IOException {
FilesCorporaPartition mainPartition = new FilesCorporaPartition(destFolder);
CorpusWriter writer;
if (options.vocabulary == null)
writer = new PlainTextWriter();
else
writer = new VocabularyEncoderWriter(options.vocabulary);
PreprocessingPipeline pipeline = new PreprocessingPipeline(mainPartition, sourceLanguage, targetLanguage, writer);
FileUtils.deleteDirectory(destFolder);
if (options.developmentPartition != null) {
FileUtils.deleteDirectory(options.developmentPartition);
pipeline.addExtraPartition(new FilesCorporaPartition(options.developmentPartition, options.partitionSize));
}
if (options.testPartition != null) {
FileUtils.deleteDirectory(options.testPartition);
pipeline.addExtraPartition(new FilesCorporaPartition(options.testPartition, options.partitionSize));
}
pipeline.process(bilingualCorpora, monolingualCorpora);
}
}
| Solved missing Engine initialization in TrainingPipeline
| src/core/src/main/java/eu/modernmt/facade/TrainingFacade.java | Solved missing Engine initialization in TrainingPipeline |
|
Java | apache-2.0 | d952258679a6541195e42e2ed428f9ae6a07f900 | 0 | ThiagoGarciaAlves/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,allotria/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,asedunov/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,vvv1559/intellij-community,vvv1559/intellij-community,semonte/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,da1z/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,semonte/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,ibinti/intellij-community,asedunov/intellij-community,FHannes/intellij-community,allotria/intellij-community,da1z/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,youdonghai/intellij-community,da1z/intellij-community,signed/intellij-community,da1z/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,semonte/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,allotria/intellij-community,FHannes/intellij-community,xfournet/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,semonte/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,a
pixandru/intellij-community,signed/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,allotria/intellij-community,da1z/intellij-community,da1z/intellij-community,vvv1559/intellij-community,semonte/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,signed/intellij-community,signed/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,allotria/intellij-community,signed/intellij-community,signed/intellij-community,semonte/intellij-community,asedunov/intellij-community,asedunov/intellij-community,xfournet/intellij-community,allotria/intellij-community,youdonghai/intellij-community,allotria/intellij-community,signed/intellij-community,apixandru/intellij-community,FHannes/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,signed/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,FHannes/intellij-community,asedunov/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,da1z/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,semonte/intellij-community,da1z/i
ntellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,xfournet/intellij-community,asedunov/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,asedunov/intellij-community,da1z/intellij-community,allotria/intellij-community,apixandru/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,allotria/intellij-community,FHannes/intellij-community,semonte/intellij-community,FHannes/intellij-community,da1z/intellij-community,youdonghai/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,signed/intellij-community,FHannes/intellij-community,xfournet/intellij-community,signed/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,semonte/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiTypesUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.Function;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
* User: anna
*/
public class PsiDiamondTypeUtil {
private static final Logger LOG = Logger.getInstance("#" + PsiDiamondTypeUtil.class.getName());
private PsiDiamondTypeUtil() {
}
public static boolean canCollapseToDiamond(final PsiNewExpression expression,
final PsiNewExpression context,
@Nullable final PsiType expectedType) {
return canCollapseToDiamond(expression, context, expectedType, false);
}
public static boolean canChangeContextForDiamond(final PsiNewExpression expression, final PsiType expectedType) {
final PsiNewExpression copy = (PsiNewExpression)expression.copy();
return canCollapseToDiamond(copy, copy, expectedType, true);
}
private static boolean canCollapseToDiamond(final PsiNewExpression expression,
final PsiNewExpression context,
@Nullable final PsiType expectedType,
boolean skipDiamonds) {
if (PsiUtil.getLanguageLevel(context).isAtLeast(LanguageLevel.JDK_1_7)) {
final PsiJavaCodeReferenceElement classReference = expression.getClassOrAnonymousClassReference();
if (classReference != null) {
final PsiReferenceParameterList parameterList = classReference.getParameterList();
if (parameterList != null) {
final PsiTypeElement[] typeElements = parameterList.getTypeParameterElements();
if (typeElements.length > 0) {
if (!skipDiamonds && typeElements.length == 1 && typeElements[0].getType() instanceof PsiDiamondType) return false;
final PsiDiamondTypeImpl.DiamondInferenceResult inferenceResult = PsiDiamondTypeImpl.resolveInferredTypes(expression, context);
if (inferenceResult.getErrorMessage() == null) {
final List<PsiType> types = inferenceResult.getInferredTypes();
PsiType[] typeArguments = null;
if (expectedType instanceof PsiClassType) {
typeArguments = ((PsiClassType)expectedType).getParameters();
}
if (typeArguments == null) {
typeArguments = parameterList.getTypeArguments();
}
if (types.size() == typeArguments.length) {
final PsiMethod method = expression.resolveMethod();
final PsiElement resolve = classReference.resolve();
if (resolve instanceof PsiClass) {
final PsiTypeParameter[] typeParameters = ((PsiClass)resolve).getTypeParameters();
return areTypeArgumentsRedundant(typeArguments, expression, true, method, typeParameters);
}
}
return true;
}
}
}
}
}
return false;
}
public static PsiElement replaceExplicitWithDiamond(PsiElement psiElement) {
if (psiElement instanceof PsiReferenceParameterList) {
if (!FileModificationService.getInstance().prepareFileForWrite(psiElement.getContainingFile())) return psiElement;
final PsiNewExpression expression =
(PsiNewExpression)JavaPsiFacade.getElementFactory(psiElement.getProject()).createExpressionFromText("new a<>()", psiElement);
final PsiJavaCodeReferenceElement classReference = expression.getClassReference();
LOG.assertTrue(classReference != null);
final PsiReferenceParameterList parameterList = classReference.getParameterList();
LOG.assertTrue(parameterList != null);
return psiElement.replace(parameterList);
}
return psiElement;
}
public static PsiElement replaceDiamondWithExplicitTypes(PsiElement element) {
final PsiElement parent = element.getParent();
if (!(parent instanceof PsiJavaCodeReferenceElement)) {
return parent;
}
final PsiJavaCodeReferenceElement javaCodeReferenceElement = (PsiJavaCodeReferenceElement) parent;
final StringBuilder text = new StringBuilder();
text.append(javaCodeReferenceElement.getQualifiedName());
text.append('<');
final PsiNewExpression newExpression = PsiTreeUtil.getParentOfType(element, PsiNewExpression.class);
final PsiDiamondType.DiamondInferenceResult result = PsiDiamondTypeImpl.resolveInferredTypesNoCheck(newExpression, newExpression);
text.append(StringUtil.join(result.getInferredTypes(), new Function<PsiType, String>() {
@Override
public String fun(PsiType psiType) {
return psiType.getCanonicalText();
}
}, ","));
text.append('>');
final PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(element.getProject());
final PsiJavaCodeReferenceElement newReference = elementFactory.createReferenceFromText(text.toString(), element);
return CodeStyleManager.getInstance(javaCodeReferenceElement.getProject()).reformat(javaCodeReferenceElement.replace(newReference));
}
public static PsiExpression expandTopLevelDiamondsInside(PsiExpression expr) {
if (expr instanceof PsiNewExpression) {
final PsiJavaCodeReferenceElement classReference = ((PsiNewExpression)expr).getClassReference();
if (classReference != null) {
final PsiReferenceParameterList parameterList = classReference.getParameterList();
if (parameterList != null) {
final PsiTypeElement[] typeParameterElements = parameterList.getTypeParameterElements();
if (typeParameterElements.length == 1 && typeParameterElements[0].getType() instanceof PsiDiamondType) {
return (PsiExpression)replaceDiamondWithExplicitTypes(parameterList).getParent();
}
}
}
}
return expr;
}
public static String getCollapsedType(PsiType type, PsiElement context) {
String typeText = type.getCanonicalText();
if (PsiUtil.isLanguageLevel7OrHigher(context)) {
final int idx = typeText.indexOf('<');
if (idx >= 0) {
return typeText.substring(0, idx) + "<>";
}
}
return typeText;
}
  /**
   * Determines whether the explicit type arguments of {@code expression} are redundant, i.e.
   * whether type inference would produce exactly the same types if they were removed.
   * <p>
   * Works on a non-physical copy of the call placed in an equivalent typed context, strips the
   * type arguments from the copy and compares the re-inferred types with {@code typeArguments}.
   *
   * @param typeArguments  the explicit type arguments currently present on the call
   * @param expression     the call (method call or {@code new} expression) being checked
   * @param constructorRef {@code true} when checking a constructor / {@code new} expression
   * @param method         the resolved target of the call; may be null for constructor refs
   * @param typeParameters the type parameters the arguments are substituted for
   * @return {@code true} if removing the explicit type arguments would not change inference
   */
  public static boolean areTypeArgumentsRedundant(PsiType[] typeArguments,
                                                  PsiCallExpression expression,
                                                  boolean constructorRef,
                                                  @Nullable PsiMethod method,
                                                  PsiTypeParameter[] typeParameters) {
    try {
      final PsiElement copy;
      final PsiType typeByParent = PsiTypesUtil.getExpectedTypeByParent(expression);
      if (typeByParent != null) {
        // Re-create the call inside an array initializer of the expected type so the copy sees
        // the same target type as the original context.
        final String arrayInitializer = "new " + typeByParent.getCanonicalText() + "[]{0}";
        final Project project = expression.getProject();
        final PsiElementFactory elementFactory = JavaPsiFacade.getInstance(project).getElementFactory();
        PsiNewExpression newExpr = (PsiNewExpression)elementFactory.createExpressionFromText(arrayInitializer, expression);
        //ensure refs to inner classes are collapsed to avoid raw types (container type would be raw in qualified text)
        newExpr = (PsiNewExpression)JavaCodeStyleManager.getInstance(project).shortenClassReferences(newExpr);
        final PsiArrayInitializerExpression initializer = newExpr.getArrayInitializer();
        LOG.assertTrue(initializer != null);
        copy = initializer.getInitializers()[0].replace(expression);
      }
      else {
        final PsiExpressionList argumentList = expression.getArgumentList();
        final int offset = (argumentList != null ? argumentList : expression).getTextRange().getStartOffset();
        final PsiCall call = LambdaUtil.treeWalkUp(expression);
        if (call != null) {
          // The call is nested in an outer call whose inference matters: copy the whole top-level
          // call and locate our expression inside the copy by offset.
          final PsiCall callCopy = LambdaUtil.copyTopLevelCall(call);
          copy = callCopy != null ? callCopy.findElementAt(offset - call.getTextRange().getStartOffset()) : null;
        }
        else {
          // Inside an injected fragment a plain file copy loses the host context, so resolve
          // results on the copy are unreliable — bail out instead of reporting redundancy.
          final InjectedLanguageManager injectedLanguageManager = InjectedLanguageManager.getInstance(expression.getProject());
          if (injectedLanguageManager.getInjectionHost(expression) != null) {
            return false;
          }
          final PsiFile containingFile = expression.getContainingFile();
          final PsiFile fileCopy = (PsiFile)containingFile.copy();
          copy = fileCopy.findElementAt(offset);
          if (method != null && method.getContainingFile() == containingFile) {
            // Re-resolve the target method inside the copied file so identity comparison works.
            final PsiElement startMethodElementInCopy = fileCopy.findElementAt(method.getTextOffset());
            method = PsiTreeUtil.getParentOfType(startMethodElementInCopy, PsiMethod.class);
            if (method == null) {
              //lombok generated builder
              return false;
            }
          }
        }
      }
      final PsiCallExpression exprCopy = PsiTreeUtil.getParentOfType(copy, PsiCallExpression.class, false);
      if (exprCopy != null) {
        final PsiElementFactory elementFactory = JavaPsiFacade.getInstance(exprCopy.getProject()).getElementFactory();
        if (constructorRef) {
          if (!(exprCopy instanceof PsiNewExpression) || !isInferenceEquivalent(typeArguments, elementFactory, (PsiNewExpression)exprCopy)) {
            return false;
          }
        }
        else {
          LOG.assertTrue(method != null);
          if (!isInferenceEquivalent(typeArguments, elementFactory, exprCopy, method, typeParameters)) {
            return false;
          }
        }
      }
    }
    catch (IncorrectOperationException e) {
      // Copy/replace on the non-physical tree failed — be conservative and keep the arguments.
      LOG.info(e);
      return false;
    }
    return true;
  }
  /**
   * Checks that resolving {@code exprCopy} with its explicit type arguments removed still picks
   * {@code method} and infers exactly {@code typeArguments} for {@code typeParameters}.
   *
   * @return {@code true} if inference without explicit arguments is equivalent
   */
  private static boolean isInferenceEquivalent(PsiType[] typeArguments,
                                               PsiElementFactory elementFactory,
                                               PsiCallExpression exprCopy,
                                               PsiMethod method,
                                               PsiTypeParameter[] typeParameters) throws IncorrectOperationException {
    // Replace the copy's type-argument list with an empty one (borrowed from "foo()").
    PsiReferenceParameterList list = ((PsiCallExpression)elementFactory.createExpressionFromText("foo()", null)).getTypeArgumentList();
    exprCopy.getTypeArgumentList().replace(list);
    final JavaResolveResult copyResult = exprCopy.resolveMethodGenerics();
    // Without the explicit arguments a different overload may win — then they are not redundant.
    if (method != copyResult.getElement()) return false;
    final PsiSubstitutor psiSubstitutor = copyResult.getSubstitutor();
    for (int i = 0, length = typeParameters.length; i < length; i++) {
      PsiTypeParameter typeParameter = typeParameters[i];
      final PsiType inferredType = psiSubstitutor.getSubstitutionMap().get(typeParameter);
      if (!typeArguments[i].equals(inferredType)) {
        return false;
      }
      // A primitive-wrapper result can change (un)boxing behavior at the call site when the
      // return type is the type parameter itself — keep the explicit arguments in that case.
      if (PsiUtil.resolveClassInType(method.getReturnType()) == typeParameter && PsiPrimitiveType.getUnboxedType(inferredType) != null) {
        return false;
      }
    }
    return true;
  }
  /**
   * Checks that collapsing the type arguments of the {@code new} expression {@code exprCopy} to a
   * diamond still infers exactly {@code typeArguments} (modulo compatible wildcard bounds).
   *
   * @return {@code true} if diamond inference is equivalent to the explicit arguments
   */
  private static boolean isInferenceEquivalent(PsiType[] typeArguments,
                                               PsiElementFactory elementFactory,
                                               PsiNewExpression exprCopy) throws IncorrectOperationException {
    // Borrow an actual diamond parameter list from a template "new A<>()" expression.
    final PsiJavaCodeReferenceElement collapsedClassReference = ((PsiNewExpression)elementFactory.createExpressionFromText("new A<>()", null)).getClassOrAnonymousClassReference();
    LOG.assertTrue(collapsedClassReference != null);
    final PsiReferenceParameterList diamondParameterList = collapsedClassReference.getParameterList();
    LOG.assertTrue(diamondParameterList != null);
    final PsiJavaCodeReferenceElement classReference = exprCopy.getClassOrAnonymousClassReference();
    LOG.assertTrue(classReference != null);
    final PsiReferenceParameterList parameterList = classReference.getParameterList();
    LOG.assertTrue(parameterList != null);
    // Collapse the copy to a diamond, then read back what inference produces for it.
    parameterList.replace(diamondParameterList);
    final PsiType[] inferredArgs = classReference.getParameterList().getTypeArguments();
    if (typeArguments.length != inferredArgs.length) {
      return false;
    }
    for (int i = 0; i < typeArguments.length; i++) {
      PsiType typeArgument = typeArguments[i];
      if (inferredArgs[i] instanceof PsiWildcardType) {
        // A wildcard inferred where an explicit type stood is acceptable when the explicit type
        // fits within the wildcard's bound.
        final PsiWildcardType wildcardType = (PsiWildcardType)inferredArgs[i];
        final PsiType bound = wildcardType.getBound();
        if (bound != null) {
          if (wildcardType.isExtends()) {
            if (bound.isAssignableFrom(typeArgument)) continue;
          }
          else {
            if (typeArgument.isAssignableFrom(bound)) continue;
          }
        }
      }
      if (!typeArgument.equals(inferredArgs[i])) {
        return false;
      }
    }
    return true;
  }
}
| java/java-psi-impl/src/com/intellij/psi/impl/PsiDiamondTypeUtil.java | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiTypesUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.Function;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
 * Utilities around the Java 7 "diamond" operator ({@code <>}): deciding whether explicit type
 * arguments can be collapsed to a diamond, expanding a diamond back into explicit type
 * arguments, and detecting redundant explicit type arguments on calls.
 */
public class PsiDiamondTypeUtil {
  private static final Logger LOG = Logger.getInstance("#" + PsiDiamondTypeUtil.class.getName());

  private PsiDiamondTypeUtil() {
    // static utility class, no instances
  }

  /**
   * Returns {@code true} if the explicit type arguments of {@code expression} can be replaced
   * with a diamond without changing the inferred types.
   */
  public static boolean canCollapseToDiamond(final PsiNewExpression expression,
                                             final PsiNewExpression context,
                                             @Nullable final PsiType expectedType) {
    return canCollapseToDiamond(expression, context, expectedType, false);
  }

  /**
   * Like {@link #canCollapseToDiamond(PsiNewExpression, PsiNewExpression, PsiType)} but operates
   * on a copy of {@code expression} and re-checks expressions that are already diamonds.
   */
  public static boolean canChangeContextForDiamond(final PsiNewExpression expression, final PsiType expectedType) {
    final PsiNewExpression copy = (PsiNewExpression)expression.copy();
    return canCollapseToDiamond(copy, copy, expectedType, true);
  }

  private static boolean canCollapseToDiamond(final PsiNewExpression expression,
                                              final PsiNewExpression context,
                                              @Nullable final PsiType expectedType,
                                              boolean skipDiamonds) {
    // Diamonds only exist since Java 7.
    if (PsiUtil.getLanguageLevel(context).isAtLeast(LanguageLevel.JDK_1_7)) {
      final PsiJavaCodeReferenceElement classReference = expression.getClassOrAnonymousClassReference();
      if (classReference != null) {
        final PsiReferenceParameterList parameterList = classReference.getParameterList();
        if (parameterList != null) {
          final PsiTypeElement[] typeElements = parameterList.getTypeParameterElements();
          if (typeElements.length > 0) {
            // Already a diamond: nothing to collapse unless the caller asked to re-check diamonds.
            if (!skipDiamonds && typeElements.length == 1 && typeElements[0].getType() instanceof PsiDiamondType) return false;
            final PsiDiamondTypeImpl.DiamondInferenceResult inferenceResult = PsiDiamondTypeImpl.resolveInferredTypes(expression, context);
            if (inferenceResult.getErrorMessage() == null) {
              final List<PsiType> types = inferenceResult.getInferredTypes();
              PsiType[] typeArguments = null;
              if (expectedType instanceof PsiClassType) {
                typeArguments = ((PsiClassType)expectedType).getParameters();
              }
              if (typeArguments == null) {
                typeArguments = parameterList.getTypeArguments();
              }
              if (types.size() == typeArguments.length) {
                final PsiMethod method = expression.resolveMethod();
                final PsiElement resolve = classReference.resolve();
                if (resolve instanceof PsiClass) {
                  final PsiTypeParameter[] typeParameters = ((PsiClass)resolve).getTypeParameters();
                  return areTypeArgumentsRedundant(typeArguments, expression, true, method, typeParameters);
                }
              }
              return true;
            }
          }
        }
      }
    }
    return false;
  }

  /**
   * Replaces the given explicit type-argument list with a diamond ({@code <>}).
   * Returns the replacement element, or {@code psiElement} unchanged when it is not a
   * {@link PsiReferenceParameterList} or the containing file cannot be made writable.
   */
  public static PsiElement replaceExplicitWithDiamond(PsiElement psiElement) {
    if (psiElement instanceof PsiReferenceParameterList) {
      if (!FileModificationService.getInstance().prepareFileForWrite(psiElement.getContainingFile())) return psiElement;
      // Borrow an actual diamond parameter list from a template "new a<>()" expression.
      final PsiNewExpression expression =
        (PsiNewExpression)JavaPsiFacade.getElementFactory(psiElement.getProject()).createExpressionFromText("new a<>()", psiElement);
      final PsiJavaCodeReferenceElement classReference = expression.getClassReference();
      LOG.assertTrue(classReference != null);
      final PsiReferenceParameterList parameterList = classReference.getParameterList();
      LOG.assertTrue(parameterList != null);
      return psiElement.replace(parameterList);
    }
    return psiElement;
  }

  /**
   * Replaces a diamond (whose parameter list is {@code element}) with the explicitly inferred
   * type arguments and returns the reformatted class reference.
   */
  public static PsiElement replaceDiamondWithExplicitTypes(PsiElement element) {
    final PsiElement parent = element.getParent();
    if (!(parent instanceof PsiJavaCodeReferenceElement)) {
      return parent;
    }
    final PsiJavaCodeReferenceElement javaCodeReferenceElement = (PsiJavaCodeReferenceElement) parent;
    // Build "Qualified.Name<T1,T2,...>" from the types the diamond would infer.
    final StringBuilder text = new StringBuilder();
    text.append(javaCodeReferenceElement.getQualifiedName());
    text.append('<');
    final PsiNewExpression newExpression = PsiTreeUtil.getParentOfType(element, PsiNewExpression.class);
    final PsiDiamondType.DiamondInferenceResult result = PsiDiamondTypeImpl.resolveInferredTypesNoCheck(newExpression, newExpression);
    text.append(StringUtil.join(result.getInferredTypes(), new Function<PsiType, String>() {
      @Override
      public String fun(PsiType psiType) {
        return psiType.getCanonicalText();
      }
    }, ","));
    text.append('>');
    final PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(element.getProject());
    final PsiJavaCodeReferenceElement newReference = elementFactory.createReferenceFromText(text.toString(), element);
    return CodeStyleManager.getInstance(javaCodeReferenceElement.getProject()).reformat(javaCodeReferenceElement.replace(newReference));
  }

  /**
   * If {@code expr} is a {@code new} expression whose single type argument is a diamond,
   * expands the diamond to explicit type arguments; otherwise returns {@code expr} unchanged.
   */
  public static PsiExpression expandTopLevelDiamondsInside(PsiExpression expr) {
    if (expr instanceof PsiNewExpression) {
      final PsiJavaCodeReferenceElement classReference = ((PsiNewExpression)expr).getClassReference();
      if (classReference != null) {
        final PsiReferenceParameterList parameterList = classReference.getParameterList();
        if (parameterList != null) {
          final PsiTypeElement[] typeParameterElements = parameterList.getTypeParameterElements();
          if (typeParameterElements.length == 1 && typeParameterElements[0].getType() instanceof PsiDiamondType) {
            return (PsiExpression)replaceDiamondWithExplicitTypes(parameterList).getParent();
          }
        }
      }
    }
    return expr;
  }

  /**
   * Renders {@code type} as text, collapsing any type arguments to a diamond when the language
   * level of {@code context} is Java 7 or higher.
   */
  public static String getCollapsedType(PsiType type, PsiElement context) {
    String typeText = type.getCanonicalText();
    if (PsiUtil.isLanguageLevel7OrHigher(context)) {
      final int idx = typeText.indexOf('<');
      if (idx >= 0) {
        return typeText.substring(0, idx) + "<>";
      }
    }
    return typeText;
  }

  /**
   * Determines whether the explicit type arguments of {@code expression} are redundant, i.e.
   * whether type inference would produce exactly the same types if they were removed.
   * Works on a non-physical copy of the call placed in an equivalent typed context.
   *
   * @return {@code true} if removing the explicit type arguments would not change inference
   */
  public static boolean areTypeArgumentsRedundant(PsiType[] typeArguments,
                                                  PsiCallExpression expression,
                                                  boolean constructorRef,
                                                  @Nullable PsiMethod method,
                                                  PsiTypeParameter[] typeParameters) {
    try {
      final PsiElement copy;
      final PsiType typeByParent = PsiTypesUtil.getExpectedTypeByParent(expression);
      if (typeByParent != null) {
        // Re-create the call inside an array initializer of the expected type so the copy sees
        // the same target type as the original context.
        final String arrayInitializer = "new " + typeByParent.getCanonicalText() + "[]{0}";
        final Project project = expression.getProject();
        final PsiElementFactory elementFactory = JavaPsiFacade.getInstance(project).getElementFactory();
        PsiNewExpression newExpr = (PsiNewExpression)elementFactory.createExpressionFromText(arrayInitializer, expression);
        //ensure refs to inner classes are collapsed to avoid raw types (container type would be raw in qualified text)
        newExpr = (PsiNewExpression)JavaCodeStyleManager.getInstance(project).shortenClassReferences(newExpr);
        final PsiArrayInitializerExpression initializer = newExpr.getArrayInitializer();
        LOG.assertTrue(initializer != null);
        copy = initializer.getInitializers()[0].replace(expression);
      }
      else {
        final PsiExpressionList argumentList = expression.getArgumentList();
        final int offset = (argumentList != null ? argumentList : expression).getTextRange().getStartOffset();
        final PsiCall call = LambdaUtil.treeWalkUp(expression);
        if (call != null) {
          final PsiCall callCopy = LambdaUtil.copyTopLevelCall(call);
          copy = callCopy != null ? callCopy.findElementAt(offset - call.getTextRange().getStartOffset()) : null;
        }
        else {
          // Inside an injected fragment a plain file copy loses the host context, so resolve
          // results on the copy are unreliable — bail out instead of reporting redundancy (EA-89605).
          final InjectedLanguageManager injectedLanguageManager = InjectedLanguageManager.getInstance(expression.getProject());
          if (injectedLanguageManager.getInjectionHost(expression) != null) {
            return false;
          }
          final PsiFile containingFile = expression.getContainingFile();
          final PsiFile fileCopy = (PsiFile)containingFile.copy();
          copy = fileCopy.findElementAt(offset);
          if (method != null && method.getContainingFile() == containingFile) {
            // Re-resolve the target method inside the copied file so identity comparison works.
            final PsiElement startMethodElementInCopy = fileCopy.findElementAt(method.getTextOffset());
            method = PsiTreeUtil.getParentOfType(startMethodElementInCopy, PsiMethod.class);
            if (method == null) {
              //lombok generated builder
              return false;
            }
          }
        }
      }
      final PsiCallExpression exprCopy = PsiTreeUtil.getParentOfType(copy, PsiCallExpression.class, false);
      if (exprCopy != null) {
        final PsiElementFactory elementFactory = JavaPsiFacade.getInstance(exprCopy.getProject()).getElementFactory();
        if (constructorRef) {
          if (!(exprCopy instanceof PsiNewExpression) || !isInferenceEquivalent(typeArguments, elementFactory, (PsiNewExpression)exprCopy)) {
            return false;
          }
        }
        else {
          LOG.assertTrue(method != null);
          if (!isInferenceEquivalent(typeArguments, elementFactory, exprCopy, method, typeParameters)) {
            return false;
          }
        }
      }
    }
    catch (IncorrectOperationException e) {
      // Copy/replace on the non-physical tree failed — be conservative and keep the arguments.
      LOG.info(e);
      return false;
    }
    return true;
  }

  /**
   * Checks that resolving {@code exprCopy} with its explicit type arguments removed still picks
   * {@code method} and infers exactly {@code typeArguments} for {@code typeParameters}.
   */
  private static boolean isInferenceEquivalent(PsiType[] typeArguments,
                                               PsiElementFactory elementFactory,
                                               PsiCallExpression exprCopy,
                                               PsiMethod method,
                                               PsiTypeParameter[] typeParameters) throws IncorrectOperationException {
    // Replace the copy's type-argument list with an empty one (borrowed from "foo()").
    PsiReferenceParameterList list = ((PsiCallExpression)elementFactory.createExpressionFromText("foo()", null)).getTypeArgumentList();
    exprCopy.getTypeArgumentList().replace(list);
    final JavaResolveResult copyResult = exprCopy.resolveMethodGenerics();
    // Without the explicit arguments a different overload may win — then they are not redundant.
    if (method != copyResult.getElement()) return false;
    final PsiSubstitutor psiSubstitutor = copyResult.getSubstitutor();
    for (int i = 0, length = typeParameters.length; i < length; i++) {
      PsiTypeParameter typeParameter = typeParameters[i];
      final PsiType inferredType = psiSubstitutor.getSubstitutionMap().get(typeParameter);
      if (!typeArguments[i].equals(inferredType)) {
        return false;
      }
      // A primitive-wrapper result can change (un)boxing behavior at the call site when the
      // return type is the type parameter itself — keep the explicit arguments in that case.
      if (PsiUtil.resolveClassInType(method.getReturnType()) == typeParameter && PsiPrimitiveType.getUnboxedType(inferredType) != null) {
        return false;
      }
    }
    return true;
  }

  /**
   * Checks that collapsing the type arguments of the {@code new} expression {@code exprCopy} to a
   * diamond still infers exactly {@code typeArguments} (modulo compatible wildcard bounds).
   */
  private static boolean isInferenceEquivalent(PsiType[] typeArguments,
                                               PsiElementFactory elementFactory,
                                               PsiNewExpression exprCopy) throws IncorrectOperationException {
    final PsiJavaCodeReferenceElement collapsedClassReference = ((PsiNewExpression)elementFactory.createExpressionFromText("new A<>()", null)).getClassOrAnonymousClassReference();
    LOG.assertTrue(collapsedClassReference != null);
    final PsiReferenceParameterList diamondParameterList = collapsedClassReference.getParameterList();
    LOG.assertTrue(diamondParameterList != null);
    final PsiJavaCodeReferenceElement classReference = exprCopy.getClassOrAnonymousClassReference();
    LOG.assertTrue(classReference != null);
    final PsiReferenceParameterList parameterList = classReference.getParameterList();
    LOG.assertTrue(parameterList != null);
    // Collapse the copy to a diamond, then read back what inference produces for it.
    parameterList.replace(diamondParameterList);
    final PsiType[] inferredArgs = classReference.getParameterList().getTypeArguments();
    if (typeArguments.length != inferredArgs.length) {
      return false;
    }
    for (int i = 0; i < typeArguments.length; i++) {
      PsiType typeArgument = typeArguments[i];
      if (inferredArgs[i] instanceof PsiWildcardType) {
        // A wildcard inferred where an explicit type stood is acceptable when the explicit type
        // fits within the wildcard's bound.
        final PsiWildcardType wildcardType = (PsiWildcardType)inferredArgs[i];
        final PsiType bound = wildcardType.getBound();
        if (bound != null) {
          if (wildcardType.isExtends()) {
            if (bound.isAssignableFrom(typeArgument)) continue;
          }
          else {
            if (typeArgument.isAssignableFrom(bound)) continue;
          }
        }
      }
      if (!typeArgument.equals(inferredArgs[i])) {
        return false;
      }
    }
    return true;
  }
}
| disable redundant args for injected fragments
EA-89605 - Throwable: ExceptionUtil.currentStackTrace
| java/java-psi-impl/src/com/intellij/psi/impl/PsiDiamondTypeUtil.java | disable redundant args for injected fragments EA-89605 - Throwable: ExceptionUtil.currentStackTrace |
|
Java | apache-2.0 | 8aca504c863d2351258e33e86b2c18e9365bbf99 | 0 | JackyMai/elasticsearch,qwerty4030/elasticsearch,uschindler/elasticsearch,winstonewert/elasticsearch,mikemccand/elasticsearch,ThiagoGarciaAlves/elasticsearch,umeshdangat/elasticsearch,nknize/elasticsearch,njlawton/elasticsearch,sneivandt/elasticsearch,IanvsPoplicola/elasticsearch,s1monw/elasticsearch,artnowo/elasticsearch,markwalkom/elasticsearch,a2lin/elasticsearch,wuranbo/elasticsearch,scottsom/elasticsearch,masaruh/elasticsearch,IanvsPoplicola/elasticsearch,mohit/elasticsearch,artnowo/elasticsearch,glefloch/elasticsearch,robin13/elasticsearch,brandonkearby/elasticsearch,gingerwizard/elasticsearch,jprante/elasticsearch,Shepard1212/elasticsearch,StefanGor/elasticsearch,elasticdog/elasticsearch,rajanm/elasticsearch,sneivandt/elasticsearch,naveenhooda2000/elasticsearch,jimczi/elasticsearch,masaruh/elasticsearch,alexshadow007/elasticsearch,strapdata/elassandra,rlugojr/elasticsearch,nilabhsagar/elasticsearch,uschindler/elasticsearch,elasticdog/elasticsearch,kalimatas/elasticsearch,jimczi/elasticsearch,nknize/elasticsearch,LeoYao/elasticsearch,Shepard1212/elasticsearch,bawse/elasticsearch,wuranbo/elasticsearch,JSCooke/elasticsearch,nilabhsagar/elasticsearch,Helen-Zhao/elasticsearch,wenpos/elasticsearch,wenpos/elasticsearch,scorpionvicky/elasticsearch,JackyMai/elasticsearch,henakamaMSFT/elasticsearch,njlawton/elasticsearch,scorpionvicky/elasticsearch,gingerwizard/elasticsearch,obourgain/elasticsearch,ZTE-PaaS/elasticsearch,ThiagoGarciaAlves/elasticsearch,s1monw/elasticsearch,Shepard1212/elasticsearch,strapdata/elassandra,henakamaMSFT/elasticsearch,wangtuo/elasticsearch,nilabhsagar/elasticsearch,mohit/elasticsearch,lks21c/elasticsearch,Stacey-Gammon/elasticsearch,obourgain/elasticsearch,rlugojr/elasticsearch,mohit/elasticsearch,LeoYao/elasticsearch,wangtuo/elasticsearch,wenpos/elasticsearch,JSCooke/elasticsearch,kalimatas/elasticsearch,mikemccand/elasticsearch,wenpos/elasticsearch,gingerwizard/elasticsearch
,i-am-Nathan/elasticsearch,geidies/elasticsearch,nilabhsagar/elasticsearch,MisterAndersen/elasticsearch,pozhidaevak/elasticsearch,gingerwizard/elasticsearch,masaruh/elasticsearch,Stacey-Gammon/elasticsearch,elasticdog/elasticsearch,GlenRSmith/elasticsearch,fred84/elasticsearch,GlenRSmith/elasticsearch,maddin2016/elasticsearch,Stacey-Gammon/elasticsearch,lks21c/elasticsearch,nezirus/elasticsearch,lks21c/elasticsearch,geidies/elasticsearch,gingerwizard/elasticsearch,scorpionvicky/elasticsearch,markwalkom/elasticsearch,vroyer/elasticassandra,fred84/elasticsearch,nezirus/elasticsearch,wuranbo/elasticsearch,GlenRSmith/elasticsearch,StefanGor/elasticsearch,njlawton/elasticsearch,uschindler/elasticsearch,nknize/elasticsearch,scottsom/elasticsearch,njlawton/elasticsearch,naveenhooda2000/elasticsearch,maddin2016/elasticsearch,C-Bish/elasticsearch,shreejay/elasticsearch,HonzaKral/elasticsearch,LewayneNaidoo/elasticsearch,uschindler/elasticsearch,coding0011/elasticsearch,mortonsykes/elasticsearch,rajanm/elasticsearch,coding0011/elasticsearch,MisterAndersen/elasticsearch,ThiagoGarciaAlves/elasticsearch,jprante/elasticsearch,HonzaKral/elasticsearch,ThiagoGarciaAlves/elasticsearch,ZTE-PaaS/elasticsearch,HonzaKral/elasticsearch,ZTE-PaaS/elasticsearch,JackyMai/elasticsearch,gfyoung/elasticsearch,Stacey-Gammon/elasticsearch,vroyer/elassandra,MisterAndersen/elasticsearch,masaruh/elasticsearch,shreejay/elasticsearch,mohit/elasticsearch,scottsom/elasticsearch,mortonsykes/elasticsearch,Helen-Zhao/elasticsearch,gingerwizard/elasticsearch,IanvsPoplicola/elasticsearch,qwerty4030/elasticsearch,jprante/elasticsearch,strapdata/elassandra,robin13/elasticsearch,ThiagoGarciaAlves/elasticsearch,umeshdangat/elasticsearch,MisterAndersen/elasticsearch,s1monw/elasticsearch,nknize/elasticsearch,vroyer/elasticassandra,rlugojr/elasticsearch,rajanm/elasticsearch,strapdata/elassandra,scorpionvicky/elasticsearch,bawse/elasticsearch,strapdata/elassandra,a2lin/elasticsearch,gfyoung/elasticsearch,bawse/elasti
csearch,JSCooke/elasticsearch,HonzaKral/elasticsearch,a2lin/elasticsearch,robin13/elasticsearch,mohit/elasticsearch,wuranbo/elasticsearch,StefanGor/elasticsearch,nezirus/elasticsearch,winstonewert/elasticsearch,ThiagoGarciaAlves/elasticsearch,i-am-Nathan/elasticsearch,winstonewert/elasticsearch,JackyMai/elasticsearch,i-am-Nathan/elasticsearch,brandonkearby/elasticsearch,naveenhooda2000/elasticsearch,qwerty4030/elasticsearch,Helen-Zhao/elasticsearch,jprante/elasticsearch,mortonsykes/elasticsearch,brandonkearby/elasticsearch,mjason3/elasticsearch,markwalkom/elasticsearch,geidies/elasticsearch,Shepard1212/elasticsearch,wangtuo/elasticsearch,fernandozhu/elasticsearch,kalimatas/elasticsearch,robin13/elasticsearch,obourgain/elasticsearch,s1monw/elasticsearch,StefanGor/elasticsearch,scottsom/elasticsearch,i-am-Nathan/elasticsearch,rajanm/elasticsearch,jprante/elasticsearch,nazarewk/elasticsearch,IanvsPoplicola/elasticsearch,qwerty4030/elasticsearch,mikemccand/elasticsearch,qwerty4030/elasticsearch,nezirus/elasticsearch,glefloch/elasticsearch,LewayneNaidoo/elasticsearch,markwalkom/elasticsearch,sneivandt/elasticsearch,nazarewk/elasticsearch,LeoYao/elasticsearch,masaruh/elasticsearch,markwalkom/elasticsearch,vroyer/elasticassandra,mikemccand/elasticsearch,StefanGor/elasticsearch,LewayneNaidoo/elasticsearch,nazarewk/elasticsearch,LeoYao/elasticsearch,a2lin/elasticsearch,gfyoung/elasticsearch,Helen-Zhao/elasticsearch,JackyMai/elasticsearch,nknize/elasticsearch,naveenhooda2000/elasticsearch,brandonkearby/elasticsearch,C-Bish/elasticsearch,obourgain/elasticsearch,GlenRSmith/elasticsearch,GlenRSmith/elasticsearch,JSCooke/elasticsearch,geidies/elasticsearch,wenpos/elasticsearch,wuranbo/elasticsearch,shreejay/elasticsearch,mjason3/elasticsearch,henakamaMSFT/elasticsearch,s1monw/elasticsearch,kalimatas/elasticsearch,C-Bish/elasticsearch,henakamaMSFT/elasticsearch,pozhidaevak/elasticsearch,gingerwizard/elasticsearch,i-am-Nathan/elasticsearch,IanvsPoplicola/elasticsearch,C-Bish/elasti
csearch,sneivandt/elasticsearch,fernandozhu/elasticsearch,vroyer/elassandra,maddin2016/elasticsearch,alexshadow007/elasticsearch,umeshdangat/elasticsearch,uschindler/elasticsearch,coding0011/elasticsearch,robin13/elasticsearch,kalimatas/elasticsearch,fernandozhu/elasticsearch,LeoYao/elasticsearch,lks21c/elasticsearch,winstonewert/elasticsearch,geidies/elasticsearch,gfyoung/elasticsearch,brandonkearby/elasticsearch,artnowo/elasticsearch,glefloch/elasticsearch,rajanm/elasticsearch,glefloch/elasticsearch,LeoYao/elasticsearch,JSCooke/elasticsearch,bawse/elasticsearch,mikemccand/elasticsearch,umeshdangat/elasticsearch,scottsom/elasticsearch,alexshadow007/elasticsearch,umeshdangat/elasticsearch,MisterAndersen/elasticsearch,LeoYao/elasticsearch,scorpionvicky/elasticsearch,wangtuo/elasticsearch,shreejay/elasticsearch,coding0011/elasticsearch,elasticdog/elasticsearch,glefloch/elasticsearch,fred84/elasticsearch,njlawton/elasticsearch,C-Bish/elasticsearch,ZTE-PaaS/elasticsearch,artnowo/elasticsearch,mortonsykes/elasticsearch,jimczi/elasticsearch,gfyoung/elasticsearch,nazarewk/elasticsearch,mortonsykes/elasticsearch,bawse/elasticsearch,geidies/elasticsearch,lks21c/elasticsearch,maddin2016/elasticsearch,LewayneNaidoo/elasticsearch,wangtuo/elasticsearch,obourgain/elasticsearch,pozhidaevak/elasticsearch,henakamaMSFT/elasticsearch,pozhidaevak/elasticsearch,jimczi/elasticsearch,coding0011/elasticsearch,rajanm/elasticsearch,shreejay/elasticsearch,LewayneNaidoo/elasticsearch,maddin2016/elasticsearch,mjason3/elasticsearch,a2lin/elasticsearch,ZTE-PaaS/elasticsearch,winstonewert/elasticsearch,Helen-Zhao/elasticsearch,vroyer/elassandra,naveenhooda2000/elasticsearch,mjason3/elasticsearch,fred84/elasticsearch,pozhidaevak/elasticsearch,artnowo/elasticsearch,alexshadow007/elasticsearch,Shepard1212/elasticsearch,rlugojr/elasticsearch,fernandozhu/elasticsearch,Stacey-Gammon/elasticsearch,nezirus/elasticsearch,fred84/elasticsearch,fernandozhu/elasticsearch,elasticdog/elasticsearch,alexshadow007/
elasticsearch,sneivandt/elasticsearch,rlugojr/elasticsearch,nilabhsagar/elasticsearch,markwalkom/elasticsearch,jimczi/elasticsearch,nazarewk/elasticsearch,mjason3/elasticsearch | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import java.io.IOException;
import static java.util.Collections.emptyList;
/**
 * Tests for {@link QueryParseContext}: parsing top-level and inner query builders from JSON,
 * including the error paths for malformed or unknown input.
 */
public class QueryParseContextTests extends ESTestCase {

    /** Registry with the named parsers contributed by {@link SearchModule}; shared by all tests. */
    private static NamedXContentRegistry xContentRegistry;

    @BeforeClass
    public static void init() {
        xContentRegistry = new NamedXContentRegistry(new SearchModule(Settings.EMPTY, false, emptyList()).getNamedXContents());
    }

    @AfterClass
    public static void cleanup() {
        // drop the static registry so it cannot leak into other test classes
        xContentRegistry = null;
    }

    private ThreadContext threadContext;

    @Before
    public void beforeTest() throws IOException {
        this.threadContext = new ThreadContext(Settings.EMPTY);
        DeprecationLogger.setThreadContext(threadContext);
    }

    @After
    public void teardown() throws IOException {
        DeprecationLogger.removeThreadContext(this.threadContext);
        this.threadContext.close();
    }

    public void testParseTopLevelBuilder() throws IOException {
        QueryBuilder query = new MatchQueryBuilder("foo", "bar");
        String body = "{ \"query\" : " + query.toString() + "}";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, body)) {
            QueryParseContext parseContext = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            QueryBuilder parsed = parseContext.parseTopLevelQueryBuilder();
            assertEquals(query, parsed);
        }
    }

    public void testParseTopLevelBuilderEmptyObject() throws IOException {
        // an empty request object parses to a null query rather than an error
        String body = "{}";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, body)) {
            QueryParseContext parseContext = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            assertNull(parseContext.parseTopLevelQueryBuilder());
        }
    }

    public void testParseTopLevelBuilderUnknownParameter() throws IOException {
        String body = "{ \"foo\" : \"bar\"}";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, body)) {
            QueryParseContext parseContext = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            ParsingException e = expectThrows(ParsingException.class, parseContext::parseTopLevelQueryBuilder);
            assertEquals("request does not support [foo]", e.getMessage());
        }
    }

    public void testParseInnerQueryBuilder() throws IOException {
        QueryBuilder query = new MatchQueryBuilder("foo", "bar");
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, query.toString())) {
            QueryParseContext parseContext = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            QueryBuilder parsed = parseContext.parseInnerQueryBuilder();
            assertEquals(query, parsed);
        }
    }

    public void testParseInnerQueryBuilderExceptions() throws IOException {
        // not positioned on START_OBJECT
        String json = "{ \"foo\": \"bar\" }";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
            parser.nextToken();
            parser.nextToken(); // don't start with START_OBJECT to provoke exception
            QueryParseContext parseContext = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            ParsingException e = expectThrows(ParsingException.class, parseContext::parseInnerQueryBuilder);
            assertEquals("[_na] query malformed, must start with start_object", e.getMessage());
        }

        // empty clause
        json = "{}";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
            QueryParseContext parseContext = new QueryParseContext(parser, ParseFieldMatcher.EMPTY);
            IllegalArgumentException e = expectThrows(IllegalArgumentException.class, parseContext::parseInnerQueryBuilder);
            assertEquals("query malformed, empty clause found at [1:2]", e.getMessage());
        }

        // query name not followed by an object
        json = "{ \"foo\" : \"bar\" }";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
            QueryParseContext parseContext = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            ParsingException e = expectThrows(ParsingException.class, parseContext::parseInnerQueryBuilder);
            assertEquals("[foo] query malformed, no start_object after query name", e.getMessage());
        }

        // unknown query name
        json = "{ \"foo\" : {} }";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
            QueryParseContext parseContext = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            ParsingException e = expectThrows(ParsingException.class, parseContext::parseInnerQueryBuilder);
            assertEquals("no [query] registered for [foo]", e.getMessage());
        }
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        return xContentRegistry;
    }
}
| core/src/test/java/org/elasticsearch/index/query/QueryParseContextTests.java | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import java.io.IOException;
import static java.util.Collections.emptyList;
/**
 * Unit tests for {@code QueryParseContext}: parsing top-level and inner query
 * builders out of JSON request bodies, including the error messages produced
 * for malformed or unknown query clauses.
 */
public class QueryParseContextTests extends ESTestCase {

    // Built once per suite in init(). Must be released in afterClass(); a static
    // field that survives the suite trips the test framework's static-state leak
    // check and caused CI failures.
    private static NamedXContentRegistry xContentRegistry;

    @BeforeClass
    public static void init() {
        xContentRegistry = new NamedXContentRegistry(new SearchModule(Settings.EMPTY, false, emptyList()).getNamedXContents());
    }

    @AfterClass
    public static void afterClass() {
        // Clear the static variable after the suite so the registry (and everything
        // it references) can be garbage collected and no static state leaks.
        xContentRegistry = null;
    }

    // Per-test thread context wired into the DeprecationLogger; closed in teardown().
    private ThreadContext threadContext;

    @Before
    public void beforeTest() throws IOException {
        this.threadContext = new ThreadContext(Settings.EMPTY);
        DeprecationLogger.setThreadContext(threadContext);
    }

    @After
    public void teardown() throws IOException {
        DeprecationLogger.removeThreadContext(this.threadContext);
        this.threadContext.close();
    }

    /** A well-formed {@code { "query" : ... }} body round-trips to an equal builder. */
    public void testParseTopLevelBuilder() throws IOException {
        QueryBuilder query = new MatchQueryBuilder("foo", "bar");
        String requestBody = "{ \"query\" : " + query.toString() + "}";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, requestBody)) {
            QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            QueryBuilder actual = context.parseTopLevelQueryBuilder();
            assertEquals(query, actual);
        }
    }

    /** An empty object yields no query rather than an error. */
    public void testParseTopLevelBuilderEmptyObject() throws IOException {
        String requestBody = "{}";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, requestBody)) {
            QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            QueryBuilder query = context.parseTopLevelQueryBuilder();
            assertNull(query);
        }
    }

    /** Unknown top-level keys are rejected with a descriptive message. */
    public void testParseTopLevelBuilderUnknownParameter() throws IOException {
        String requestBody = "{ \"foo\" : \"bar\"}";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, requestBody)) {
            QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            ParsingException exception = expectThrows(ParsingException.class, () -> context.parseTopLevelQueryBuilder());
            assertEquals("request does not support [foo]", exception.getMessage());
        }
    }

    /** A bare query clause (no {@code "query"} wrapper) parses to an equal builder. */
    public void testParseInnerQueryBuilder() throws IOException {
        QueryBuilder query = new MatchQueryBuilder("foo", "bar");
        String source = query.toString();
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
            QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            QueryBuilder actual = context.parseInnerQueryBuilder();
            assertEquals(query, actual);
        }
    }

    /** Each malformed-input shape produces its own specific exception and message. */
    public void testParseInnerQueryBuilderExceptions() throws IOException {
        // Parser positioned past START_OBJECT: must complain about the missing start.
        String source = "{ \"foo\": \"bar\" }";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
            parser.nextToken();
            parser.nextToken(); // don't start with START_OBJECT to provoke exception
            QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            ParsingException exception = expectThrows(ParsingException.class, () -> context.parseInnerQueryBuilder());
            assertEquals("[_na] query malformed, must start with start_object", exception.getMessage());
        }

        // Empty clause: reported with the parser's location.
        source = "{}";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
            QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.EMPTY);
            IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> context.parseInnerQueryBuilder());
            assertEquals("query malformed, empty clause found at [1:2]", exception.getMessage());
        }

        // Query name followed by a scalar instead of an object body.
        source = "{ \"foo\" : \"bar\" }";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
            QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            ParsingException exception = expectThrows(ParsingException.class, () -> context.parseInnerQueryBuilder());
            assertEquals("[foo] query malformed, no start_object after query name", exception.getMessage());
        }

        // Well-formed syntax but no query registered under that name.
        source = "{ \"foo\" : {} }";
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) {
            QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            ParsingException exception = expectThrows(ParsingException.class, () -> context.parseInnerQueryBuilder());
            assertEquals("no [query] registered for [foo]", exception.getMessage());
        }
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        // Supply the suite-wide registry to the base class's parser factory.
        return xContentRegistry;
    }
}
| Clear static variable after suite
This was causing test failures:
https://elasticsearch-ci.elastic.co/job/elastic+elasticsearch+master+java9-periodic/1101/console
https://elasticsearch-ci.elastic.co/job/elastic+elasticsearch+master+dockeralpine-periodic/513/consoleFull
| core/src/test/java/org/elasticsearch/index/query/QueryParseContextTests.java | Clear static variable after suite |
|
Java | apache-2.0 | 9096baf7d931d73ea2a38f1cbd8c28231cfc4b1c | 0 | AndreKR/elasticsearch,rmuir/elasticsearch,mm0/elasticsearch,StefanGor/elasticsearch,kaneshin/elasticsearch,thecocce/elasticsearch,MjAbuz/elasticsearch,Shepard1212/elasticsearch,kingaj/elasticsearch,dataduke/elasticsearch,alexbrasetvik/elasticsearch,lchennup/elasticsearch,Kakakakakku/elasticsearch,khiraiwa/elasticsearch,awislowski/elasticsearch,ajhalani/elasticsearch,alexkuk/elasticsearch,kingaj/elasticsearch,sposam/elasticsearch,ckclark/elasticsearch,infusionsoft/elasticsearch,MichaelLiZhou/elasticsearch,alexshadow007/elasticsearch,tsohil/elasticsearch,rlugojr/elasticsearch,mcku/elasticsearch,yynil/elasticsearch,vroyer/elasticassandra,F0lha/elasticsearch,raishiv/elasticsearch,maddin2016/elasticsearch,janmejay/elasticsearch,HonzaKral/elasticsearch,masaruh/elasticsearch,karthikjaps/elasticsearch,ImpressTV/elasticsearch,diendt/elasticsearch,geidies/elasticsearch,sdauletau/elasticsearch,boliza/elasticsearch,kimimj/elasticsearch,kubum/elasticsearch,vingupta3/elasticsearch,nazarewk/elasticsearch,davidvgalbraith/elasticsearch,hirdesh2008/elasticsearch,gfyoung/elasticsearch,girirajsharma/elasticsearch,StefanGor/elasticsearch,sreeramjayan/elasticsearch,gfyoung/elasticsearch,Clairebi/ElasticsearchClone,tsohil/elasticsearch,ydsakyclguozi/elasticsearch,javachengwc/elasticsearch,wangtuo/elasticsearch,jchampion/elasticsearch,jw0201/elastic,F0lha/elasticsearch,hydro2k/elasticsearch,nezirus/elasticsearch,koxa29/elasticsearch,wuranbo/elasticsearch,nrkkalyan/elasticsearch,brandonkearby/elasticsearch,kkirsche/elasticsearch,kunallimaye/elasticsearch,xingguang2013/elasticsearch,mute/elasticsearch,mbrukman/elasticsearch,wittyameta/elasticsearch,Asimov4/elasticsearch,alexbrasetvik/elasticsearch,huanzhong/elasticsearch,elasticdog/elasticsearch,lightslife/elasticsearch,likaiwalkman/elasticsearch,jw0201/elastic,alexbrasetvik/elasticsearch,dpursehouse/elasticsearch,JervyShi/elasticsearch,episerver/elasticsearch,bestwpw/elastic
search,huypx1292/elasticsearch,fubuki/elasticsearch,ivansun1010/elasticsearch,IanvsPoplicola/elasticsearch,combinatorist/elasticsearch,shreejay/elasticsearch,jimhooker2002/elasticsearch,koxa29/elasticsearch,queirozfcom/elasticsearch,ydsakyclguozi/elasticsearch,mbrukman/elasticsearch,lydonchandra/elasticsearch,avikurapati/elasticsearch,qwerty4030/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,MisterAndersen/elasticsearch,iacdingping/elasticsearch,linglaiyao1314/elasticsearch,kcompher/elasticsearch,maddin2016/elasticsearch,geidies/elasticsearch,hafkensite/elasticsearch,PhaedrusTheGreek/elasticsearch,NBSW/elasticsearch,sdauletau/elasticsearch,sreeramjayan/elasticsearch,mohit/elasticsearch,fooljohnny/elasticsearch,wangyuxue/elasticsearch,kunallimaye/elasticsearch,masterweb121/elasticsearch,kingaj/elasticsearch,wangtuo/elasticsearch,achow/elasticsearch,ImpressTV/elasticsearch,rajanm/elasticsearch,petmit/elasticsearch,Uiho/elasticsearch,xingguang2013/elasticsearch,diendt/elasticsearch,strapdata/elassandra,MetSystem/elasticsearch,loconsolutions/elasticsearch,onegambler/elasticsearch,kaneshin/elasticsearch,likaiwalkman/elasticsearch,lchennup/elasticsearch,chrismwendt/elasticsearch,MaineC/elasticsearch,hydro2k/elasticsearch,brandonkearby/elasticsearch,yynil/elasticsearch,ThiagoGarciaAlves/elasticsearch,rlugojr/elasticsearch,slavau/elasticsearch,wayeast/elasticsearch,adrianbk/elasticsearch,MetSystem/elasticsearch,Asimov4/elasticsearch,glefloch/elasticsearch,masterweb121/elasticsearch,ckclark/elasticsearch,sscarduzio/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,dpursehouse/elasticsearch,pritishppai/elasticsearch,tcucchietti/elasticsearch,kalimatas/elasticsearch,ajhalani/elasticsearch,IanvsPoplicola/elasticsearch,anti-social/elasticsearch,tkssharma/elasticsearch,naveenhooda2000/elasticsearch,amaliujia/elasticsearch,zhiqinghuang/elasticsearch,xuzha/elasticsearch,chrismwendt/elasticsearch,vinsonlou/elasticsearch,hanswang/elasticsearch,masaruh/elasticsearch,drewr/elastic
search,pablocastro/elasticsearch,njlawton/elasticsearch,mm0/elasticsearch,Fsero/elasticsearch,huypx1292/elasticsearch,jw0201/elastic,coding0011/elasticsearch,overcome/elasticsearch,weipinghe/elasticsearch,strapdata/elassandra-test,kalburgimanjunath/elasticsearch,NBSW/elasticsearch,F0lha/elasticsearch,janmejay/elasticsearch,mjhennig/elasticsearch,ImpressTV/elasticsearch,elancom/elasticsearch,kingaj/elasticsearch,socialrank/elasticsearch,andrejserafim/elasticsearch,wimvds/elasticsearch,AshishThakur/elasticsearch,sc0ttkclark/elasticsearch,tkssharma/elasticsearch,beiske/elasticsearch,fekaputra/elasticsearch,caengcjd/elasticsearch,hirdesh2008/elasticsearch,Ansh90/elasticsearch,lmtwga/elasticsearch,skearns64/elasticsearch,codebunt/elasticsearch,gingerwizard/elasticsearch,areek/elasticsearch,glefloch/elasticsearch,masaruh/elasticsearch,sauravmondallive/elasticsearch,maddin2016/elasticsearch,caengcjd/elasticsearch,umeshdangat/elasticsearch,hechunwen/elasticsearch,Shepard1212/elasticsearch,HonzaKral/elasticsearch,strapdata/elassandra-test,fooljohnny/elasticsearch,mohit/elasticsearch,knight1128/elasticsearch,snikch/elasticsearch,xuzha/elasticsearch,likaiwalkman/elasticsearch,clintongormley/elasticsearch,JervyShi/elasticsearch,fred84/elasticsearch,dpursehouse/elasticsearch,gingerwizard/elasticsearch,LeoYao/elasticsearch,yuy168/elasticsearch,ThalaivaStars/OrgRepo1,sauravmondallive/elasticsearch,infusionsoft/elasticsearch,mohsinh/elasticsearch,btiernay/elasticsearch,lmtwga/elasticsearch,fred84/elasticsearch,a2lin/elasticsearch,djschny/elasticsearch,janmejay/elasticsearch,markllama/elasticsearch,boliza/elasticsearch,camilojd/elasticsearch,alexkuk/elasticsearch,scottsom/elasticsearch,JackyMai/elasticsearch,himanshuag/elasticsearch,raishiv/elasticsearch,wayeast/elasticsearch,jango2015/elasticsearch,chirilo/elasticsearch,abhijitiitr/es,glefloch/elasticsearch,myelin/elasticsearch,ThiagoGarciaAlves/elasticsearch,Ansh90/elasticsearch,coding0011/elasticsearch,kaneshin/elasticsearch,heng
4fun/elasticsearch,robin13/elasticsearch,Kakakakakku/elasticsearch,kimimj/elasticsearch,truemped/elasticsearch,mbrukman/elasticsearch,springning/elasticsearch,javachengwc/elasticsearch,yynil/elasticsearch,pranavraman/elasticsearch,lzo/elasticsearch-1,likaiwalkman/elasticsearch,jpountz/elasticsearch,hanswang/elasticsearch,apepper/elasticsearch,markwalkom/elasticsearch,HonzaKral/elasticsearch,infusionsoft/elasticsearch,nezirus/elasticsearch,mmaracic/elasticsearch,hanswang/elasticsearch,s1monw/elasticsearch,mkis-/elasticsearch,davidvgalbraith/elasticsearch,wayeast/elasticsearch,pablocastro/elasticsearch,scorpionvicky/elasticsearch,JervyShi/elasticsearch,peschlowp/elasticsearch,snikch/elasticsearch,jchampion/elasticsearch,likaiwalkman/elasticsearch,strapdata/elassandra-test,zeroctu/elasticsearch,awislowski/elasticsearch,cwurm/elasticsearch,fooljohnny/elasticsearch,yanjunh/elasticsearch,kenshin233/elasticsearch,golubev/elasticsearch,yuy168/elasticsearch,petmit/elasticsearch,wayeast/elasticsearch,Shekharrajak/elasticsearch,andrestc/elasticsearch,smflorentino/elasticsearch,onegambler/elasticsearch,polyfractal/elasticsearch,a2lin/elasticsearch,bestwpw/elasticsearch,vietlq/elasticsearch,sdauletau/elasticsearch,brwe/elasticsearch,Collaborne/elasticsearch,zhaocloud/elasticsearch,markllama/elasticsearch,pablocastro/elasticsearch,tebriel/elasticsearch,cnfire/elasticsearch-1,dongjoon-hyun/elasticsearch,codebunt/elasticsearch,mjason3/elasticsearch,cwurm/elasticsearch,nknize/elasticsearch,vroyer/elasticassandra,mmaracic/elasticsearch,mapr/elasticsearch,ivansun1010/elasticsearch,sauravmondallive/elasticsearch,wuranbo/elasticsearch,ckclark/elasticsearch,Uiho/elasticsearch,lightslife/elasticsearch,palecur/elasticsearch,strapdata/elassandra5-rc,petabytedata/elasticsearch,StefanGor/elasticsearch,iamjakob/elasticsearch,myelin/elasticsearch,vingupta3/elasticsearch,himanshuag/elasticsearch,petabytedata/elasticsearch,feiqitian/elasticsearch,xuzha/elasticsearch,Rygbee/elasticsearch,F0lha/ela
sticsearch,andrewvc/elasticsearch,nrkkalyan/elasticsearch,ydsakyclguozi/elasticsearch,boliza/elasticsearch,NBSW/elasticsearch,AleksKochev/elasticsearch,geidies/elasticsearch,loconsolutions/elasticsearch,mortonsykes/elasticsearch,Uiho/elasticsearch,cnfire/elasticsearch-1,awislowski/elasticsearch,Shepard1212/elasticsearch,aparo/elasticsearch,vingupta3/elasticsearch,vietlq/elasticsearch,dataduke/elasticsearch,kalimatas/elasticsearch,rhoml/elasticsearch,luiseduardohdbackup/elasticsearch,jsgao0/elasticsearch,Brijeshrpatel9/elasticsearch,ESamir/elasticsearch,snikch/elasticsearch,sauravmondallive/elasticsearch,pranavraman/elasticsearch,Collaborne/elasticsearch,sreeramjayan/elasticsearch,EasonYi/elasticsearch,davidvgalbraith/elasticsearch,ulkas/elasticsearch,Shekharrajak/elasticsearch,TonyChai24/ESSource,lydonchandra/elasticsearch,jaynblue/elasticsearch,overcome/elasticsearch,rmuir/elasticsearch,andrestc/elasticsearch,djschny/elasticsearch,kevinkluge/elasticsearch,knight1128/elasticsearch,nomoa/elasticsearch,kkirsche/elasticsearch,Widen/elasticsearch,zeroctu/elasticsearch,ImpressTV/elasticsearch,libosu/elasticsearch,marcuswr/elasticsearch-dateline,jeteve/elasticsearch,zkidkid/elasticsearch,sjohnr/elasticsearch,Clairebi/ElasticsearchClone,golubev/elasticsearch,qwerty4030/elasticsearch,jprante/elasticsearch,clintongormley/elasticsearch,iamjakob/elasticsearch,kingaj/elasticsearch,alexbrasetvik/elasticsearch,yongminxia/elasticsearch,franklanganke/elasticsearch,drewr/elasticsearch,yanjunh/elasticsearch,koxa29/elasticsearch,uschindler/elasticsearch,beiske/elasticsearch,mapr/elasticsearch,jango2015/elasticsearch,kevinkluge/elasticsearch,wimvds/elasticsearch,amit-shar/elasticsearch,abibell/elasticsearch,camilojd/elasticsearch,Ansh90/elasticsearch,lks21c/elasticsearch,wangtuo/elasticsearch,kaneshin/elasticsearch,tahaemin/elasticsearch,scottsom/elasticsearch,mute/elasticsearch,socialrank/elasticsearch,Microsoft/elasticsearch,liweinan0423/elasticsearch,markllama/elasticsearch,rlugojr/
elasticsearch,vroyer/elassandra,ouyangkongtong/elasticsearch,Rygbee/elasticsearch,overcome/elasticsearch,artnowo/elasticsearch,dataduke/elasticsearch,masterweb121/elasticsearch,mjason3/elasticsearch,elancom/elasticsearch,linglaiyao1314/elasticsearch,iamjakob/elasticsearch,rhoml/elasticsearch,adrianbk/elasticsearch,dylan8902/elasticsearch,markharwood/elasticsearch,zkidkid/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,golubev/elasticsearch,mgalushka/elasticsearch,hydro2k/elasticsearch,wbowling/elasticsearch,kalimatas/elasticsearch,jsgao0/elasticsearch,raishiv/elasticsearch,mmaracic/elasticsearch,polyfractal/elasticsearch,ivansun1010/elasticsearch,jimhooker2002/elasticsearch,nrkkalyan/elasticsearch,Chhunlong/elasticsearch,kubum/elasticsearch,zhaocloud/elasticsearch,marcuswr/elasticsearch-dateline,IanvsPoplicola/elasticsearch,tahaemin/elasticsearch,ivansun1010/elasticsearch,dantuffery/elasticsearch,humandb/elasticsearch,sc0ttkclark/elasticsearch,Rygbee/elasticsearch,kcompher/elasticsearch,beiske/elasticsearch,SergVro/elasticsearch,naveenhooda2000/elasticsearch,KimTaehee/elasticsearch,opendatasoft/elasticsearch,heng4fun/elasticsearch,maddin2016/elasticsearch,markwalkom/elasticsearch,petmit/elasticsearch,umeshdangat/elasticsearch,btiernay/elasticsearch,wbowling/elasticsearch,iamjakob/elasticsearch,jprante/elasticsearch,koxa29/elasticsearch,mjhennig/elasticsearch,sjohnr/elasticsearch,schonfeld/elasticsearch,himanshuag/elasticsearch,markharwood/elasticsearch,mcku/elasticsearch,Rygbee/elasticsearch,ricardocerq/elasticsearch,sc0ttkclark/elasticsearch,rmuir/elasticsearch,nrkkalyan/elasticsearch,robin13/elasticsearch,mute/elasticsearch,Helen-Zhao/elasticsearch,i-am-Nathan/elasticsearch,gingerwizard/elasticsearch,LewayneNaidoo/elasticsearch,kimimj/elasticsearch,sposam/elasticsearch,AshishThakur/elasticsearch,springning/elasticsearch,HarishAtGitHub/elasticsearch,mjhennig/elasticsearch,alexksikes/elasticsearch,sposam/elasticsearch,zhiqinghuang/elasticsearch,aparo/elasticsearc
h,kubum/elasticsearch,socialrank/elasticsearch,trangvh/elasticsearch,Liziyao/elasticsearch,martinstuga/elasticsearch,wbowling/elasticsearch,glefloch/elasticsearch,javachengwc/elasticsearch,aglne/elasticsearch,jango2015/elasticsearch,wayeast/elasticsearch,markharwood/elasticsearch,martinstuga/elasticsearch,smflorentino/elasticsearch,Flipkart/elasticsearch,strapdata/elassandra5-rc,caengcjd/elasticsearch,ZTE-PaaS/elasticsearch,sneivandt/elasticsearch,ricardocerq/elasticsearch,mohsinh/elasticsearch,liweinan0423/elasticsearch,pritishppai/elasticsearch,smflorentino/elasticsearch,mcku/elasticsearch,qwerty4030/elasticsearch,zhiqinghuang/elasticsearch,diendt/elasticsearch,LeoYao/elasticsearch,diendt/elasticsearch,LeoYao/elasticsearch,cnfire/elasticsearch-1,kenshin233/elasticsearch,mapr/elasticsearch,jprante/elasticsearch,jaynblue/elasticsearch,opendatasoft/elasticsearch,mnylen/elasticsearch,zeroctu/elasticsearch,khiraiwa/elasticsearch,dongjoon-hyun/elasticsearch,kalimatas/elasticsearch,franklanganke/elasticsearch,njlawton/elasticsearch,sauravmondallive/elasticsearch,ThalaivaStars/OrgRepo1,JervyShi/elasticsearch,obourgain/elasticsearch,YosuaMichael/elasticsearch,jango2015/elasticsearch,winstonewert/elasticsearch,opendatasoft/elasticsearch,iantruslove/elasticsearch,petabytedata/elasticsearch,thecocce/elasticsearch,caengcjd/elasticsearch,ckclark/elasticsearch,kevinkluge/elasticsearch,linglaiyao1314/elasticsearch,mm0/elasticsearch,mikemccand/elasticsearch,MaineC/elasticsearch,vrkansagara/elasticsearch,jeteve/elasticsearch,gfyoung/elasticsearch,fernandozhu/elasticsearch,weipinghe/elasticsearch,avikurapati/elasticsearch,zeroctu/elasticsearch,Charlesdong/elasticsearch,strapdata/elassandra-test,vorce/es-metrics,lmtwga/elasticsearch,mortonsykes/elasticsearch,chirilo/elasticsearch,xuzha/elasticsearch,ZTE-PaaS/elasticsearch,hanst/elasticsearch,mrorii/elasticsearch,kimchy/elasticsearch,kevinkluge/elasticsearch,acchen97/elasticsearch,schonfeld/elasticsearch,LewayneNaidoo/elasticsearch,ea
sonC/elasticsearch,fooljohnny/elasticsearch,petmit/elasticsearch,kkirsche/elasticsearch,YosuaMichael/elasticsearch,fekaputra/elasticsearch,ouyangkongtong/elasticsearch,Ansh90/elasticsearch,lydonchandra/elasticsearch,mmaracic/elasticsearch,khiraiwa/elasticsearch,avikurapati/elasticsearch,naveenhooda2000/elasticsearch,tebriel/elasticsearch,vvcephei/elasticsearch,brwe/elasticsearch,AndreKR/elasticsearch,mnylen/elasticsearch,mohsinh/elasticsearch,acchen97/elasticsearch,btiernay/elasticsearch,fforbeck/elasticsearch,zhaocloud/elasticsearch,lightslife/elasticsearch,zeroctu/elasticsearch,elasticdog/elasticsearch,milodky/elasticsearch,nellicus/elasticsearch,xpandan/elasticsearch,javachengwc/elasticsearch,sc0ttkclark/elasticsearch,fubuki/elasticsearch,anti-social/elasticsearch,salyh/elasticsearch,aglne/elasticsearch,springning/elasticsearch,amaliujia/elasticsearch,schonfeld/elasticsearch,hanst/elasticsearch,pozhidaevak/elasticsearch,himanshuag/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,tcucchietti/elasticsearch,Rygbee/elasticsearch,achow/elasticsearch,Ansh90/elasticsearch,MisterAndersen/elasticsearch,achow/elasticsearch,mute/elasticsearch,chrismwendt/elasticsearch,tkssharma/elasticsearch,phani546/elasticsearch,yongminxia/elasticsearch,mohit/elasticsearch,anti-social/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,rlugojr/elasticsearch,abibell/elasticsearch,wuranbo/elasticsearch,strapdata/elassandra,JervyShi/elasticsearch,Flipkart/elasticsearch,sdauletau/elasticsearch,MjAbuz/elasticsearch,mrorii/elasticsearch,areek/elasticsearch,truemped/elasticsearch,AndreKR/elasticsearch,rhoml/elasticsearch,javachengwc/elasticsearch,fekaputra/elasticsearch,hydro2k/elasticsearch,dantuffery/elasticsearch,hirdesh2008/elasticsearch,truemped/elasticsearch,luiseduardohdbackup/elasticsearch,uschindler/elasticsearch,queirozfcom/elasticsearch,Chhunlong/elasticsearch,ZTE-PaaS/elasticsearch,salyh/elasticsearch,dongaihua/highlight-elasticsearch,VukDukic/elasticsearch,mbrukman/elasticsearch,dj
schny/elasticsearch,Brijeshrpatel9/elasticsearch,MichaelLiZhou/elasticsearch,KimTaehee/elasticsearch,karthikjaps/elasticsearch,KimTaehee/elasticsearch,geidies/elasticsearch,beiske/elasticsearch,fred84/elasticsearch,Flipkart/elasticsearch,dongaihua/highlight-elasticsearch,chirilo/elasticsearch,ThiagoGarciaAlves/elasticsearch,himanshuag/elasticsearch,ydsakyclguozi/elasticsearch,luiseduardohdbackup/elasticsearch,artnowo/elasticsearch,micpalmia/elasticsearch,huypx1292/elasticsearch,umeshdangat/elasticsearch,humandb/elasticsearch,TonyChai24/ESSource,vroyer/elasticassandra,Collaborne/elasticsearch,Shepard1212/elasticsearch,infusionsoft/elasticsearch,ESamir/elasticsearch,fekaputra/elasticsearch,amit-shar/elasticsearch,jbertouch/elasticsearch,acchen97/elasticsearch,AshishThakur/elasticsearch,xpandan/elasticsearch,markwalkom/elasticsearch,socialrank/elasticsearch,hafkensite/elasticsearch,wittyameta/elasticsearch,iantruslove/elasticsearch,franklanganke/elasticsearch,Fsero/elasticsearch,GlenRSmith/elasticsearch,jaynblue/elasticsearch,MjAbuz/elasticsearch,wenpos/elasticsearch,xpandan/elasticsearch,pozhidaevak/elasticsearch,kalburgimanjunath/elasticsearch,Ansh90/elasticsearch,PhaedrusTheGreek/elasticsearch,wimvds/elasticsearch,Chhunlong/elasticsearch,vvcephei/elasticsearch,khiraiwa/elasticsearch,abhijitiitr/es,GlenRSmith/elasticsearch,bawse/elasticsearch,lmtwga/elasticsearch,Siddartha07/elasticsearch,anti-social/elasticsearch,thecocce/elasticsearch,pablocastro/elasticsearch,gmarz/elasticsearch,bestwpw/elasticsearch,libosu/elasticsearch,Ansh90/elasticsearch,yuy168/elasticsearch,andrejserafim/elasticsearch,NBSW/elasticsearch,dongjoon-hyun/elasticsearch,gingerwizard/elasticsearch,mnylen/elasticsearch,TonyChai24/ESSource,weipinghe/elasticsearch,artnowo/elasticsearch,robin13/elasticsearch,kunallimaye/elasticsearch,btiernay/elasticsearch,boliza/elasticsearch,opendatasoft/elasticsearch,ricardocerq/elasticsearch,iantruslove/elasticsearch,s1monw/elasticsearch,henakamaMSFT/elasticsearch,p
alecur/elasticsearch,libosu/elasticsearch,weipinghe/elasticsearch,yynil/elasticsearch,alexkuk/elasticsearch,btiernay/elasticsearch,wenpos/elasticsearch,girirajsharma/elasticsearch,andrestc/elasticsearch,slavau/elasticsearch,uschindler/elasticsearch,achow/elasticsearch,ouyangkongtong/elasticsearch,lydonchandra/elasticsearch,koxa29/elasticsearch,huanzhong/elasticsearch,heng4fun/elasticsearch,dpursehouse/elasticsearch,yongminxia/elasticsearch,aparo/elasticsearch,skearns64/elasticsearch,elancom/elasticsearch,heng4fun/elasticsearch,ZTE-PaaS/elasticsearch,KimTaehee/elasticsearch,rajanm/elasticsearch,mbrukman/elasticsearch,vietlq/elasticsearch,mute/elasticsearch,sjohnr/elasticsearch,sneivandt/elasticsearch,bestwpw/elasticsearch,tsohil/elasticsearch,ThalaivaStars/OrgRepo1,queirozfcom/elasticsearch,feiqitian/elasticsearch,HarishAtGitHub/elasticsearch,amaliujia/elasticsearch,slavau/elasticsearch,mkis-/elasticsearch,Collaborne/elasticsearch,spiegela/elasticsearch,mnylen/elasticsearch,sposam/elasticsearch,dataduke/elasticsearch,btiernay/elasticsearch,sposam/elasticsearch,LewayneNaidoo/elasticsearch,vorce/es-metrics,kcompher/elasticsearch,szroland/elasticsearch,djschny/elasticsearch,amit-shar/elasticsearch,libosu/elasticsearch,humandb/elasticsearch,wittyameta/elasticsearch,polyfractal/elasticsearch,Asimov4/elasticsearch,iacdingping/elasticsearch,MichaelLiZhou/elasticsearch,acchen97/elasticsearch,JSCooke/elasticsearch,mmaracic/elasticsearch,Stacey-Gammon/elasticsearch,strapdata/elassandra-test,tahaemin/elasticsearch,skearns64/elasticsearch,areek/elasticsearch,MichaelLiZhou/elasticsearch,wbowling/elasticsearch,Shekharrajak/elasticsearch,Flipkart/elasticsearch,iacdingping/elasticsearch,huanzhong/elasticsearch,alexbrasetvik/elasticsearch,lchennup/elasticsearch,uschindler/elasticsearch,Charlesdong/elasticsearch,kalburgimanjunath/elasticsearch,cnfire/elasticsearch-1,drewr/elasticsearch,JSCooke/elasticsearch,libosu/elasticsearch,zhiqinghuang/elasticsearch,xingguang2013/elasticsearch,al
exshadow007/elasticsearch,milodky/elasticsearch,kkirsche/elasticsearch,Microsoft/elasticsearch,opendatasoft/elasticsearch,cnfire/elasticsearch-1,apepper/elasticsearch,elancom/elasticsearch,tebriel/elasticsearch,njlawton/elasticsearch,F0lha/elasticsearch,awislowski/elasticsearch,mm0/elasticsearch,thecocce/elasticsearch,diendt/elasticsearch,ydsakyclguozi/elasticsearch,mjhennig/elasticsearch,jimhooker2002/elasticsearch,fubuki/elasticsearch,jsgao0/elasticsearch,mjhennig/elasticsearch,thecocce/elasticsearch,kevinkluge/elasticsearch,jimczi/elasticsearch,Stacey-Gammon/elasticsearch,markharwood/elasticsearch,schonfeld/elasticsearch,nezirus/elasticsearch,micpalmia/elasticsearch,ulkas/elasticsearch,elasticdog/elasticsearch,tahaemin/elasticsearch,Widen/elasticsearch,hechunwen/elasticsearch,vietlq/elasticsearch,hanst/elasticsearch,kalburgimanjunath/elasticsearch,caengcjd/elasticsearch,drewr/elasticsearch,salyh/elasticsearch,hanswang/elasticsearch,schonfeld/elasticsearch,s1monw/elasticsearch,EasonYi/elasticsearch,rmuir/elasticsearch,beiske/elasticsearch,linglaiyao1314/elasticsearch,kubum/elasticsearch,jaynblue/elasticsearch,andrestc/elasticsearch,mcku/elasticsearch,Uiho/elasticsearch,fernandozhu/elasticsearch,MetSystem/elasticsearch,strapdata/elassandra5-rc,tcucchietti/elasticsearch,GlenRSmith/elasticsearch,gingerwizard/elasticsearch,adrianbk/elasticsearch,tebriel/elasticsearch,abibell/elasticsearch,zhiqinghuang/elasticsearch,shreejay/elasticsearch,kevinkluge/elasticsearch,dylan8902/elasticsearch,himanshuag/elasticsearch,EasonYi/elasticsearch,xpandan/elasticsearch,MjAbuz/elasticsearch,bestwpw/elasticsearch,tsohil/elasticsearch,tsohil/elasticsearch,gingerwizard/elasticsearch,Helen-Zhao/elasticsearch,jango2015/elasticsearch,nilabhsagar/elasticsearch,obourgain/elasticsearch,overcome/elasticsearch,Shekharrajak/elasticsearch,jeteve/elasticsearch,knight1128/elasticsearch,rento19962/elasticsearch,Microsoft/elasticsearch,dataduke/elasticsearch,spiegela/elasticsearch,AleksKochev/elastics
earch,sjohnr/elasticsearch,dongjoon-hyun/elasticsearch,huypx1292/elasticsearch,Clairebi/ElasticsearchClone,SergVro/elasticsearch,nellicus/elasticsearch,mcku/elasticsearch,mm0/elasticsearch,andrewvc/elasticsearch,iantruslove/elasticsearch,s1monw/elasticsearch,kimimj/elasticsearch,Shekharrajak/elasticsearch,wenpos/elasticsearch,JSCooke/elasticsearch,rmuir/elasticsearch,dylan8902/elasticsearch,brandonkearby/elasticsearch,loconsolutions/elasticsearch,Charlesdong/elasticsearch,hechunwen/elasticsearch,coding0011/elasticsearch,strapdata/elassandra,boliza/elasticsearch,NBSW/elasticsearch,kkirsche/elasticsearch,lightslife/elasticsearch,fred84/elasticsearch,huanzhong/elasticsearch,Liziyao/elasticsearch,clintongormley/elasticsearch,wuranbo/elasticsearch,masterweb121/elasticsearch,thecocce/elasticsearch,weipinghe/elasticsearch,knight1128/elasticsearch,sarwarbhuiyan/elasticsearch,zhiqinghuang/elasticsearch,brwe/elasticsearch,geidies/elasticsearch,Flipkart/elasticsearch,likaiwalkman/elasticsearch,sarwarbhuiyan/elasticsearch,ImpressTV/elasticsearch,vinsonlou/elasticsearch,lks21c/elasticsearch,Charlesdong/elasticsearch,dylan8902/elasticsearch,palecur/elasticsearch,humandb/elasticsearch,clintongormley/elasticsearch,phani546/elasticsearch,adrianbk/elasticsearch,AshishThakur/elasticsearch,phani546/elasticsearch,ESamir/elasticsearch,ajhalani/elasticsearch,ouyangkongtong/elasticsearch,ricardocerq/elasticsearch,milodky/elasticsearch,MetSystem/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,luiseduardohdbackup/elasticsearch,mnylen/elasticsearch,fernandozhu/elasticsearch,onegambler/elasticsearch,tcucchietti/elasticsearch,kalimatas/elasticsearch,Helen-Zhao/elasticsearch,markllama/elasticsearch,MaineC/elasticsearch,GlenRSmith/elasticsearch,overcome/elasticsearch,dantuffery/elasticsearch,trangvh/elasticsearch,Kakakakakku/elasticsearch,pablocastro/elasticsearch,camilojd/elasticsearch,zkidkid/elasticsearch,xuzha/elasticsearch,fforbeck/elasticsearch,mgalushka/elasticsearch,anti-social/elastic
search,ImpressTV/elasticsearch,alexksikes/elasticsearch,fubuki/elasticsearch,fernandozhu/elasticsearch,andrejserafim/elasticsearch,snikch/elasticsearch,kubum/elasticsearch,mohit/elasticsearch,alexbrasetvik/elasticsearch,F0lha/elasticsearch,yynil/elasticsearch,Kakakakakku/elasticsearch,Microsoft/elasticsearch,nazarewk/elasticsearch,pozhidaevak/elasticsearch,shreejay/elasticsearch,girirajsharma/elasticsearch,aglne/elasticsearch,infusionsoft/elasticsearch,brandonkearby/elasticsearch,knight1128/elasticsearch,episerver/elasticsearch,qwerty4030/elasticsearch,combinatorist/elasticsearch,onegambler/elasticsearch,iantruslove/elasticsearch,beiske/elasticsearch,sscarduzio/elasticsearch,nomoa/elasticsearch,sdauletau/elasticsearch,andrestc/elasticsearch,gmarz/elasticsearch,18098924759/elasticsearch,tkssharma/elasticsearch,Brijeshrpatel9/elasticsearch,jimhooker2002/elasticsearch,snikch/elasticsearch,dylan8902/elasticsearch,liweinan0423/elasticsearch,amaliujia/elasticsearch,martinstuga/elasticsearch,abibell/elasticsearch,sauravmondallive/elasticsearch,vrkansagara/elasticsearch,ThalaivaStars/OrgRepo1,sarwarbhuiyan/elasticsearch,nellicus/elasticsearch,acchen97/elasticsearch,iacdingping/elasticsearch,mute/elasticsearch,rento19962/elasticsearch,socialrank/elasticsearch,Widen/elasticsearch,naveenhooda2000/elasticsearch,jchampion/elasticsearch,yongminxia/elasticsearch,pritishppai/elasticsearch,ESamir/elasticsearch,hanst/elasticsearch,sc0ttkclark/elasticsearch,jeteve/elasticsearch,zhaocloud/elasticsearch,cwurm/elasticsearch,ulkas/elasticsearch,kingaj/elasticsearch,PhaedrusTheGreek/elasticsearch,uboness/elasticsearch,micpalmia/elasticsearch,artnowo/elasticsearch,gfyoung/elasticsearch,a2lin/elasticsearch,LeoYao/elasticsearch,strapdata/elassandra-test,henakamaMSFT/elasticsearch,alexshadow007/elasticsearch,jpountz/elasticsearch,beiske/elasticsearch,rento19962/elasticsearch,jaynblue/elasticsearch,infusionsoft/elasticsearch,ulkas/elasticsearch,rhoml/elasticsearch,camilojd/elasticsearch,davidvg
albraith/elasticsearch,Liziyao/elasticsearch,vingupta3/elasticsearch,cnfire/elasticsearch-1,codebunt/elasticsearch,szroland/elasticsearch,zhaocloud/elasticsearch,markllama/elasticsearch,adrianbk/elasticsearch,yuy168/elasticsearch,mikemccand/elasticsearch,skearns64/elasticsearch,smflorentino/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,18098924759/elasticsearch,kcompher/elasticsearch,pablocastro/elasticsearch,socialrank/elasticsearch,skearns64/elasticsearch,SergVro/elasticsearch,raishiv/elasticsearch,micpalmia/elasticsearch,tkssharma/elasticsearch,libosu/elasticsearch,gmarz/elasticsearch,markllama/elasticsearch,kcompher/elasticsearch,nknize/elasticsearch,MichaelLiZhou/elasticsearch,SergVro/elasticsearch,nomoa/elasticsearch,zhiqinghuang/elasticsearch,Collaborne/elasticsearch,Microsoft/elasticsearch,a2lin/elasticsearch,bawse/elasticsearch,pritishppai/elasticsearch,s1monw/elasticsearch,kenshin233/elasticsearch,truemped/elasticsearch,PhaedrusTheGreek/elasticsearch,ThiagoGarciaAlves/elasticsearch,markharwood/elasticsearch,kunallimaye/elasticsearch,Chhunlong/elasticsearch,iamjakob/elasticsearch,Widen/elasticsearch,pranavraman/elasticsearch,AleksKochev/elasticsearch,mrorii/elasticsearch,kimimj/elasticsearch,umeshdangat/elasticsearch,PhaedrusTheGreek/elasticsearch,obourgain/elasticsearch,MisterAndersen/elasticsearch,strapdata/elassandra,huypx1292/elasticsearch,MaineC/elasticsearch,andrestc/elasticsearch,kubum/elasticsearch,anti-social/elasticsearch,zeroctu/elasticsearch,elancom/elasticsearch,dataduke/elasticsearch,kunallimaye/elasticsearch,szroland/elasticsearch,queirozfcom/elasticsearch,Brijeshrpatel9/elasticsearch,masaruh/elasticsearch,synhershko/elasticsearch,markwalkom/elasticsearch,Charlesdong/elasticsearch,jimczi/elasticsearch,hafkensite/elasticsearch,yanjunh/elasticsearch,iacdingping/elasticsearch,Siddartha07/elasticsearch,Fsero/elasticsearch,abhijitiitr/es,mnylen/elasticsearch,ThiagoGarciaAlves/elasticsearch,elasticdog/elasticsearch,easonC/elasticsearch,hafkens
ite/elasticsearch,mikemccand/elasticsearch,rhoml/elasticsearch,nrkkalyan/elasticsearch,overcome/elasticsearch,mm0/elasticsearch,socialrank/elasticsearch,bestwpw/elasticsearch,wangtuo/elasticsearch,AleksKochev/elasticsearch,janmejay/elasticsearch,winstonewert/elasticsearch,chrismwendt/elasticsearch,mgalushka/elasticsearch,lchennup/elasticsearch,lks21c/elasticsearch,sreeramjayan/elasticsearch,caengcjd/elasticsearch,markwalkom/elasticsearch,rento19962/elasticsearch,sjohnr/elasticsearch,fooljohnny/elasticsearch,markharwood/elasticsearch,tsohil/elasticsearch,jpountz/elasticsearch,VukDukic/elasticsearch,chrismwendt/elasticsearch,wenpos/elasticsearch,ckclark/elasticsearch,pritishppai/elasticsearch,zeroctu/elasticsearch,dantuffery/elasticsearch,fekaputra/elasticsearch,a2lin/elasticsearch,Stacey-Gammon/elasticsearch,18098924759/elasticsearch,spiegela/elasticsearch,bawse/elasticsearch,xingguang2013/elasticsearch,tkssharma/elasticsearch,lydonchandra/elasticsearch,karthikjaps/elasticsearch,mortonsykes/elasticsearch,mkis-/elasticsearch,chirilo/elasticsearch,btiernay/elasticsearch,gfyoung/elasticsearch,IanvsPoplicola/elasticsearch,milodky/elasticsearch,mjason3/elasticsearch,rhoml/elasticsearch,wangyuxue/elasticsearch,javachengwc/elasticsearch,achow/elasticsearch,jango2015/elasticsearch,nomoa/elasticsearch,aparo/elasticsearch,dongjoon-hyun/elasticsearch,himanshuag/elasticsearch,LewayneNaidoo/elasticsearch,myelin/elasticsearch,salyh/elasticsearch,hafkensite/elasticsearch,ouyangkongtong/elasticsearch,mgalushka/elasticsearch,pozhidaevak/elasticsearch,janmejay/elasticsearch,i-am-Nathan/elasticsearch,hafkensite/elasticsearch,Rygbee/elasticsearch,LeoYao/elasticsearch,Widen/elasticsearch,synhershko/elasticsearch,karthikjaps/elasticsearch,HarishAtGitHub/elasticsearch,wittyameta/elasticsearch,amit-shar/elasticsearch,mapr/elasticsearch,cwurm/elasticsearch,MichaelLiZhou/elasticsearch,AndreKR/elasticsearch,slavau/elasticsearch,mjason3/elasticsearch,apepper/elasticsearch,jimhooker2002/elastics
earch,areek/elasticsearch,davidvgalbraith/elasticsearch,areek/elasticsearch,vroyer/elassandra,NBSW/elasticsearch,elancom/elasticsearch,awislowski/elasticsearch,mgalushka/elasticsearch,golubev/elasticsearch,mikemccand/elasticsearch,JackyMai/elasticsearch,camilojd/elasticsearch,sneivandt/elasticsearch,rlugojr/elasticsearch,lzo/elasticsearch-1,C-Bish/elasticsearch,Siddartha07/elasticsearch,hydro2k/elasticsearch,mohsinh/elasticsearch,wittyameta/elasticsearch,lchennup/elasticsearch,kaneshin/elasticsearch,queirozfcom/elasticsearch,shreejay/elasticsearch,MisterAndersen/elasticsearch,jchampion/elasticsearch,wbowling/elasticsearch,areek/elasticsearch,Clairebi/ElasticsearchClone,nezirus/elasticsearch,queirozfcom/elasticsearch,ajhalani/elasticsearch,camilojd/elasticsearch,linglaiyao1314/elasticsearch,YosuaMichael/elasticsearch,andrejserafim/elasticsearch,nilabhsagar/elasticsearch,zhaocloud/elasticsearch,phani546/elasticsearch,aglne/elasticsearch,codebunt/elasticsearch,C-Bish/elasticsearch,mjhennig/elasticsearch,diendt/elasticsearch,shreejay/elasticsearch,hirdesh2008/elasticsearch,tahaemin/elasticsearch,aparo/elasticsearch,springning/elasticsearch,C-Bish/elasticsearch,wimvds/elasticsearch,winstonewert/elasticsearch,onegambler/elasticsearch,YosuaMichael/elasticsearch,pozhidaevak/elasticsearch,kcompher/elasticsearch,martinstuga/elasticsearch,huanzhong/elasticsearch,wbowling/elasticsearch,fforbeck/elasticsearch,PhaedrusTheGreek/elasticsearch,koxa29/elasticsearch,masterweb121/elasticsearch,rajanm/elasticsearch,raishiv/elasticsearch,mgalushka/elasticsearch,avikurapati/elasticsearch,heng4fun/elasticsearch,kenshin233/elasticsearch,AshishThakur/elasticsearch,dylan8902/elasticsearch,aparo/elasticsearch,amit-shar/elasticsearch,Chhunlong/elasticsearch,kunallimaye/elasticsearch,vrkansagara/elasticsearch,alexshadow007/elasticsearch,ImpressTV/elasticsearch,MjAbuz/elasticsearch,hydro2k/elasticsearch,polyfractal/elasticsearch,MichaelLiZhou/elasticsearch,Brijeshrpatel9/elasticsearch,truemped/el
asticsearch,phani546/elasticsearch,vvcephei/elasticsearch,scorpionvicky/elasticsearch,jimczi/elasticsearch,Stacey-Gammon/elasticsearch,jchampion/elasticsearch,combinatorist/elasticsearch,golubev/elasticsearch,robin13/elasticsearch,slavau/elasticsearch,lks21c/elasticsearch,Collaborne/elasticsearch,combinatorist/elasticsearch,gmarz/elasticsearch,EasonYi/elasticsearch,kkirsche/elasticsearch,wayeast/elasticsearch,mohsinh/elasticsearch,mortonsykes/elasticsearch,szroland/elasticsearch,masterweb121/elasticsearch,GlenRSmith/elasticsearch,wayeast/elasticsearch,vrkansagara/elasticsearch,wittyameta/elasticsearch,nrkkalyan/elasticsearch,luiseduardohdbackup/elasticsearch,springning/elasticsearch,jimhooker2002/elasticsearch,sposam/elasticsearch,smflorentino/elasticsearch,lmtwga/elasticsearch,pritishppai/elasticsearch,wangyuxue/elasticsearch,fred84/elasticsearch,njlawton/elasticsearch,Liziyao/elasticsearch,ivansun1010/elasticsearch,Uiho/elasticsearch,zkidkid/elasticsearch,weipinghe/elasticsearch,apepper/elasticsearch,brandonkearby/elasticsearch,Liziyao/elasticsearch,marcuswr/elasticsearch-dateline,fooljohnny/elasticsearch,yynil/elasticsearch,onegambler/elasticsearch,Siddartha07/elasticsearch,kenshin233/elasticsearch,wimvds/elasticsearch,peschlowp/elasticsearch,tebriel/elasticsearch,Uiho/elasticsearch,nazarewk/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,lightslife/elasticsearch,loconsolutions/elasticsearch,vietlq/elasticsearch,kaneshin/elasticsearch,bestwpw/elasticsearch,mortonsykes/elasticsearch,ThiagoGarciaAlves/elasticsearch,palecur/elasticsearch,mapr/elasticsearch,liweinan0423/elasticsearch,lmenezes/elasticsearch,18098924759/elasticsearch,nellicus/elasticsearch,schonfeld/elasticsearch,kingaj/elasticsearch,areek/elasticsearch,uboness/elasticsearch,pranavraman/elasticsearch,yongminxia/elasticsearch,LeoYao/elasticsearch,vietlq/elasticsearch,nomoa/elasticsearch,weipinghe/elasticsearch,scottsom/elasticsearch,feiqitian/elasticsearch,chirilo/elasticsearch,janmejay/elasticsearch
,feiqitian/elasticsearch,martinstuga/elasticsearch,VukDukic/elasticsearch,amaliujia/elasticsearch,mrorii/elasticsearch,apepper/elasticsearch,TonyChai24/ESSource,vvcephei/elasticsearch,jpountz/elasticsearch,JackyMai/elasticsearch,kimchy/elasticsearch,strapdata/elassandra,iamjakob/elasticsearch,i-am-Nathan/elasticsearch,sjohnr/elasticsearch,wimvds/elasticsearch,jpountz/elasticsearch,Charlesdong/elasticsearch,jw0201/elastic,mbrukman/elasticsearch,abhijitiitr/es,Kakakakakku/elasticsearch,humandb/elasticsearch,hechunwen/elasticsearch,springning/elasticsearch,sneivandt/elasticsearch,mcku/elasticsearch,AshishThakur/elasticsearch,vvcephei/elasticsearch,Asimov4/elasticsearch,petabytedata/elasticsearch,clintongormley/elasticsearch,phani546/elasticsearch,mikemccand/elasticsearch,JackyMai/elasticsearch,trangvh/elasticsearch,iacdingping/elasticsearch,iamjakob/elasticsearch,girirajsharma/elasticsearch,sc0ttkclark/elasticsearch,jeteve/elasticsearch,hirdesh2008/elasticsearch,sreeramjayan/elasticsearch,strapdata/elassandra-test,myelin/elasticsearch,tkssharma/elasticsearch,ThalaivaStars/OrgRepo1,clintongormley/elasticsearch,Chhunlong/elasticsearch,nezirus/elasticsearch,episerver/elasticsearch,Fsero/elasticsearch,yanjunh/elasticsearch,EasonYi/elasticsearch,Chhunlong/elasticsearch,EasonYi/elasticsearch,henakamaMSFT/elasticsearch,Siddartha07/elasticsearch,smflorentino/elasticsearch,HarishAtGitHub/elasticsearch,kubum/elasticsearch,myelin/elasticsearch,milodky/elasticsearch,vietlq/elasticsearch,hydro2k/elasticsearch,brwe/elasticsearch,easonC/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,rajanm/elasticsearch,mm0/elasticsearch,franklanganke/elasticsearch,achow/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,marcuswr/elasticsearch-dateline,ZTE-PaaS/elasticsearch,pritishppai/elasticsearch,likaiwalkman/elasticsearch,nazarewk/elasticsearch,umeshdangat/elasticsearch,yuy168/elasticsearch,drewr/elasticsearch,lzo/elasticsearch-1,amaliujia/elasticsearch,nknize/elasticsearch,adrianbk/elastics
earch,schonfeld/elasticsearch,geidies/elasticsearch,sscarduzio/elasticsearch,infusionsoft/elasticsearch,huanzhong/elasticsearch,pranavraman/elasticsearch,sdauletau/elasticsearch,jeteve/elasticsearch,kunallimaye/elasticsearch,uboness/elasticsearch,lightslife/elasticsearch,jprante/elasticsearch,andrewvc/elasticsearch,glefloch/elasticsearch,alexkuk/elasticsearch,tahaemin/elasticsearch,KimTaehee/elasticsearch,mrorii/elasticsearch,rento19962/elasticsearch,petmit/elasticsearch,hanst/elasticsearch,bawse/elasticsearch,lchennup/elasticsearch,tsohil/elasticsearch,vingupta3/elasticsearch,nknize/elasticsearch,avikurapati/elasticsearch,easonC/elasticsearch,YosuaMichael/elasticsearch,StefanGor/elasticsearch,djschny/elasticsearch,mkis-/elasticsearch,gingerwizard/elasticsearch,achow/elasticsearch,vingupta3/elasticsearch,mmaracic/elasticsearch,uschindler/elasticsearch,i-am-Nathan/elasticsearch,nrkkalyan/elasticsearch,yanjunh/elasticsearch,hanswang/elasticsearch,girirajsharma/elasticsearch,mrorii/elasticsearch,nknize/elasticsearch,uboness/elasticsearch,djschny/elasticsearch,sreeramjayan/elasticsearch,bawse/elasticsearch,khiraiwa/elasticsearch,YosuaMichael/elasticsearch,alexksikes/elasticsearch,slavau/elasticsearch,lzo/elasticsearch-1,alexkuk/elasticsearch,strapdata/elassandra5-rc,JSCooke/elasticsearch,hechunwen/elasticsearch,lzo/elasticsearch-1,JSCooke/elasticsearch,hechunwen/elasticsearch,marcuswr/elasticsearch-dateline,kevinkluge/elasticsearch,xuzha/elasticsearch,adrianbk/elasticsearch,scottsom/elasticsearch,iacdingping/elasticsearch,IanvsPoplicola/elasticsearch,truemped/elasticsearch,lmenezes/elasticsearch,njlawton/elasticsearch,luiseduardohdbackup/elasticsearch,coding0011/elasticsearch,szroland/elasticsearch,kalburgimanjunath/elasticsearch,jsgao0/elasticsearch,sposam/elasticsearch,wbowling/elasticsearch,petabytedata/elasticsearch,rmuir/elasticsearch,HarishAtGitHub/elasticsearch,vorce/es-metrics,hirdesh2008/elasticsearch,wimvds/elasticsearch,dantuffery/elasticsearch,Shepard1212/el
asticsearch,yongminxia/elasticsearch,scorpionvicky/elasticsearch,Uiho/elasticsearch,xpandan/elasticsearch,Asimov4/elasticsearch,andrejserafim/elasticsearch,sarwarbhuiyan/elasticsearch,easonC/elasticsearch,ulkas/elasticsearch,Stacey-Gammon/elasticsearch,knight1128/elasticsearch,rento19962/elasticsearch,Clairebi/ElasticsearchClone,lzo/elasticsearch-1,polyfractal/elasticsearch,tebriel/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,lzo/elasticsearch-1,djschny/elasticsearch,lmtwga/elasticsearch,wenpos/elasticsearch,codebunt/elasticsearch,EasonYi/elasticsearch,fubuki/elasticsearch,18098924759/elasticsearch,Kakakakakku/elasticsearch,andrestc/elasticsearch,mkis-/elasticsearch,wittyameta/elasticsearch,ricardocerq/elasticsearch,nilabhsagar/elasticsearch,apepper/elasticsearch,trangvh/elasticsearch,mohit/elasticsearch,pranavraman/elasticsearch,nellicus/elasticsearch,brwe/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,sscarduzio/elasticsearch,artnowo/elasticsearch,lmtwga/elasticsearch,StefanGor/elasticsearch,fekaputra/elasticsearch,hafkensite/elasticsearch,peschlowp/elasticsearch,sdauletau/elasticsearch,franklanganke/elasticsearch,lchennup/elasticsearch,cnfire/elasticsearch-1,jpountz/elasticsearch,jimczi/elasticsearch,MetSystem/elasticsearch,ivansun1010/elasticsearch,SergVro/elasticsearch,yuy168/elasticsearch,jsgao0/elasticsearch,Charlesdong/elasticsearch,Widen/elasticsearch,onegambler/elasticsearch,jprante/elasticsearch,Fsero/elasticsearch,pablocastro/elasticsearch,Brijeshrpatel9/elasticsearch,palecur/elasticsearch,fforbeck/elasticsearch,amit-shar/elasticsearch,chirilo/elasticsearch,dataduke/elasticsearch,episerver/elasticsearch,scorpionvicky/elasticsearch,queirozfcom/elasticsearch,i-am-Nathan/elasticsearch,vingupta3/elasticsearch,trangvh/elasticsearch,xpandan/elasticsearch,lks21c/elasticsearch,jbertouch/elasticsearch,vorce/es-metrics,rajanm/elasticsearch,jbertouch/elasticsearch,markllama/elasticsearch,AndreKR/elasticsearch,ckclark/elasticsearch,fekaputra/elasticsearch,
davidvgalbraith/elasticsearch,elancom/elasticsearch,hanswang/elasticsearch,feiqitian/elasticsearch,luiseduardohdbackup/elasticsearch,Helen-Zhao/elasticsearch,ThalaivaStars/OrgRepo1,JackyMai/elasticsearch,pranavraman/elasticsearch,Clairebi/ElasticsearchClone,winstonewert/elasticsearch,abhijitiitr/es,ckclark/elasticsearch,kimimj/elasticsearch,KimTaehee/elasticsearch,wangtuo/elasticsearch,henakamaMSFT/elasticsearch,huypx1292/elasticsearch,nellicus/elasticsearch,kimchy/elasticsearch,kcompher/elasticsearch,abibell/elasticsearch,LeoYao/elasticsearch,nellicus/elasticsearch,18098924759/elasticsearch,nazarewk/elasticsearch,drewr/elasticsearch,fubuki/elasticsearch,nilabhsagar/elasticsearch,Flipkart/elasticsearch,obourgain/elasticsearch,kalburgimanjunath/elasticsearch,MisterAndersen/elasticsearch,TonyChai24/ESSource,linglaiyao1314/elasticsearch,apepper/elasticsearch,MetSystem/elasticsearch,caengcjd/elasticsearch,jw0201/elastic,nilabhsagar/elasticsearch,TonyChai24/ESSource,iantruslove/elasticsearch,jsgao0/elasticsearch,golubev/elasticsearch,karthikjaps/elasticsearch,scottsom/elasticsearch,karthikjaps/elasticsearch,sneivandt/elasticsearch,gmarz/elasticsearch,dpursehouse/elasticsearch,masterweb121/elasticsearch,vvcephei/elasticsearch,wuranbo/elasticsearch,Asimov4/elasticsearch,franklanganke/elasticsearch,Fsero/elasticsearch,winstonewert/elasticsearch,jimhooker2002/elasticsearch,ouyangkongtong/elasticsearch,szroland/elasticsearch,episerver/elasticsearch,elasticdog/elasticsearch,jchampion/elasticsearch,cwurm/elasticsearch,mjhennig/elasticsearch,xingguang2013/elasticsearch,HonzaKral/elasticsearch,hanswang/elasticsearch,alexshadow007/elasticsearch,yuy168/elasticsearch,jango2015/elasticsearch,alexksikes/elasticsearch,ydsakyclguozi/elasticsearch,aglne/elasticsearch,naveenhooda2000/elasticsearch,ajhalani/elasticsearch,karthikjaps/elasticsearch,loconsolutions/elasticsearch,Liziyao/elasticsearch,Siddartha07/elasticsearch,slavau/elasticsearch,AndreKR/elasticsearch,NBSW/elasticsearch,Brijes
hrpatel9/elasticsearch,easonC/elasticsearch,Collaborne/elasticsearch,jeteve/elasticsearch,tahaemin/elasticsearch,jbertouch/elasticsearch,spiegela/elasticsearch,TonyChai24/ESSource,maddin2016/elasticsearch,rento19962/elasticsearch,ESamir/elasticsearch,lydonchandra/elasticsearch,vroyer/elassandra,ulkas/elasticsearch,salyh/elasticsearch,drewr/elasticsearch,dylan8902/elasticsearch,mapr/elasticsearch,huanzhong/elasticsearch,fforbeck/elasticsearch,VukDukic/elasticsearch,hanst/elasticsearch,MetSystem/elasticsearch,hirdesh2008/elasticsearch,codebunt/elasticsearch,acchen97/elasticsearch,strapdata/elassandra5-rc,loconsolutions/elasticsearch,martinstuga/elasticsearch,zkidkid/elasticsearch,kimimj/elasticsearch,feiqitian/elasticsearch,mbrukman/elasticsearch,jbertouch/elasticsearch,polyfractal/elasticsearch,liweinan0423/elasticsearch,mnylen/elasticsearch,humandb/elasticsearch,vorce/es-metrics,C-Bish/elasticsearch,combinatorist/elasticsearch,C-Bish/elasticsearch,MaineC/elasticsearch,MjAbuz/elasticsearch,Shekharrajak/elasticsearch,petabytedata/elasticsearch,khiraiwa/elasticsearch,lightslife/elasticsearch,vrkansagara/elasticsearch,coding0011/elasticsearch,sarwarbhuiyan/elasticsearch,scorpionvicky/elasticsearch,alexksikes/elasticsearch,springning/elasticsearch,ouyangkongtong/elasticsearch,JervyShi/elasticsearch,Widen/elasticsearch,vrkansagara/elasticsearch,rajanm/elasticsearch,henakamaMSFT/elasticsearch,fernandozhu/elasticsearch,ESamir/elasticsearch,mute/elasticsearch,jimczi/elasticsearch,robin13/elasticsearch,18098924759/elasticsearch,jw0201/elastic,peschlowp/elasticsearch,kenshin233/elasticsearch,opendatasoft/elasticsearch,LewayneNaidoo/elasticsearch,masaruh/elasticsearch,SergVro/elasticsearch,sarwarbhuiyan/elasticsearch,yongminxia/elasticsearch,kenshin233/elasticsearch,snikch/elasticsearch,xingguang2013/elasticsearch,Shekharrajak/elasticsearch,sscarduzio/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,mjason3/elasticsearch,milodky/elasticsearch,aglne/elasticsearch,Liziyao/elast
icsearch,Rygbee/elasticsearch,ulkas/elasticsearch,HarishAtGitHub/elasticsearch,MjAbuz/elasticsearch,xingguang2013/elasticsearch,knight1128/elasticsearch,mkis-/elasticsearch,HarishAtGitHub/elasticsearch,mgalushka/elasticsearch,KimTaehee/elasticsearch,kalburgimanjunath/elasticsearch,spiegela/elasticsearch,qwerty4030/elasticsearch,obourgain/elasticsearch,AleksKochev/elasticsearch,tcucchietti/elasticsearch,abibell/elasticsearch,sarwarbhuiyan/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,mcku/elasticsearch,abibell/elasticsearch,Siddartha07/elasticsearch,jbertouch/elasticsearch,amit-shar/elasticsearch,lydonchandra/elasticsearch,peschlowp/elasticsearch,franklanganke/elasticsearch,YosuaMichael/elasticsearch,VukDukic/elasticsearch,linglaiyao1314/elasticsearch,alexkuk/elasticsearch,micpalmia/elasticsearch,PhaedrusTheGreek/elasticsearch,andrejserafim/elasticsearch,markwalkom/elasticsearch,petabytedata/elasticsearch,iantruslove/elasticsearch,girirajsharma/elasticsearch,acchen97/elasticsearch,jaynblue/elasticsearch,skearns64/elasticsearch,truemped/elasticsearch,sc0ttkclark/elasticsearch,humandb/elasticsearch,Helen-Zhao/elasticsearch,Fsero/elasticsearch | /*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.support.replication;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.WriteConsistencyLevel;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.action.ValidateActions.addValidationError;
/**
 * Base class for requests of write operations that are executed on a primary shard and then
 * replicated to its replica shards (index, delete, bulk shard, ...).
 *
 * <p>Holds the state shared by all such requests: the target index, the operation timeout,
 * the {@link ReplicationType} and the {@link WriteConsistencyLevel}. The type parameter
 * {@code T} is the concrete subclass, which lets every fluent setter return the subclass
 * type for chaining.
 */
public abstract class ShardReplicationOperationRequest<T extends ShardReplicationOperationRequest> extends ActionRequest<T> {

    /** Default time to wait for the operation to be performable before failing: one minute. */
    public static final TimeValue DEFAULT_TIMEOUT = new TimeValue(1, TimeUnit.MINUTES);

    protected TimeValue timeout = DEFAULT_TIMEOUT;

    protected String index;

    // Local-only flag (never serialized, see readFrom/writeTo): whether the operation should
    // run on a separate thread when it is executed on this node.
    private boolean threadedOperation = true;

    private ReplicationType replicationType = ReplicationType.DEFAULT;

    private WriteConsistencyLevel consistencyLevel = WriteConsistencyLevel.DEFAULT;

    protected ShardReplicationOperationRequest() {
    }

    /** Creates a request that inherits from the given originating request. */
    public ShardReplicationOperationRequest(ActionRequest request) {
        super(request);
    }

    /**
     * Copy constructor: copies the timeout, index, threading flag, replication type and
     * consistency level of the given request. Note that it goes through the accessors
     * rather than raw field access (required for javac 7, see issue #2294).
     */
    public ShardReplicationOperationRequest(T request) {
        super(request);
        this.timeout = request.timeout();
        this.index = request.index();
        this.threadedOperation = request.operationThreaded();
        this.replicationType = request.replicationType();
        this.consistencyLevel = request.consistencyLevel();
    }

    /**
     * Controls if the operation will be executed on a separate thread when executed locally.
     */
    public final boolean operationThreaded() {
        return threadedOperation;
    }

    /**
     * Controls if the operation will be executed on a separate thread when executed locally. Defaults
     * to <tt>true</tt> when running in embedded mode.
     */
    @SuppressWarnings("unchecked")
    public final T operationThreaded(boolean threadedOperation) {
        this.threadedOperation = threadedOperation;
        return (T) this;
    }

    /**
     * A timeout to wait if the index operation can't be performed immediately. Defaults to <tt>1m</tt>.
     */
    @SuppressWarnings("unchecked")
    public final T timeout(TimeValue timeout) {
        this.timeout = timeout;
        return (T) this;
    }

    /**
     * A timeout to wait if the index operation can't be performed immediately. Defaults to <tt>1m</tt>.
     *
     * @param timeout a time value expression such as <tt>30s</tt> or <tt>1m</tt>
     */
    public final T timeout(String timeout) {
        return timeout(TimeValue.parseTimeValue(timeout, null));
    }

    /** Returns the configured operation timeout. */
    public TimeValue timeout() {
        return timeout;
    }

    /** Returns the index this operation targets. */
    public String index() {
        return this.index;
    }

    /** Sets the index this operation targets. */
    @SuppressWarnings("unchecked")
    public final T index(String index) {
        this.index = index;
        return (T) this;
    }

    /**
     * The replication type.
     */
    public ReplicationType replicationType() {
        return this.replicationType;
    }

    /**
     * Sets the replication type.
     */
    @SuppressWarnings("unchecked")
    public final T replicationType(ReplicationType replicationType) {
        this.replicationType = replicationType;
        return (T) this;
    }

    /**
     * Sets the replication type.
     */
    public final T replicationType(String replicationType) {
        return replicationType(ReplicationType.fromString(replicationType));
    }

    /** Returns the write consistency level required for this operation. */
    public WriteConsistencyLevel consistencyLevel() {
        return this.consistencyLevel;
    }

    /**
     * Sets the consistency level of write. Defaults to {@link org.elasticsearch.action.WriteConsistencyLevel#DEFAULT}
     */
    @SuppressWarnings("unchecked")
    public final T consistencyLevel(WriteConsistencyLevel consistencyLevel) {
        this.consistencyLevel = consistencyLevel;
        return (T) this;
    }

    /** Validates the request; an index is mandatory. */
    @Override
    public ActionRequestValidationException validate() {
        ActionRequestValidationException validationException = null;
        if (index == null) {
            validationException = addValidationError("index is missing", validationException);
        }
        return validationException;
    }

    // Wire order must mirror writeTo(): replicationType, consistencyLevel, timeout, index.
    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        replicationType = ReplicationType.fromId(in.readByte());
        consistencyLevel = WriteConsistencyLevel.fromId(in.readByte());
        timeout = TimeValue.readTimeValue(in);
        index = in.readString();
        // no need to serialize threaded* parameters, since they only matter locally
    }

    // Wire order must mirror readFrom().
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        out.writeByte(replicationType.id());
        out.writeByte(consistencyLevel.id());
        timeout.writeTo(out);
        out.writeString(index);
    }

    /**
     * Called before the request gets forked into a local thread.
     */
    public void beforeLocalFork() {
    }
}
| src/main/java/org/elasticsearch/action/support/replication/ShardReplicationOperationRequest.java | /*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.support.replication;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.WriteConsistencyLevel;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.action.ValidateActions.addValidationError;
/**
*
*/
public abstract class ShardReplicationOperationRequest<T extends ShardReplicationOperationRequest> extends ActionRequest<T> {
public static final TimeValue DEFAULT_TIMEOUT = new TimeValue(1, TimeUnit.MINUTES);
protected TimeValue timeout = DEFAULT_TIMEOUT;
protected String index;
private boolean threadedOperation = true;
private ReplicationType replicationType = ReplicationType.DEFAULT;
private WriteConsistencyLevel consistencyLevel = WriteConsistencyLevel.DEFAULT;
protected ShardReplicationOperationRequest() {
}
public ShardReplicationOperationRequest(ActionRequest request) {
super(request);
}
public ShardReplicationOperationRequest(T request) {
super(request);
this.timeout = request.timeout();
this.index = request.index();
this.threadedOperation = request.threadedOperation;
this.replicationType = request.replicationType();
this.consistencyLevel = request.consistencyLevel();
}
/**
* Controls if the operation will be executed on a separate thread when executed locally.
*/
public final boolean operationThreaded() {
return threadedOperation;
}
/**
* Controls if the operation will be executed on a separate thread when executed locally. Defaults
* to <tt>true</tt> when running in embedded mode.
*/
@SuppressWarnings("unchecked")
public final T operationThreaded(boolean threadedOperation) {
this.threadedOperation = threadedOperation;
return (T) this;
}
/**
* A timeout to wait if the index operation can't be performed immediately. Defaults to <tt>1m</tt>.
*/
@SuppressWarnings("unchecked")
public final T timeout(TimeValue timeout) {
this.timeout = timeout;
return (T) this;
}
/**
* A timeout to wait if the index operation can't be performed immediately. Defaults to <tt>1m</tt>.
*/
public final T timeout(String timeout) {
return timeout(TimeValue.parseTimeValue(timeout, null));
}
public TimeValue timeout() {
return timeout;
}
public String index() {
return this.index;
}
@SuppressWarnings("unchecked")
public final T index(String index) {
this.index = index;
return (T) this;
}
/**
* The replication type.
*/
public ReplicationType replicationType() {
return this.replicationType;
}
/**
* Sets the replication type.
*/
@SuppressWarnings("unchecked")
public final T replicationType(ReplicationType replicationType) {
this.replicationType = replicationType;
return (T) this;
}
/**
* Sets the replication type.
*/
public final T replicationType(String replicationType) {
return replicationType(ReplicationType.fromString(replicationType));
}
public WriteConsistencyLevel consistencyLevel() {
return this.consistencyLevel;
}
/**
* Sets the consistency level of write. Defaults to {@link org.elasticsearch.action.WriteConsistencyLevel#DEFAULT}
*/
@SuppressWarnings("unchecked")
public final T consistencyLevel(WriteConsistencyLevel consistencyLevel) {
this.consistencyLevel = consistencyLevel;
return (T) this;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (index == null) {
validationException = addValidationError("index is missing", validationException);
}
return validationException;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
replicationType = ReplicationType.fromId(in.readByte());
consistencyLevel = WriteConsistencyLevel.fromId(in.readByte());
timeout = TimeValue.readTimeValue(in);
index = in.readString();
// no need to serialize threaded* parameters, since they only matter locally
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeByte(replicationType.id());
out.writeByte(consistencyLevel.id());
timeout.writeTo(out);
out.writeString(index);
}
/**
* Called before the request gets forked into a local thread.
*/
public void beforeLocalFork() {
}
}
| 1.7 compiler failure
fixes #2294.
| src/main/java/org/elasticsearch/action/support/replication/ShardReplicationOperationRequest.java | 1.7 compiler failure fixes #2294. |
|
Java | apache-2.0 | 255722a3ffc41277c20704ff80b961ce38f3b8ab | 0 | asedunov/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,vvv1559/intellij-community,allotria/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,xfournet/intellij-community,signed/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,FHannes/intellij-community,da1z/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,da1z/intellij-community,asedunov/intellij-community,ibinti/intellij-community,semonte/intellij-community,FHannes/intellij-community,FHannes/intellij-community,allotria/intellij-community,allotria/intellij-community,semonte/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,semonte/intellij-community,ibinti/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,signed/intellij-community,signed/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,allotria/intellij-community,suncycheng/intellij-community,signed/intellij-community,da1z/intellij-community,signed/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,semonte/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,da1z/intellij-community,asedunov/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intell
ij-community,FHannes/intellij-community,xfournet/intellij-community,allotria/intellij-community,allotria/intellij-community,asedunov/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,ibinti/intellij-community,semonte/intellij-community,allotria/intellij-community,suncycheng/intellij-community,semonte/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,apixandru/intellij-community,FHannes/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,allotria/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,xfournet/intellij-community,allotria/intellij-community,FHannes/intellij-community,apixandru/intellij-community,signed/intellij-community,allotria/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,semonte/intellij-community,signed/intellij-community,apixandru/intellij-community,da1z/intellij-community,xfournet/intellij-community,apixandru/intellij-community,asedunov/intellij-community,da1z/intellij-community,apixandru/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,ibinti/intellij-community,FHannes/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,semonte/intellij-community,signed/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,da1z/intellij-community,apixandru/intellij-community,ibinti/intellij-community,semonte/intellij-community,suncycheng/intellij-community,semonte/intellij-community,semonte/intellij-community,asedunov/intellij-community,signed/intellij-community,da1z/int
ellij-community,FHannes/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,signed/intellij-community,xfournet/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,signed/intellij-community,asedunov/intellij-community,da1z/intellij-community,ibinti/intellij-community,xfournet/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,FHannes/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.application.options.schemes;
import com.intellij.openapi.options.Scheme;
import com.intellij.openapi.options.SchemeManager;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.openapi.ui.MessageType;
import com.intellij.ui.*;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import java.awt.*;
import java.awt.event.*;
import java.util.Collection;
import java.util.function.Predicate;
public class SchemesCombo<T extends Scheme> {
// region Message constants
public static final String PROJECT_LEVEL = "Project";
public static final String IDE_LEVEL = "IDE";
public static final String EMPTY_NAME_MESSAGE = "The name must not be empty";
public static final String NAME_ALREADY_EXISTS_MESSAGE = "Name is already in use. Please change to unique name.";
private static final String EDITING_HINT = "Enter to save, Esc to cancel.";
public static final int COMBO_WIDTH = 200;
// endregion
private ComboBox<MySchemeListItem<T>> myComboBox;
private JPanel myRootPanel;
private AbstractSchemesPanel<T> mySchemesPanel;
private final CardLayout myLayout;
private final JTextField myNameEditorField;
private final MyComboBoxModel myComboBoxModel;
private final static KeyStroke ESC_KEY_STROKE = KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0, false);
private final static KeyStroke ENTER_KEY_STROKE = KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0, false);
  /**
   * Builds the component: a {@link CardLayout} panel whose first card is the scheme combo
   * box and whose second card is an inline name-editor text field.
   *
   * <p>Note: with CardLayout the card added first is the one initially shown, so the combo
   * box must be added to {@code myRootPanel} before the editor field — do not reorder.
   *
   * @param schemesPanel the owning panel; used for validation messages and scheme actions
   */
  public SchemesCombo(@NotNull AbstractSchemesPanel<T> schemesPanel) {
    mySchemesPanel = schemesPanel;
    myLayout = new CardLayout();
    myRootPanel = new JPanel(myLayout);
    myComboBoxModel = new MyComboBoxModel();
    createCombo();
    myRootPanel.add(myComboBox);
    myNameEditorField = createNameEditorField();
    myRootPanel.add(myNameEditorField);
    // Fixed scaled width; preferred height follows the editor field so both cards line up.
    myRootPanel.setPreferredSize(new Dimension(JBUI.scale(COMBO_WIDTH), myNameEditorField.getPreferredSize().height));
    myRootPanel.setMaximumSize(new Dimension(JBUI.scale(COMBO_WIDTH), Short.MAX_VALUE));
  }
private JTextField createNameEditorField() {
JTextField nameEditorField = new JTextField();
nameEditorField.registerKeyboardAction(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
revertSchemeName();
cancelEdit();
}
}, ESC_KEY_STROKE, JComponent.WHEN_FOCUSED);
nameEditorField.registerKeyboardAction(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
stopEdit();
}
}, ENTER_KEY_STROKE, JComponent.WHEN_FOCUSED);
nameEditorField.addFocusListener(new FocusAdapter() {
@Override
public void focusLost(FocusEvent e) {
stopEdit();
}
});
nameEditorField.getDocument().addDocumentListener(new DocumentAdapter() {
@Override
protected void textChanged(DocumentEvent e) {
validateOnTyping();
}
});
return nameEditorField;
}
private void validateOnTyping() {
String currName = myNameEditorField.getText();
MySchemeListItem<T> selectedItem = getSelectedItem();
if (selectedItem != null && !currName.equals(selectedItem.getSchemeName())) {
String validationMessage = validateSchemeName(currName);
if (validationMessage != null) {
mySchemesPanel.showInfo(validationMessage, MessageType.ERROR);
return;
}
}
showHint();
}
private void showHint() {
mySchemesPanel.showInfo(EDITING_HINT, MessageType.INFO);
}
private void revertSchemeName() {
MySchemeListItem<T> selectedItem = getSelectedItem();
if (selectedItem != null) {
myNameEditorField.setText(selectedItem.getSchemeName());
}
}
public void updateSelected() {
myComboBox.repaint();
}
private void stopEdit() {
String newName = myNameEditorField.getText();
MySchemeListItem<T> selectedItem = getSelectedItem();
if (selectedItem != null) {
if (newName.equals(selectedItem.getSchemeName())) {
cancelEdit();
return;
}
String validationMessage = validateSchemeName(newName);
if (validationMessage != null) {
mySchemesPanel.showInfo(validationMessage, MessageType.ERROR);
}
else {
cancelEdit();
if (selectedItem.getScheme() != null) {
mySchemesPanel.getActions().renameScheme(selectedItem.getScheme(), newName);
}
}
}
}
public void cancelEdit() {
mySchemesPanel.clearInfo();
myLayout.first(myRootPanel);
myRootPanel.requestFocus();
}
private void createCombo() {
myComboBox = new ComboBox<>(myComboBoxModel);
myComboBox.setRenderer(new MyListCellRenderer());
myComboBox.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
mySchemesPanel.getActions().onSchemeChanged(getSelectedScheme());
}
});
}
public void startEdit() {
T scheme = getSelectedScheme();
if (scheme != null) {
showHint();
myNameEditorField.setText(scheme.getName());
myLayout.last(myRootPanel);
myNameEditorField.requestFocus();
}
}
private SimpleTextAttributes getSchemeAttributes(@NotNull MySchemeListItem<T> item) {
SchemesModel<T> model = mySchemesPanel.getModel();
T scheme = item.getScheme();
SimpleTextAttributes baseAttributes = scheme !=null && model.canDeleteScheme(scheme)
? SimpleTextAttributes.REGULAR_ATTRIBUTES
: SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES;
if (scheme != null && model.canResetScheme(scheme) && model.differsFromDefault(scheme)) {
return baseAttributes.derive(-1, JBColor.BLUE, null, null);
}
return baseAttributes;
}
public void resetSchemes(@NotNull Collection<T> schemes) {
myComboBoxModel.removeAllElements();
SchemesModel<T> model = mySchemesPanel.getModel();
if (mySchemesPanel.supportsProjectSchemes()) {
myComboBoxModel.addElement(new MySeparatorItem(PROJECT_LEVEL));
addItems(schemes, scheme -> model.isProjectScheme(scheme));
myComboBoxModel.addElement(new MySeparatorItem(IDE_LEVEL));
addItems(schemes, scheme -> !model.isProjectScheme(scheme));
}
else {
addItems(schemes, scheme -> true);
}
}
private void addItems(@NotNull Collection<T> schemes, Predicate<T> filter) {
for (T scheme : schemes) {
if (filter.test(scheme)) {
myComboBoxModel.addElement(new MySchemeListItem<>(scheme));
}
}
}
private class MyListCellRenderer extends ColoredListCellRenderer<MySchemeListItem<T>> {
private ListCellRendererWrapper<MySchemeListItem> myWrapper = new ListCellRendererWrapper<MySchemeListItem>() {
@Override
public void customize(JList list,
MySchemeListItem value,
int index,
boolean selected,
boolean hasFocus) {
if (value.isSeparator()) {
setText(" Stored in " + value.getPresentableText());
setSeparator();
}
}
};
@Override
public Component getListCellRendererComponent(JList<? extends MySchemeListItem<T>> list,
MySchemeListItem<T> value,
int index,
boolean selected,
boolean hasFocus) {
if (value.isSeparator()) {
Component c = myWrapper.getListCellRendererComponent(list, value, index, selected, hasFocus);
if (c instanceof TitledSeparator) {
((TitledSeparator)c).getLabel().setForeground(JBColor.GRAY);
return c;
}
}
return super.getListCellRendererComponent(list, value, index, selected, hasFocus);
}
@Override
protected void customizeCellRenderer(@NotNull JList<? extends MySchemeListItem<T>> list,
MySchemeListItem<T> value,
int index,
boolean selected,
boolean hasFocus) {
T scheme = value.getScheme();
if (scheme != null) {
append(value.getPresentableText(), getSchemeAttributes(value));
if (mySchemesPanel.supportsProjectSchemes()) {
if (index == -1) {
append(" " + (mySchemesPanel.getModel().isProjectScheme(scheme) ? PROJECT_LEVEL : IDE_LEVEL),
SimpleTextAttributes.GRAY_ATTRIBUTES);
}
}
}
}
}
@Nullable
public T getSelectedScheme() {
MySchemeListItem<T> item = getSelectedItem();
return item != null ? item.getScheme() : null;
}
@Nullable
public MySchemeListItem<T> getSelectedItem() {
int i = myComboBox.getSelectedIndex();
return i >= 0 ? myComboBox.getItemAt(i) : null;
}
public void selectScheme(@Nullable T scheme) {
for (int i = 0; i < myComboBox.getItemCount(); i ++) {
if (myComboBox.getItemAt(i).getScheme() == scheme) {
myComboBox.setSelectedIndex(i);
break;
}
}
}
public JComponent getComponent() {
return myRootPanel;
}
private class MySeparatorItem extends MySchemeListItem<T> {
private String myTitle;
public MySeparatorItem(@NotNull String title) {
super(null);
myTitle = title;
}
@Override
public boolean isSeparator() {
return true;
}
@NotNull
@Override
public String getPresentableText() {
return myTitle;
}
}
private static class MySchemeListItem<T extends Scheme> {
private @Nullable T myScheme;
public MySchemeListItem(@Nullable T scheme) {
myScheme = scheme;
}
@Nullable
public String getSchemeName() {
return myScheme != null ? myScheme.getName() : null;
}
@Nullable
public T getScheme() {
return myScheme;
}
@NotNull
public String getPresentableText() {
return myScheme != null ? SchemeManager.getDisplayName(myScheme) : "";
}
public boolean isSeparator() {
return false;
}
}
@Nullable
public String validateSchemeName(@NotNull String name) {
if (name.isEmpty()) {
return EMPTY_NAME_MESSAGE;
}
else if (mySchemesPanel.getModel().containsScheme(name)) {
return NAME_ALREADY_EXISTS_MESSAGE;
}
return null;
}
private class MyComboBoxModel extends DefaultComboBoxModel<MySchemeListItem<T>> {
@Override
public void setSelectedItem(Object anObject) {
if (anObject instanceof MySchemeListItem && ((MySchemeListItem)anObject).isSeparator()) {
return;
}
super.setSelectedItem(anObject);
}
}
}
| platform/lang-impl/src/com/intellij/application/options/schemes/SchemesCombo.java | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.application.options.schemes;
import com.intellij.openapi.options.Scheme;
import com.intellij.openapi.options.SchemeManager;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.openapi.ui.MessageType;
import com.intellij.ui.*;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.util.Collection;
import java.util.function.Predicate;
public class SchemesCombo<T extends Scheme> {
// region Message constants
public static final String PROJECT_LEVEL = "Project";
public static final String IDE_LEVEL = "IDE";
public static final String EMPTY_NAME_MESSAGE = "The name must not be empty";
public static final String NAME_ALREADY_EXISTS_MESSAGE = "Name is already in use. Please change to unique name.";
private static final String EDITING_HINT = "Enter to save, Esc to cancel.";
public static final int COMBO_WIDTH = 200;
// endregion
private ComboBox<MySchemeListItem<T>> myComboBox;
private JPanel myRootPanel;
private AbstractSchemesPanel<T> mySchemesPanel;
private final CardLayout myLayout;
private final JTextField myNameEditorField;
private final MyComboBoxModel myComboBoxModel;
private final static KeyStroke ESC_KEY_STROKE = KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0, false);
private final static KeyStroke ENTER_KEY_STROKE = KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0, false);
public SchemesCombo(@NotNull AbstractSchemesPanel<T> schemesPanel) {
mySchemesPanel = schemesPanel;
myLayout = new CardLayout();
myRootPanel = new JPanel(myLayout);
myComboBoxModel = new MyComboBoxModel();
createCombo();
myRootPanel.add(myComboBox);
myNameEditorField = createNameEditorField();
myRootPanel.add(myNameEditorField);
myRootPanel.setPreferredSize(new Dimension(JBUI.scale(COMBO_WIDTH), myNameEditorField.getPreferredSize().height));
myRootPanel.setMaximumSize(new Dimension(JBUI.scale(COMBO_WIDTH), Short.MAX_VALUE));
}
private JTextField createNameEditorField() {
JTextField nameEditorField = new JTextField();
nameEditorField.registerKeyboardAction(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
revertSchemeName();
cancelEdit();
}
}, ESC_KEY_STROKE, JComponent.WHEN_FOCUSED);
nameEditorField.registerKeyboardAction(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
stopEdit();
}
}, ENTER_KEY_STROKE, JComponent.WHEN_FOCUSED);
nameEditorField.addFocusListener(new FocusAdapter() {
@Override
public void focusLost(FocusEvent e) {
stopEdit();
}
});
return nameEditorField;
}
private void revertSchemeName() {
MySchemeListItem<T> selectedItem = getSelectedItem();
if (selectedItem != null) {
myNameEditorField.setText(selectedItem.getSchemeName());
}
}
public void updateSelected() {
myComboBox.repaint();
}
private void stopEdit() {
String newName = myNameEditorField.getText();
MySchemeListItem<T> selectedItem = getSelectedItem();
if (selectedItem != null) {
if (newName.equals(selectedItem.getSchemeName())) {
cancelEdit();
return;
}
String validationMessage = validateSchemeName(newName);
if (validationMessage != null) {
mySchemesPanel.showInfo(validationMessage, MessageType.ERROR);
}
else {
cancelEdit();
if (selectedItem.getScheme() != null) {
mySchemesPanel.getActions().renameScheme(selectedItem.getScheme(), newName);
}
}
}
}
public void cancelEdit() {
mySchemesPanel.clearInfo();
myLayout.first(myRootPanel);
myRootPanel.requestFocus();
}
private void createCombo() {
myComboBox = new ComboBox<>(myComboBoxModel);
myComboBox.setRenderer(new MyListCellRenderer());
myComboBox.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
mySchemesPanel.getActions().onSchemeChanged(getSelectedScheme());
}
});
}
public void startEdit() {
T scheme = getSelectedScheme();
if (scheme != null) {
mySchemesPanel.showInfo(EDITING_HINT, MessageType.INFO);
myNameEditorField.setText(scheme.getName());
myLayout.last(myRootPanel);
myNameEditorField.requestFocus();
}
}
private SimpleTextAttributes getSchemeAttributes(@NotNull MySchemeListItem<T> item) {
SchemesModel<T> model = mySchemesPanel.getModel();
T scheme = item.getScheme();
SimpleTextAttributes baseAttributes = scheme !=null && model.canDeleteScheme(scheme)
? SimpleTextAttributes.REGULAR_ATTRIBUTES
: SimpleTextAttributes.REGULAR_BOLD_ATTRIBUTES;
if (scheme != null && model.canResetScheme(scheme) && model.differsFromDefault(scheme)) {
return baseAttributes.derive(-1, JBColor.BLUE, null, null);
}
return baseAttributes;
}
public void resetSchemes(@NotNull Collection<T> schemes) {
myComboBoxModel.removeAllElements();
SchemesModel<T> model = mySchemesPanel.getModel();
if (mySchemesPanel.supportsProjectSchemes()) {
myComboBoxModel.addElement(new MySeparatorItem(PROJECT_LEVEL));
addItems(schemes, scheme -> model.isProjectScheme(scheme));
myComboBoxModel.addElement(new MySeparatorItem(IDE_LEVEL));
addItems(schemes, scheme -> !model.isProjectScheme(scheme));
}
else {
addItems(schemes, scheme -> true);
}
}
private void addItems(@NotNull Collection<T> schemes, Predicate<T> filter) {
for (T scheme : schemes) {
if (filter.test(scheme)) {
myComboBoxModel.addElement(new MySchemeListItem<>(scheme));
}
}
}
private class MyListCellRenderer extends ColoredListCellRenderer<MySchemeListItem<T>> {
private ListCellRendererWrapper<MySchemeListItem> myWrapper = new ListCellRendererWrapper<MySchemeListItem>() {
@Override
public void customize(JList list,
MySchemeListItem value,
int index,
boolean selected,
boolean hasFocus) {
if (value.isSeparator()) {
setText(" Stored in " + value.getPresentableText());
setSeparator();
}
}
};
@Override
public Component getListCellRendererComponent(JList<? extends MySchemeListItem<T>> list,
MySchemeListItem<T> value,
int index,
boolean selected,
boolean hasFocus) {
if (value.isSeparator()) {
Component c = myWrapper.getListCellRendererComponent(list, value, index, selected, hasFocus);
if (c instanceof TitledSeparator) {
((TitledSeparator)c).getLabel().setForeground(JBColor.GRAY);
return c;
}
}
return super.getListCellRendererComponent(list, value, index, selected, hasFocus);
}
@Override
protected void customizeCellRenderer(@NotNull JList<? extends MySchemeListItem<T>> list,
MySchemeListItem<T> value,
int index,
boolean selected,
boolean hasFocus) {
T scheme = value.getScheme();
if (scheme != null) {
append(value.getPresentableText(), getSchemeAttributes(value));
if (mySchemesPanel.supportsProjectSchemes()) {
if (index == -1) {
append(" " + (mySchemesPanel.getModel().isProjectScheme(scheme) ? PROJECT_LEVEL : IDE_LEVEL),
SimpleTextAttributes.GRAY_ATTRIBUTES);
}
}
}
}
}
@Nullable
public T getSelectedScheme() {
MySchemeListItem<T> item = getSelectedItem();
return item != null ? item.getScheme() : null;
}
@Nullable
public MySchemeListItem<T> getSelectedItem() {
int i = myComboBox.getSelectedIndex();
return i >= 0 ? myComboBox.getItemAt(i) : null;
}
public void selectScheme(@Nullable T scheme) {
for (int i = 0; i < myComboBox.getItemCount(); i ++) {
if (myComboBox.getItemAt(i).getScheme() == scheme) {
myComboBox.setSelectedIndex(i);
break;
}
}
}
public JComponent getComponent() {
return myRootPanel;
}
private class MySeparatorItem extends MySchemeListItem<T> {
private String myTitle;
public MySeparatorItem(@NotNull String title) {
super(null);
myTitle = title;
}
@Override
public boolean isSeparator() {
return true;
}
@NotNull
@Override
public String getPresentableText() {
return myTitle;
}
}
private static class MySchemeListItem<T extends Scheme> {
private @Nullable T myScheme;
public MySchemeListItem(@Nullable T scheme) {
myScheme = scheme;
}
@Nullable
public String getSchemeName() {
return myScheme != null ? myScheme.getName() : null;
}
@Nullable
public T getScheme() {
return myScheme;
}
@NotNull
public String getPresentableText() {
return myScheme != null ? SchemeManager.getDisplayName(myScheme) : "";
}
public boolean isSeparator() {
return false;
}
}
@Nullable
public String validateSchemeName(@NotNull String name) {
if (name.isEmpty()) {
return EMPTY_NAME_MESSAGE;
}
else if (mySchemesPanel.getModel().containsScheme(name)) {
return NAME_ALREADY_EXISTS_MESSAGE;
}
return null;
}
private class MyComboBoxModel extends DefaultComboBoxModel<MySchemeListItem<T>> {
@Override
public void setSelectedItem(Object anObject) {
if (anObject instanceof MySchemeListItem && ((MySchemeListItem)anObject).isSeparator()) {
return;
}
super.setSelectedItem(anObject);
}
}
}
| Show validation message on typing (IDEA-165087, comment #9)
| platform/lang-impl/src/com/intellij/application/options/schemes/SchemesCombo.java | Show validation message on typing (IDEA-165087, comment #9) |
|
Java | apache-2.0 | 5e61682ffb00288825d742c90a92bd714445387f | 0 | synes/vaadin,asashour/framework,bmitc/vaadin,mittop/vaadin,travisfw/vaadin,sitexa/vaadin,magi42/vaadin,Flamenco/vaadin,magi42/vaadin,udayinfy/vaadin,Legioth/vaadin,mstahv/framework,shahrzadmn/vaadin,Darsstar/framework,magi42/vaadin,fireflyc/vaadin,sitexa/vaadin,Scarlethue/vaadin,Peppe/vaadin,Peppe/vaadin,asashour/framework,bmitc/vaadin,carrchang/vaadin,Peppe/vaadin,peterl1084/framework,bmitc/vaadin,sitexa/vaadin,synes/vaadin,bmitc/vaadin,shahrzadmn/vaadin,oalles/vaadin,kironapublic/vaadin,kironapublic/vaadin,peterl1084/framework,oalles/vaadin,asashour/framework,Darsstar/framework,udayinfy/vaadin,mstahv/framework,fireflyc/vaadin,Legioth/vaadin,synes/vaadin,Darsstar/framework,kironapublic/vaadin,Flamenco/vaadin,travisfw/vaadin,peterl1084/framework,udayinfy/vaadin,peterl1084/framework,jdahlstrom/vaadin.react,mstahv/framework,cbmeeks/vaadin,Legioth/vaadin,Peppe/vaadin,carrchang/vaadin,Scarlethue/vaadin,travisfw/vaadin,cbmeeks/vaadin,Peppe/vaadin,shahrzadmn/vaadin,fireflyc/vaadin,Legioth/vaadin,oalles/vaadin,fireflyc/vaadin,jdahlstrom/vaadin.react,Darsstar/framework,jdahlstrom/vaadin.react,cbmeeks/vaadin,Scarlethue/vaadin,sitexa/vaadin,sitexa/vaadin,mstahv/framework,magi42/vaadin,magi42/vaadin,Darsstar/framework,peterl1084/framework,carrchang/vaadin,udayinfy/vaadin,kironapublic/vaadin,kironapublic/vaadin,fireflyc/vaadin,cbmeeks/vaadin,carrchang/vaadin,synes/vaadin,Flamenco/vaadin,travisfw/vaadin,shahrzadmn/vaadin,mittop/vaadin,Scarlethue/vaadin,Scarlethue/vaadin,Flamenco/vaadin,mittop/vaadin,shahrzadmn/vaadin,jdahlstrom/vaadin.react,travisfw/vaadin,mstahv/framework,mittop/vaadin,oalles/vaadin,Legioth/vaadin,asashour/framework,oalles/vaadin,jdahlstrom/vaadin.react,synes/vaadin,asashour/framework,udayinfy/vaadin | tests/testbench/com/vaadin/tests/tickets/Ticket2440.java | package com.vaadin.tests.tickets;
import java.net.URL;
import com.vaadin.Application;
import com.vaadin.terminal.DownloadStream;
import com.vaadin.terminal.ExternalResource;
import com.vaadin.terminal.URIHandler;
import com.vaadin.ui.Label;
import com.vaadin.ui.Link;
import com.vaadin.ui.Window;
public class Ticket2440 extends Application {
@Override
public void init() {
final Window main = new MainWindow();
setMainWindow(main);
main.addComponent(new Label(
"Clicking the link should open a new window that should receive the URI 'msg/hello' and add that a a Label to it's ui. Currently the Label ends up in this (main) window (try reloading). Console intentionally spams during the window finding/uri handling - looks, uhm, interesting."));
}
@Override
public Window getWindow(String name) {
System.err.println("Looking for " + name);
if ("msg".equals(name)) {
System.err
.println(" rest uri, returning new MainWindow with message from uri");
MainWindow restWindow = new MainWindow();
addWindow(restWindow);
return restWindow;
}
// If we already have the requested window, use it
Window w = super.getWindow(name);
if (w == null) {
// If no window found, create it
System.err.println(" new win");
w = new MainWindow();
w.setName(name);
addWindow(w);
return w;
} else {
System.err.println(" found win");
return w;
}
}
private class MainWindow extends Window {
public MainWindow() {
super("Main window");
addComponent(new Link("new mainwin", new ExternalResource(
Ticket2440.this.getURL() + "msg/hello"), "_blank", -1, -1,
Window.BORDER_DEFAULT));
addURIHandler(new URIHandler() {
public DownloadStream handleURI(URL context, String relativeUri) {
System.err
.println((getMainWindow() == getWindow() ? "mainwin: "
: "subwin: ")
+ context + ", " + relativeUri);
addComponent(new Label(relativeUri));
return null;
}
});
}
@Override
public DownloadStream handleURI(URL context, String relativeUri) {
System.err.println("MainWindow.handleURI();");
return super.handleURI(context, relativeUri);
}
}
}
| Remove test for separate url handlers for different windows | tests/testbench/com/vaadin/tests/tickets/Ticket2440.java | Remove test for separate url handlers for different windows |
||
Java | apache-2.0 | 71584fa59f4d1f97daa43bf612a7717f7a7de570 | 0 | jmuehlner/incubator-guacamole-client,mike-jumper/incubator-guacamole-client,MaxSmile/guacamole-client,lato333/guacamole-client,noelbk/guacamole-client,TribeMedia/guacamole-client,flangelo/guacamole-client,softpymesJeffer/incubator-guacamole-client,hguehl/incubator-guacamole-client,qiangyee/guacamole-client,softpymesJeffer/incubator-guacamole-client,TribeMedia/guacamole-client,TheAxnJaxn/guacamole-client,flangelo/guacamole-client,mike-jumper/incubator-guacamole-client,AIexandr/guacamole-client,MaxSmile/guacamole-client,TheAxnJaxn/guacamole-client,necouchman/incubator-guacamole-client,Akheon23/guacamole-client,hguehl/incubator-guacamole-client,lato333/guacamole-client,qiangyee/guacamole-client,hguehl/incubator-guacamole-client,TheAxnJaxn/guacamole-client,necouchman/incubator-guacamole-client,nkoterba/guacamole-common-js,esmailpour-hosein/incubator-guacamole-client,glyptodon/guacamole-client,lato333/guacamole-client,softpymesJeffer/incubator-guacamole-client,AIexandr/guacamole-client,Akheon23/guacamole-client,TribeMedia/guacamole-client,mike-jumper/incubator-guacamole-client,jmuehlner/incubator-guacamole-client,DaanWillemsen/guacamole-client,noelbk/guacamole-client,glyptodon/guacamole-client,necouchman/incubator-guacamole-client,glyptodon/guacamole-client,jmuehlner/incubator-guacamole-client,mike-jumper/incubator-guacamole-client,Akheon23/guacamole-client,qiangyee/guacamole-client,necouchman/incubator-guacamole-client,DaanWillemsen/guacamole-client,MaxSmile/guacamole-client,glyptodon/guacamole-client,lato333/guacamole-client,noelbk/guacamole-client,softpymesJeffer/incubator-guacamole-client,AIexandr/guacamole-client,glyptodon/guacamole-client,DaanWillemsen/guacamole-client,jmuehlner/incubator-guacamole-client,flangelo/guacamole-client,hguehl/incubator-guacamole-client | /*
* Copyright (C) 2014 Glyptodon LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.glyptodon.guacamole.net.basic.rest.connectiongroup;
import java.util.Collection;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.glyptodon.guacamole.net.auth.ConnectionGroup;
import org.glyptodon.guacamole.net.auth.ConnectionGroup.Type;
import org.glyptodon.guacamole.net.basic.rest.connection.APIConnection;
/**
* A simple connection group to expose through the REST endpoints.
*
* @author James Muehlner
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class APIConnectionGroup {
/**
* The identifier of the root connection group.
*/
public static final String ROOT_IDENTIFIER = "ROOT";
/**
* The name of this connection group.
*/
private String name;
/**
* The identifier of this connection group.
*/
private String identifier;
/**
* The identifier of the parent connection group for this connection group.
*/
private String parentIdentifier;
/**
* The type of this connection group.
*/
private Type type;
/**
* All child connection groups. If children are not being queried, this may
* be omitted.
*/
private Collection<APIConnectionGroup> childConnectionGroups;
/**
* All child connections. If children are not being queried, this may be
* omitted.
*/
private Collection<APIConnection> childConnections;
/**
* Create an empty APIConnectionGroup.
*/
public APIConnectionGroup() {}
/**
* Create a new APIConnectionGroup from the given ConnectionGroup record.
*
* @param connectionGroup The ConnectionGroup record to initialize this
* APIConnectionGroup from.
*/
public APIConnectionGroup(ConnectionGroup connectionGroup) {
this.identifier = connectionGroup.getIdentifier();
this.parentIdentifier = connectionGroup.getParentIdentifier();
this.name = connectionGroup.getName();
this.type = connectionGroup.getType();
}
/**
* Returns the name of this connection group.
* @return The name of this connection group.
*/
public String getName() {
return name;
}
/**
* Set the name of this connection group.
* @param name The name of this connection group.
*/
public void setName(String name) {
this.name = name;
}
/**
* Returns the identifier of this connection group.
* @return The identifier of this connection group.
*/
public String getIdentifier() {
return identifier;
}
/**
* Set the identifier of this connection group.
* @param identifier The identifier of this connection group.
*/
public void setIdentifier(String identifier) {
this.identifier = identifier;
}
/**
* Returns the unique identifier for this connection group.
* @return The unique identifier for this connection group.
*/
public String getParentIdentifier() {
return parentIdentifier;
}
/**
* Sets the parent connection group identifier for this connection group.
* @param parentIdentifier The parent connection group identifier
* for this connection group.
*/
public void setParentIdentifier(String parentIdentifier) {
this.parentIdentifier = parentIdentifier;
}
/**
* Returns the type of this connection group.
* @return The type of this connection group.
*/
public Type getType() {
return type;
}
/**
* Set the type of this connection group.
* @param type The Type of this connection group.
*/
public void setType(Type type) {
this.type = type;
}
/**
* Returns a collection of all child connection groups, or null if children
* have not been queried.
*
* @return
* A collection of all child connection groups, or null if children
* have not been queried.
*/
public Collection<APIConnectionGroup> getChildConnectionGroups() {
return childConnectionGroups;
}
/**
* Sets the collection of all child connection groups to the given
* collection, which may be null if children have not been queried.
*
* @param childConnectionGroups
* The collection containing all child connection groups of this
* connection group, or null if children have not been queried.
*/
public void setChildConnectionGroups(Collection<APIConnectionGroup> childConnectionGroups) {
this.childConnectionGroups = childConnectionGroups;
}
/**
* Returns a collection of all child connections, or null if children have
* not been queried.
*
* @return
* A collection of all child connections, or null if children have not
* been queried.
*/
public Collection<APIConnection> getChildConnections() {
return childConnections;
}
/**
* Sets the collection of all child connections to the given collection,
* which may be null if children have not been queried.
*
* @param childConnections
* The collection containing all child connections of this connection
* group, or null if children have not been queried.
*/
public void setChildConnections(Collection<APIConnection> childConnections) {
this.childConnections = childConnections;
}
}
| guacamole/src/main/java/org/glyptodon/guacamole/net/basic/rest/connectiongroup/APIConnectionGroup.java | /*
* Copyright (C) 2014 Glyptodon LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.glyptodon.guacamole.net.basic.rest.connectiongroup;
import java.util.Collection;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.glyptodon.guacamole.net.auth.ConnectionGroup;
import org.glyptodon.guacamole.net.auth.ConnectionGroup.Type;
import org.glyptodon.guacamole.net.basic.rest.connection.APIConnection;
/**
* A simple connection group to expose through the REST endpoints.
*
* @author James Muehlner
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class APIConnectionGroup {
/**
* The identifier of the root connection group.
*/
public static final String ROOT_IDENTIFIER = "ROOT";
/**
* The name of this connection group.
*/
private String name;
/**
* The identifier of this connection group.
*/
private String identifier;
/**
* The identifier of the parent connection group for this connection group.
*/
private String parentIdentifier;
/**
* The type of this connection group.
*/
private Type type;
/**
* All child connection groups. If children are not being queried, this may
* be omitted.
*/
private Collection<APIConnectionGroup> childConnectionGroups;
/**
* All child connections. If children are not being queried, this may be
* omitted.
*/
private Collection<APIConnection> childConnections;
/**
* Create an empty APIConnectionGroup.
*/
public APIConnectionGroup() {}
/**
* Create a new APIConnectionGroup from the given ConnectionGroup record.
*
* @param connectionGroup The ConnectionGroup record to initialize this
* APIConnectionGroup from.
*/
public APIConnectionGroup(ConnectionGroup connectionGroup) {
this.identifier = connectionGroup.getIdentifier();
this.parentIdentifier = connectionGroup.getParentIdentifier();
// Use the explicit ROOT group ID
if (this.parentIdentifier == null)
this.parentIdentifier = ROOT_IDENTIFIER;
this.name = connectionGroup.getName();
this.type = connectionGroup.getType();
}
/**
* Returns the name of this connection group.
* @return The name of this connection group.
*/
public String getName() {
return name;
}
/**
* Set the name of this connection group.
* @param name The name of this connection group.
*/
public void setName(String name) {
this.name = name;
}
/**
* Returns the identifier of this connection group.
* @return The identifier of this connection group.
*/
public String getIdentifier() {
return identifier;
}
/**
* Set the identifier of this connection group.
* @param identifier The identifier of this connection group.
*/
public void setIdentifier(String identifier) {
this.identifier = identifier;
}
/**
* Returns the unique identifier for this connection group.
* @return The unique identifier for this connection group.
*/
public String getParentIdentifier() {
return parentIdentifier;
}
/**
* Sets the parent connection group identifier for this connection group.
* @param parentIdentifier The parent connection group identifier
* for this connection group.
*/
public void setParentIdentifier(String parentIdentifier) {
this.parentIdentifier = parentIdentifier;
}
/**
* Returns the type of this connection group.
* @return The type of this connection group.
*/
public Type getType() {
return type;
}
/**
* Set the type of this connection group.
* @param type The Type of this connection group.
*/
public void setType(Type type) {
this.type = type;
}
    /**
     * Returns a collection of all child connection groups, or null if
     * children have not been queried. Note: null means "not yet queried",
     * not "no children".
     *
     * @return
     *     A collection of all child connection groups, or null if children
     *     have not been queried.
     */
    public Collection<APIConnectionGroup> getChildConnectionGroups() {
        return childConnectionGroups;
    }
    /**
     * Sets the collection of all child connection groups to the given
     * collection, which may be null if children have not been queried.
     *
     * @param childConnectionGroups
     *     The collection containing all child connection groups of this
     *     connection group, or null if children have not been queried.
     */
    public void setChildConnectionGroups(Collection<APIConnectionGroup> childConnectionGroups) {
        this.childConnectionGroups = childConnectionGroups;
    }
    /**
     * Returns a collection of all child connections, or null if children
     * have not been queried. Note: null means "not yet queried", not
     * "no children".
     *
     * @return
     *     A collection of all child connections, or null if children have
     *     not been queried.
     */
    public Collection<APIConnection> getChildConnections() {
        return childConnections;
    }
    /**
     * Sets the collection of all child connections to the given collection,
     * which may be null if children have not been queried.
     *
     * @param childConnections
     *     The collection containing all child connections of this connection
     *     group, or null if children have not been queried.
     */
    public void setChildConnections(Collection<APIConnection> childConnections) {
        this.childConnections = childConnections;
    }
}
| GUAC-932: null connection group identifier does NOT mean root. This is implementation-dependent. | guacamole/src/main/java/org/glyptodon/guacamole/net/basic/rest/connectiongroup/APIConnectionGroup.java | GUAC-932: null connection group identifier does NOT mean root. This is implementation-dependent. |
|
Java | apache-2.0 | ad2c83547de2a59567c6ed3ed8bf814e1b6c1e55 | 0 | ngageoint/mrgeo,ngageoint/mrgeo,ngageoint/mrgeo | /*
* Copyright 2009-2017. DigitalGlobe, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package org.mrgeo.cmd.server;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.eclipse.jetty.io.EndPoint;
import org.eclipse.jetty.server.*;
import org.eclipse.jetty.server.handler.HandlerCollection;
import org.eclipse.jetty.server.handler.ResourceHandler;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.eclipse.jetty.util.thread.ThreadPool;
import org.glassfish.jersey.servlet.ServletContainer;
import org.mrgeo.cmd.Command;
import org.mrgeo.core.MrGeoProperties;
import org.mrgeo.data.ProviderProperties;
import org.mrgeo.utils.logging.LoggingUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.core.UriBuilder;
import java.io.File;
import java.net.*;
import java.security.KeyStore;
import java.util.Enumeration;
/**
 * Command that serves the MrGeo REST services from an embedded Jetty web
 * server. Supports binding to a specific host and port, an optional HTTPS
 * (--secure) listener backed by a Java keystore, and an optional
 * single-threaded mode useful for debugging one request at a time.
 */
public class WebServer extends Command
{
private static Logger log = LoggerFactory.getLogger(WebServer.class);
public WebServer()
{
}
@Override
public void addOptions(Options options)
{
// Command-line options controlling the listening socket and TLS setup.
Option port = new Option("p", "port", true, "The port on which the server will listen (default 8080)");
options.addOption(port);
Option host = new Option("hs", "host", true, "The host on which the server will listen (default ALL interfaces (0.0.0.0))");
options.addOption(host);
Option secure = new Option("sc", "secure", false, "Enable the ssl (https) interface. You MUST at least include the --password option as well");
options.addOption(secure);
Option keystore = new Option("ks", "keystore", true, "Name and location of the Java keystore (default $JAVA_HOME/lib/security/cacerts)");
options.addOption(keystore);
Option pw = new Option("pw", "password", true, "Keystore password (prefix with \"OBF:\" to use a Jetty obfuscated password)");
options.addOption(pw);
Option singleThreaded = new Option("st", "singleThreaded", false, "Specify this argument in order to run the web server in essentially single-threaded mode for processing one request at a time");
options.addOption(singleThreaded);
}
@Override
public String getUsage() { return "webserver <options>"; }
/**
 * Parses the command line and starts the web server, blocking until the
 * server is shut down.
 *
 * @return 0 on clean shutdown, -1 if startup failed (the exception is logged)
 */
@Override
public int run(CommandLine line, Configuration conf,
ProviderProperties providerProperties) throws ParseException
{
try
{
int httpPort = 8080;
String host = "0.0.0.0";
if (line.hasOption("p"))
{
try
{
httpPort = Integer.parseInt(line.getOptionValue("p"));
}
catch (NumberFormatException nfe)
{
throw new ParseException("Invalid HTTP port specified: " + line.getOptionValue("p"));
}
}
if (line.hasOption("hs"))
{
host = line.getOptionValue("hs");
}
boolean singleThreaded = line.hasOption("st");
boolean secure = line.hasOption("sc");
String keystore = null;
String pw = null;
if (secure)
{
// A custom keystore is useless without its password; the default JDK
// cacerts keystore falls back to the well-known "changeit" password.
if (line.hasOption("ks") && !line.hasOption("pw"))
{
throw new ParseException("Must supply a keystore password when supplying a keystore location");
}
keystore = line.getOptionValue("ks", null);
pw = line.getOptionValue("pw", "changeit");
}
if (line.hasOption("v"))
{
LoggingUtils.setDefaultLogLevel(LoggingUtils.INFO);
}
if (line.hasOption("d"))
{
LoggingUtils.setDefaultLogLevel(LoggingUtils.DEBUG);
}
runWebServer(host, httpPort, singleThreaded, secure, keystore, pw);
return 0;
}
catch (Exception e)
{
log.error("Exception thrown", e);
}
return -1;
}
/**
 * Builds, configures and starts the Jetty server: thread pool, connectors
 * (plain and/or SSL), the Jersey REST servlet under /mrgeo, and an optional
 * static-content handler rooted at web.server.static.root.
 *
 * @return the started Jetty {@link Server}
 */
@SuppressFBWarnings(value = "PATH_TRAVERSAL_IN", justification = "File() checking for existence")
@SuppressWarnings("squid:S00112") // Passing on exception thrown from 3rd party API
private Server startWebServer(String host, int httpPort, boolean singleThreaded, boolean secure,
String keystore, String pw) throws Exception
{
System.out.println("Starting embedded web server on port " + httpPort);
Server server = null;
if (singleThreaded)
{
// Based on the connector configuration below (min = 1, max = 1), Jetty requires a
// minimum thread pool size of three threads for its processing. One acceptor thread,
// one selector thread, and one request thread. It will queue up requests that it
// can't immediately process.
ThreadPool threadPool = new QueuedThreadPool(3, 1);
server = new Server(threadPool);
}
else if (secure)
{
// Connector (with port) is attached below once SSL is configured.
server = new Server();
}
else
{
InetSocketAddress isa = new InetSocketAddress(host, httpPort);
server = new Server(isa);
}
ServerConnector httpsConnector = null;
if (secure)
{
HttpConfiguration http_config = new HttpConfiguration();
http_config.setSecureScheme("https");
http_config.setSecurePort(httpPort);
HttpConfiguration https_config = new HttpConfiguration(http_config);
https_config.addCustomizer(new SecureRequestCustomizer());
SslContextFactory sslContextFactory;
if (keystore == null)
{
// Fall back to the JDK's default keystore when none is specified.
File defkeystore = new File(System.getenv("JAVA_HOME"), "lib/security/cacerts");
sslContextFactory = new SslContextFactory(defkeystore.getCanonicalPath());
}
else
{
sslContextFactory = new SslContextFactory(keystore);
}
sslContextFactory.setKeyStorePassword(pw);
httpsConnector = new ServerConnector(server,
new SslConnectionFactory(sslContextFactory, "http/1.1"),
new HttpConnectionFactory(https_config));
httpsConnector.setPort(httpPort);
// NOTE(review): the https connector is never bound to 'host', so the
// --host option appears to be ignored in secure mode — confirm intended.
}
if (singleThreaded)
{
System.out.println(" Running in single-threaded mode");
// Based on the connector configuration below (min = 1, max = 1), Jetty requires a
// minimum thread pool size of three threads for its processing. One acceptor thread,
// one selector thread, and one request thread. It will queue up requests that it
// can't immediately process.
ServerConnector connector = new ServerConnector(server, 1, 1);
connector.setPort(httpPort);
connector.setHost(host);
if (httpsConnector != null)
{
// NOTE(review): both connectors are configured with the same port;
// binding a plain HTTP and an HTTPS connector to one port should fail
// at startup — verify behavior when --secure and --singleThreaded
// are combined.
server.setConnectors(new Connector[]{httpsConnector, connector});
}
else
{
server.setConnectors(new Connector[]{connector});
}
}
else
{
System.out.println(" Running in multi-threaded mode");
if (httpsConnector != null)
{
server.setConnectors(new Connector[]{httpsConnector});
}
}
HandlerCollection coll = new HandlerCollection();
ServletContextHandler context = new ServletContextHandler(server, "/", ServletContextHandler.SESSIONS);
context.setContextPath("/mrgeo");
coll.addHandler(context);
// If the MrGeo configuration defines a static web root path,
// then add a resource handler for being able to access resources
// from that path statically from the root context path.
String webRoot = MrGeoProperties.getInstance().getProperty("web.server.static.root");
if (webRoot != null && !webRoot.isEmpty())
{
boolean goodWebRoot = false;
File f = new File(webRoot);
if (f.exists())
{
if (f.isDirectory())
{
goodWebRoot = true;
}
else
{
System.out
.println("Not serving static web content because web.server.static.root is not a directory: " + webRoot);
}
}
else
{
System.out.println("Not serving static web content because web.server.static.root does not exist: " + webRoot);
}
if (goodWebRoot)
{
System.out.println("Serving static web content from: " + webRoot);
ResourceHandler rh = new ResourceHandler();
rh.setDirectoriesListed(true);
rh.setResourceBase(webRoot);
coll.addHandler(rh);
}
}
server.setHandler(coll);
// Register the Jersey servlet that exposes the MrGeo REST application.
ServletHolder servletHolder = new ServletHolder(new ServletContainer());
servletHolder.setInitParameter("javax.ws.rs.Application", "org.mrgeo.application.Application");
//servletHolder.setInitParameter("com.sun.jersey.api.json.POJOMappingFeature", "true");
servletHolder.setInitOrder(1);
context.addServlet(servletHolder, "/*");
// context.addServlet("org.mrgeo.services.wms.WmsGenerator", "/WmsGenerator/*");
server.start();
return server;
}
/**
 * Starts the server, prints the endpoints being served, and blocks until
 * the server is stopped (e.g. with ctrl-C).
 */
@SuppressWarnings("squid:S00112") // Passing on exception thrown from 3rd party API
private void runWebServer(String host, int httpPort, boolean singleThreaded, boolean secure, String keystore,
String pw) throws Exception
{
Server server = startWebServer(host, httpPort, singleThreaded, secure, keystore, pw);
System.out.println("Embedded web server started, listening on port " + httpPort);
printEndpoints(host, httpPort, server, secure);
System.out.println("Use ctrl-C to stop the web server");
server.join();
}
/**
 * Prints the URI(s) on which the server is reachable. When bound to
 * 0.0.0.0 every address of every local network interface is listed.
 */
private void printEndpoints(String host, int httpPort, Server server, boolean secure) throws SocketException, URISyntaxException
{
String protocol;
if (secure)
{
protocol = "https";
}
else
{
protocol = "http";
}
if ("0.0.0.0".contentEquals(host))
{
Enumeration<NetworkInterface> en = NetworkInterface.getNetworkInterfaces();
if (en != null)
{
while (en.hasMoreElements())
{
NetworkInterface intf = en.nextElement();
Enumeration<InetAddress> enumIpAddr = intf.getInetAddresses();
while (enumIpAddr.hasMoreElements())
{
InetAddress ipAddr = enumIpAddr.nextElement();
String hostname = ipAddr.getCanonicalHostName();
// Strip any IPv6 scope/zone suffix (e.g. "%eth0"); it is not
// valid inside a URI.
if (hostname.contains("%"))
{
hostname = StringUtils.substringBefore(hostname, "%");
}
//printInetAddress(ipAddr);
URI uri = new URI(protocol, null, hostname, httpPort, null, null, null);
System.out.println(" " + uri);
}
}
}
}
else
{
String hostname = host;
if (hostname.contains("%"))
{
hostname = StringUtils.substringBefore(hostname, "%");
}
URI uri = new URI(protocol, null, hostname, httpPort, null, null, null);
System.out.println(" " + uri);
}
}
//private static void printInetAddress(InetAddress myAddress) {
// System.out.println( "toString: " + myAddress);
// System.out.println( "hostName: " + myAddress.getHostName());
// System.out.println( "canonicalHostName: " + myAddress.getCanonicalHostName());
// System.out.println( "getHostAddress: " + myAddress.getHostAddress());
//}
}
| mrgeo-cmd/mrgeo-cmd-webserver/src/main/java/org/mrgeo/cmd/server/WebServer.java | /*
* Copyright 2009-2017. DigitalGlobe, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package org.mrgeo.cmd.server;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.hadoop.conf.Configuration;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.handler.HandlerCollection;
import org.eclipse.jetty.server.handler.ResourceHandler;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.eclipse.jetty.util.thread.ThreadPool;
import org.glassfish.jersey.servlet.ServletContainer;
import org.joda.time.Period;
import org.joda.time.format.PeriodFormatter;
import org.joda.time.format.PeriodFormatterBuilder;
import org.mrgeo.cmd.Command;
import org.mrgeo.core.MrGeoProperties;
import org.mrgeo.data.ProviderProperties;
import org.mrgeo.utils.logging.LoggingUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.ws.rs.core.UriBuilder;
import java.io.File;
import java.net.InetAddress;
import java.net.URI;
import java.net.UnknownHostException;
/**
 * Command that serves the MrGeo REST services from an embedded Jetty web
 * server listening for plain HTTP on a configurable port, optionally in a
 * single-threaded mode for debugging one request at a time.
 */
public class WebServer extends Command
{
private static Logger log = LoggerFactory.getLogger(WebServer.class);
public WebServer()
{
}
@Override
public void addOptions(Options options)
{
Option port = new Option("p", "port", true, "The HTTP port on which the server will listen (default 8080)");
options.addOption(port);
Option singleThreaded = new Option("st", "singleThreaded", false, "Specify this argument in order to run the web server in essentially single-threaded mode for processing one request at a time");
options.addOption(singleThreaded);
}
@Override
public String getUsage() { return "webserver <options>"; }
/**
 * Parses the command line and runs the web server, blocking until shutdown.
 *
 * @return 0 on clean shutdown, -1 if startup failed (the exception is logged)
 */
@Override
public int run(CommandLine line, Configuration conf,
ProviderProperties providerProperties) throws ParseException
{
try
{
int httpPort = 8080;
if (line.hasOption("p"))
{
try
{
httpPort = Integer.parseInt(line.getOptionValue("p", "8080"));
}
catch (NumberFormatException nfe)
{
throw new ParseException("Invalid HTTP port specified: " + line.getOptionValue("p"));
}
}
boolean singleThreaded = false;
// NOTE(review): hasOption cannot throw NumberFormatException, and the
// message refers to an "n" option that is never registered — this
// try/catch looks like dead code left over from a copy/paste.
try
{
singleThreaded = line.hasOption("st");
}
catch (NumberFormatException nfe)
{
throw new ParseException("Invalid number of connections specified: " + line.getOptionValue("n"));
}
if (line.hasOption("v"))
{
LoggingUtils.setDefaultLogLevel(LoggingUtils.INFO);
}
if (line.hasOption("d"))
{
LoggingUtils.setDefaultLogLevel(LoggingUtils.DEBUG);
}
runWebServer(httpPort, singleThreaded);
return 0;
}
catch (Exception e)
{
log.error("Exception thrown", e);
}
return -1;
}
/**
 * Builds, configures and starts the Jetty server on the given port: the
 * Jersey REST servlet under /mrgeo and an optional static-content handler
 * rooted at web.server.static.root.
 *
 * @return the started Jetty {@link Server}
 */
@SuppressFBWarnings(value = "PATH_TRAVERSAL_IN", justification = "File() checking for existence")
@SuppressWarnings("squid:S00112") // Passing on exception thrown from 3rd party API
private Server startWebServer(int httpPort, boolean singleThreaded) throws Exception
{
System.out.println("Starting embedded web server on port " + httpPort);
URI uri = UriBuilder.fromUri("http://" + getHostName() + "/").port(httpPort).build();
Server server = null;
if (singleThreaded) {
System.out.println(" Running in single-threaded mode");
// Based on the connector configuration below (min = 1, max = 1), Jetty requires a
// minimum thread pool size of three threads for its processing. One acceptor thread,
// one selector thread, and one request thread. It will queue up requests that it
// can't immediately process.
ThreadPool threadPool = new QueuedThreadPool(3, 1);
server = new Server(threadPool);
ServerConnector connector = new ServerConnector(server, 1, 1);
// connector.setAcceptQueueSize(0);
connector.setPort(httpPort);
server.setConnectors(new Connector[]{connector});
}
else {
server = new Server(httpPort);
}
HandlerCollection coll = new HandlerCollection();
ServletContextHandler context = new ServletContextHandler(server, "/", ServletContextHandler.SESSIONS);
context.setContextPath("/mrgeo");
coll.addHandler(context);
// If the MrGeo configuration defines a static web root path,
// then add a resource handler for being able to access resources
// from that path statically from the root context path.
String webRoot = MrGeoProperties.getInstance().getProperty("web.server.static.root");
if (webRoot != null && !webRoot.isEmpty())
{
boolean goodWebRoot = false;
File f = new File(webRoot);
if (f.exists())
{
if (f.isDirectory())
{
goodWebRoot = true;
}
else
{
System.out
.println("Not serving static web content because web.server.static.root is not a directory: " + webRoot);
}
}
else
{
System.out.println("Not serving static web content because web.server.static.root does not exist: " + webRoot);
}
if (goodWebRoot)
{
System.out.println("Serving static web content from: " + webRoot);
ResourceHandler rh = new ResourceHandler();
rh.setDirectoriesListed(true);
rh.setResourceBase(webRoot);
coll.addHandler(rh);
}
}
server.setHandler(coll);
// Register the Jersey servlet that exposes the MrGeo REST application.
ServletHolder servletHolder = new ServletHolder(new ServletContainer());
servletHolder.setInitParameter("javax.ws.rs.Application", "org.mrgeo.application.Application");
//servletHolder.setInitParameter("com.sun.jersey.api.json.POJOMappingFeature", "true");
servletHolder.setInitOrder(1);
context.addServlet(servletHolder, "/*");
// context.addServlet("org.mrgeo.services.wms.WmsGenerator", "/WmsGenerator/*");
server.start();
System.out.println(String.format("Web Server started at %s", uri));
return server;
}
/**
 * Starts the server and blocks until it is stopped (e.g. with ctrl-C).
 */
@SuppressWarnings("squid:S00112") // Passing on exception thrown from 3rd party API
private void runWebServer(int httpPort, boolean singleThreaded) throws Exception
{
Server server = startWebServer(httpPort, singleThreaded);
System.out.println("Use ctrl-C to stop the web server");
server.join();
}
/**
 * Returns the canonical host name of the local machine, falling back to
 * "localhost" when it cannot be resolved.
 */
private String getHostName()
{
try
{
return InetAddress.getLocalHost().getCanonicalHostName();
}
catch (UnknownHostException e)
{
log.error("Exception thrown", e);
System.err.println("Unknown host");
}
return "localhost";
}
}
| Add optional host parameter. As well as HTTPS server support
HTTPS isn't well tested, only a quick, self-signed cert test
| mrgeo-cmd/mrgeo-cmd-webserver/src/main/java/org/mrgeo/cmd/server/WebServer.java | Add optional host parameter. As well as HTTPS server support |
|
Java | apache-2.0 | error: pathspec 'src/test/runtimeCallStack/Test.java' did not match any file(s) known to git
| c608c96b98ac728c4c1ee7a825c7f8e65cf194c9 | 1 | cheyiliu/test4java |
package test.runtimeCallStack;
import java.io.PrintWriter;
import java.io.StringWriter;
/**
 * Demonstrates capturing the current call stack at runtime by creating a
 * Throwable inside the method of interest and rendering its stack trace.
 */
public class Test {

    public static void main(String[] args) {
        new Test().f();
    }

    /**
     * Renders a throwable's stack trace as a string.
     *
     * @param tr the throwable to render; may be null
     * @return the full stack trace text, or "" when tr is null
     */
    public static String getStackTraceString(Throwable tr) {
        if (tr == null) {
            return "";
        }
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        tr.printStackTrace(pw);
        pw.flush(); // ensure everything reached the underlying StringWriter
        return sw.toString();
    }

    private void a() {
        // eg. how to use: add the lines below to the method you want to trace.
        // The Throwable constructor already captures the current stack, so the
        // previous extra fillInStackTrace() call was redundant and was removed.
        Throwable t = new Throwable();
        System.out.println(getStackTraceString(t));
    }

    private void b() {
        a();
    }

    private void c() {
        b();
    }

    private void d() {
        c();
    }

    private void e() {
        d();
    }

    private void f() {
        e();
    }
}
| src/test/runtimeCallStack/Test.java | add get call stack trace test
| src/test/runtimeCallStack/Test.java | add get call stack trace test |
|
Java | apache-2.0 | error: pathspec 'DynamicProgramming/SRM596_DIV2_2.java' did not match any file(s) known to git
| 364fa5d9dd9a1f65afe5d06227b9a7bfbf884aa8 | 1 | manoharprabhu/Topcoder,manoharprabhu/Topcoder |
/**
 * TopCoder SRM 596 Div 2: minimum cost to travel a colored road.
 *
 * <p>From tile k you may jump to any later tile i whose color follows k's
 * color in the cycle R -> G -> B -> R; the jump costs (i - k)^2. The answer
 * is the cheapest total cost from the first tile to the last, computed with
 * Floyd-Warshall over all tile pairs.
 */
public class ColorfulRoad {

    // Stands in for "unreachable"; small enough that INF + INF still fits
    // in an int, so the relaxation below cannot overflow.
    private static final int INF = 99999999;

    /** @return the smaller of a and b */
    public int min(int a, int b) {
        return Math.min(a, b);
    }

    int[][] matrix;

    /**
     * Returns the minimum total cost of travelling from the first to the
     * last tile of {@code road}, or -1 when the last tile is unreachable.
     *
     * <p>Bug fix: the table is now sized by the road length and fully
     * initialized. The original allocated a 30x30 table but initialized
     * only its 20x20 corner, leaving zero-cost phantom edges (and an
     * out-of-bounds crash) for longer roads.
     *
     * @param road sequence of tile colors, each 'R', 'G' or 'B'
     * @return minimum cost, or -1 if no valid path exists
     */
    public int getMin(String road) {
        int len = road.length();
        matrix = new int[len][len];
        for (int i = 0; i < len; i++) {
            for (int k = 0; k < len; k++) {
                matrix[i][k] = INF;
            }
        }
        // Direct jumps permitted by the color order; cost = squared distance.
        for (int i = 1; i < len; i++) {
            for (int k = 0; k < i; k++) {
                if ((road.charAt(k) == 'R' && road.charAt(i) == 'G') ||
                        (road.charAt(k) == 'G' && road.charAt(i) == 'B') ||
                        (road.charAt(k) == 'B' && road.charAt(i) == 'R')) {
                    matrix[k][i] = (i - k) * (i - k);
                }
            }
        }
        // Floyd-Warshall all-pairs shortest paths over the tiles.
        for (int k = 0; k < len; k++) {
            for (int i = 0; i < len; i++) {
                for (int j = 0; j < len; j++) {
                    matrix[i][j] = min(matrix[i][j], matrix[i][k] + matrix[k][j]);
                }
            }
        }
        return matrix[0][len - 1] == INF ? -1 : matrix[0][len - 1];
    }
}
| DynamicProgramming/SRM596_DIV2_2.java | Added SRM 500 Div 2 problem 2. My first 500 point successful submission ;)
| DynamicProgramming/SRM596_DIV2_2.java | Added SRM 500 Div 2 problem 2. My first 500 point successful submission ;) |
|
Java | apache-2.0 | error: pathspec 'src/com/lxt/linkedList/IntersectNode.java' did not match any file(s) known to git
| 5efdf07cbeae7e831d3bb16506a8e105c3784bdd | 1 | zer0Black/Programmer-code-Interview-Guide | package com.lxt.linkedList;
import org.omg.CORBA.NO_IMPLEMENT;
import com.sun.org.apache.regexp.internal.recompile;
/**
* 两个单链表相交的一系列问题
* 题目:单链表可能有环,也可能无环。给定两个单链表的头节点
* head1和head2,这两个链表可能相交也可能不相交。请
* 实现一个函数,如果两个链表相交,返回相交的第一个
* 节点;如果不相交,返回null即可
* 要求:如果链表1长度N,链表2长度M,时间复杂度达到O(N+M),
* 空间复杂度O(1)
* @author zer0
*
*/
public class IntersectNode {
/**
* 判断链表是否有环,若有,则返回入环节点
* @param head
* @return
* 2016年9月27日
*/
public static Node getLoopNode(Node head){
if(head == null) return null;
Node slow = head;
Node fast = head;
Node p = null; //碰撞节点P
while(fast.next != null && fast.next.next != null){
slow = slow.next;
fast = fast.next.next;
if (slow == fast) {
p = slow;
break;
}
}
while(head != null && p != null){
head = head.next;
p = p.next;
if (head == p) {
break;
}
}
return p;
}
/**
* 都没环的情况
* @param head1
* @param head2
* @return
* 2016年9月27日
*/
public static Node noLoop(Node head1, Node head2){
if (head1 == null || head2 == null) {
return null;
}
int len1 = 0;
Node cur1 = head1;
while(cur1 != null){
len1++;
cur1 = cur1.next;
}
int len2 = 0;
Node cur2 = head2;
while(cur2 != null){
len2++;
cur2 = cur2.next;
}
int k = len1-len2;
cur1 = k>0 ? head1 : head2;
cur2 = cur1 == head1 ? head2 : head1;
k = Math.abs(k);
while(k!=0){
k--;
cur1 = cur1.next;
}
while(cur1 != cur2){
cur1 = cur1.next;
cur2 = cur2.next;
}
return cur1;
}
/**
* 都有环的情况
* @param head1
* @param head2
* @return
* 2016年9月27日
*/
public static Node bothLoop(Node head1, Node loop1, Node head2, Node loop2){
if (head1 == null || head2 == null || loop1 == null || loop2 == null) {
return null;
}
//两种情况,一种是在环外相交,一种是在环内相交
if(loop1 == loop2){
int len1 = 0;
Node cur1 = head1;
while(cur1 != loop1){
len1++;
cur1 = cur1.next;
}
int len2 = 0;
Node cur2 = head2;
while(cur2 != loop2){
len2++;
cur2 = cur2.next;
}
int k = len1-len2;
cur1 = k>0 ? head1 : head2;
cur2 = cur1 == head1 ? head2 : head1;
k = Math.abs(k);
while(k!=0){
k--;
cur1 = cur1.next;
}
while(cur1 != cur2){
cur1 = cur1.next;
cur2 = cur2.next;
}
return cur1;
}else {
Node cur = loop1.next;
while(cur != loop1){
if (cur == loop2) {
return loop1;
}
cur = cur.next;
}
return null;
}
}
public static Node getIntersectNode(Node head1, Node head2){
if (head1 == null || head2 == null) {
return null;
}
Node loop1 = getLoopNode(head1);
Node loop2 = getLoopNode(head2);
if (loop1 == null && loop2 == null) {
return noLoop(head1, head2);
}else if (loop1 != null && loop2 != null) {
return bothLoop(head1, loop1, head2, loop2);
}
return null;
}
public static void main(String[] args) {
Node node1 = new Node(1);
Node node2 = new Node(2);
Node node3 = new Node(3);
Node node4 = new Node(4);
Node node5 = new Node(5);
Node node6 = new Node(6);
Node node7 = new Node(7);
Node node8 = new Node(8);
Node node9 = new Node(9);
node1.next = node2;
node2.next = node3;
node3.next = node4;
node4.next = node5;
node5.next = node6;
node6.next = node3;
node7.next = node8;
node8.next = node9;
node9.next = node5;
Node node = getIntersectNode(node1, node7);
if (node!=null) {
System.out.println("相交节点为" + node.value);
}else {
System.out.println("无相交节点");
}
}
}
| src/com/lxt/linkedList/IntersectNode.java | 1、【添加】两个单链表相交的一系列问题 | src/com/lxt/linkedList/IntersectNode.java | 1、【添加】两个单链表相交的一系列问题 |
|
Java | apache-2.0 | error: pathspec 'src/main/java/me/alivecode/algs4/CPM.java' did not match any file(s) known to git
| db4db2676ad7116fa6c11e9ee16f7740748a1dc2 | 1 | kevinojt/algorithms4 | package me.alivecode.algs4;
import edu.princeton.cs.algs4.In;
import edu.princeton.cs.algs4.StdOut;
/**
* The {@code CPM} class provides a client that resolves parallel
* precedence-constrained job scheduling problem via
* <em>critical path method</em>. It reduces the problem to
* longest-paths problem in edge-weighted DAGs.
*/
public class CPM {
private CPM() {}
/**
* Reads the precedence constraints from standard input
* and prints a feasible schedule to standard output.
* CMP jobsPC.txt
*
* Input:
10
41.0 3 1 7 9
51.0 1 2
50.0 0
36.0 0
38.0 0
45.0 0
21.0 2 3 8
32.0 2 3 8
32.0 1 2
29.0 2 4 6
* Output:
job start finish
--------------------
0 0.0 41.0
1 41.0 92.0
2 123.0 173.0
3 91.0 127.0
4 70.0 108.0
5 0.0 45.0
6 70.0 91.0
7 41.0 73.0
8 91.0 123.0
9 41.0 70.0
Finish time: 173.0
*
* @param args the command-line arguments
*/
public static void main(String[] args) {
In in = new In(args[0]);
int n = in.readInt();
int source = 2 * n;
int sink = 2 * n + 1;
// build the network
EdgeWeightedDigraph G = new EdgeWeightedDigraph(2 * n + 2);
for(int i = 0; i < n; i++) {
double duration = in.readDouble();
G.addEdge(new DirectedEdge(i, i+n, duration));
G.addEdge(new DirectedEdge(source, i, 0.0));
G.addEdge(new DirectedEdge(i+n, sink, 0.0));
int m = in.readInt();
for(int j = 0; j < m; j++) {
int precedent = in.readInt();
G.addEdge(new DirectedEdge(i+n, precedent, 0.0));
}
}
AcyclicLP lp = new AcyclicLP(G, source);
StdOut.println(" job start finish");
StdOut.println("--------------------");
for(int i = 0; i < n; i++) {
StdOut.printf("%4d %6.1f %6.1f\n", i, lp.distTo(i), lp.distTo(i+n));
}
StdOut.printf( "Finish time: %6.1f\n", lp.distTo(sink));
}
}
| src/main/java/me/alivecode/algs4/CPM.java | A CPM class for resolving job scheduling problem.
The CPM class provides a client that resolves parallel
precedence-constrained job scheduling problem via
critical path method. It reduces the problem to
longest-paths problem in edge-weighted DAGs.
| src/main/java/me/alivecode/algs4/CPM.java | A CPM class for resolving job scheduling problem. |
|
Java | apache-2.0 | error: pathspec 'src/main/java/reciter/utils/DegreeYearStrategyUtils.java' did not match any file(s) known to git
| dd8eee786ca9dd53d9afc2b219f5c505ffb0244b | 1 | wcmc-its/ReCiter,wcmc-its/ReCiter | package reciter.utils;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.springframework.stereotype.Component;
import lombok.NoArgsConstructor;
@NoArgsConstructor
@Component
public class DegreeYearStrategyUtils {
public Map<Double, Double> getDegreeYearDiscrepancyScoreMap(String degreeYearDiscrepancyScore) {
Map<Double, Double> degreeYearDiscrepancyScoreMap = new HashMap<>();
List<String> degreeYearScoreList = Arrays.asList(degreeYearDiscrepancyScore.trim().split("\\s*,\\s*"));
for(String degreeYearScore: degreeYearScoreList) {
String[] discrepancyToScore = degreeYearScore.trim().split("\\s*\\|\\s*");
if(discrepancyToScore.length == 2) {
degreeYearDiscrepancyScoreMap.put(Double.parseDouble(discrepancyToScore[0]), Double.parseDouble(discrepancyToScore[1]));
}
}
return degreeYearDiscrepancyScoreMap;
}
} | src/main/java/reciter/utils/DegreeYearStrategyUtils.java | generate degree map from properties
| src/main/java/reciter/utils/DegreeYearStrategyUtils.java | generate degree map from properties |
|
Java | apache-2.0 | error: pathspec 'sandbox/src/test/java/ru/stqa/pft/sandbox/PointTests.java' did not match any file(s) known to git
| 068ccf6a0be1930fb5f063b90128dd7ae238b286 | 1 | virtus80/javaForTesters | package ru.stqa.pft.sandbox;
import junit.framework.Assert;
import org.testng.annotations.Test;
/**
 * Unit tests for {@code Point#distance(Point)}.
 */
public class PointTests {

    @Test
    public void testDistance() {
        // Classic 3-4-5 right triangle: legs of 4 and 3.
        Point from = new Point(4, 3);
        Point to = new Point(8, 0);
        Assert.assertEquals(from.distance(to), 5.0);
    }

    @Test
    public void testLineDistance() {
        // Points on the same horizontal line: distance is just delta-x.
        Point from = new Point(4, 3);
        Point to = new Point(7, 3);
        Assert.assertEquals(from.distance(to), 3.0);
    }

    @Test
    public void testNegativeDistance() {
        // Point mirrored through the origin: distance is twice the radius.
        Point from = new Point(4, 3);
        Point to = new Point(-4, -3);
        Assert.assertEquals(from.distance(to), 10.0);
    }
}
| sandbox/src/test/java/ru/stqa/pft/sandbox/PointTests.java | Homework3
- added class PointTests for checking corectness distance
| sandbox/src/test/java/ru/stqa/pft/sandbox/PointTests.java | Homework3 |
|
Java | apache-2.0 | error: pathspec 'src/test/java/com/levelup/java/GetFirstNonNullObject.java' did not match any file(s) known to git
| 3ad58d5fdec1df57a8b2693fedd136ffd0c0a72b | 1 | wq19880601/java-util-examples,MartinMSPedersen/levelup-java-examples,karlthepagan/levelup-java-examples,leveluplunch/levelup-java-examples | package com.levelup.java;
import static org.junit.Assert.assertEquals;
import org.apache.commons.lang3.ObjectUtils;
import org.junit.Test;
import com.google.common.base.Objects;
/**
 * This java example will demonstrate getting the
 * first non null object.
 *
 * @author Justin Musgrove
 * @see <a href='http://www.leveluplunch.com/java/examples/get-first-non-null-object/'>First non null object</a>
 *
 */
public class GetFirstNonNullObject {

    @Test
    public void get_first_non_null_java () {
        String first = null;
        String second = "Stomp the Hawks, Badgers!";

        // Plain-Java version: pick the second value only when the first
        // is absent (flattened form of the original nested ifs).
        String firstNullObject = null;
        if (first == null && second != null) {
            firstNullObject = second;
        }

        assertEquals(second, firstNullObject);
    }

    @Test
    public void get_first_non_null_guava () {
        String first = null;
        String second = "Go Badgers!";

        // Guava picks the first argument that is not null.
        String result = Objects.firstNonNull(first, second);

        assertEquals(second, result);
    }

    @Test
    public void get_first_non_null_apache () {
        String first = null;
        String second = "On, Wisconsin!";

        // Commons-lang equivalent of the Guava call above.
        String result = ObjectUtils.firstNonNull(first, second);

        assertEquals(second, result);
    }
}
| src/test/java/com/levelup/java/GetFirstNonNullObject.java | added get first non null object
| src/test/java/com/levelup/java/GetFirstNonNullObject.java | added get first non null object |
|
Java | apache-2.0 | error: pathspec 'loader-server/src/main/java/perf/server/util/JobsCache.java' did not match any file(s) known to git
| 2134633c09f491b38a25f6574f0df519ef97481c | 1 | krishnakanthpps/loader,krishnakanthpps/loader,krishnakanthpps/loader,krishnakanthpps/loader | package perf.server.util;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.map.ObjectMapper;
import org.slf4j.Logger;
import perf.server.config.JobFSConfig;
import perf.server.domain.Job;
import perf.server.domain.ResourceCollectionInstance;
import java.io.File;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Map;
import java.util.concurrent.ExecutionException;
/**
 * Process-wide, lazily-loading cache of {@link Job} status objects, keyed by
 * job id and populated from each job's status file on disk.
 *
 * <p>NOTE(review): {@link #initiateCache} must be called once at startup
 * before {@link #getJob}; until then the cache reference is null and
 * getJob would throw a NullPointerException — confirm callers guarantee
 * this ordering.
 */
public class JobsCache {

    // Shared, pre-configured JSON mapper used by the cache loader below.
    private static ObjectMapper objectMapper;

    // Lazily populated jobId -> Job cache; null until initiateCache() runs.
    private static LoadingCache<String, Job> jobs;

    static {
        objectMapper = new ObjectMapper();
        // Date format used inside job status files.
        DateFormat dateFormat = new SimpleDateFormat("MMM dd hh:mm:ss z yyyy");
        objectMapper.setDateFormat(dateFormat);
        // Be lenient about unquoted field names in hand-edited status files.
        objectMapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
    }

    /**
     * Creates the cache. Entries are loaded on demand by deserializing the
     * job's status file; at most 1000 jobs are retained.
     *
     * @param jobFSConfig supplies the filesystem location of job status files
     */
    public static void initiateCache(final JobFSConfig jobFSConfig) {
        jobs = CacheBuilder.newBuilder()
                .maximumSize(1000)
                .build(
                        new CacheLoader<String, Job>() {
                            public Job load(String jobId) throws IOException {
                                return objectMapper.readValue(new File(jobFSConfig.getJobStatusFile(jobId)), Job.class);
                            }
                        });
    }

    /**
     * Returns the cached (or freshly loaded) status of the given job.
     *
     * @param jobId id of the job to look up
     * @return the job's status object
     * @throws ExecutionException if loading/parsing the status file fails
     */
    public static Job getJob(String jobId) throws ExecutionException {
        return jobs.get(jobId);
    }
}
| loader-server/src/main/java/perf/server/util/JobsCache.java | Adding Missing File JobsCache
| loader-server/src/main/java/perf/server/util/JobsCache.java | Adding Missing File JobsCache |
|
Java | apache-2.0 | error: pathspec 'rapidoid-http/src/main/java/org/rapidoid/http/Handlers.java' did not match any file(s) known to git
| 296f10c427e2a8247e4fc499b81364b0fb62d66c | 1 | rapidoid/rapidoid,rapidoid/rapidoid,rapidoid/rapidoid,rapidoid/rapidoid | package org.rapidoid.http;
/*
* #%L
* rapidoid-http
* %%
* Copyright (C) 2014 - 2015 Nikolche Mihajlovski and contributors
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.rapidoid.annotation.Authors;
import org.rapidoid.annotation.Since;
@Authors("Nikolche Mihajlovski")
@Since("4.1.0")
public class Handlers {
    /**
     * Creates a handler that always responds with the given text as HTML.
     *
     * @param text body to return on every request
     * @return a handler producing an HTML response
     */
    public static Handler html(final String text) {
        Handler htmlHandler = new Handler() {
            @Override
            public Object handle(HttpExchange x) throws Exception {
                return x.html().result(text);
            }
        };
        return htmlHandler;
    }
    /**
     * Creates a handler that always responds with the given string as JSON.
     *
     * @param json body to return on every request
     * @return a handler producing a JSON response
     */
    public static Handler json(final String json) {
        Handler jsonHandler = new Handler() {
            @Override
            public Object handle(HttpExchange x) throws Exception {
                return x.json().result(json);
            }
        };
        return jsonHandler;
    }
}
| rapidoid-http/src/main/java/org/rapidoid/http/Handlers.java | Added simple HTTP handler factory utils for convenience.
| rapidoid-http/src/main/java/org/rapidoid/http/Handlers.java | Added simple HTTP handler factory utils for convenience. |
|
Java | apache-2.0 | error: pathspec 'OOP/Tracker/src/main/java/ru/matevosyan/models/Comments.java' did not match any file(s) known to git
| 86b249484eb7a0f63a99457b7d35149d76d313d8 | 1 | VardanMatevosyan/Vardan-Git-Repository,VardanMatevosyan/Vardan-Git-Repository,VardanMatevosyan/Vardan-Git-Repository | package ru.matevosyan.models;
/**
* This class was created for Comments, that's hold all comments together inside in item.
* Created on 15.11.2016.
* @since 1.0
* @author Matevosyan Vardan
* @version 1.0
*
*/
/**
 * A single user comment attached to an {@link Item}.
 * Created on 15.11.2016.
 * @since 1.0
 * @author Matevosyan Vardan
 * @version 1.0
 */
public class Comments extends Item {
    // Text of this comment.
    private String comment;
    /**
     * Default constructor.
     */
    public Comments() {
    }
    /**
     * Creates a comment wrapping the given text.
     *
     * @param comment the comment text
     */
    public Comments(String comment) {
        this.comment = comment;
    }
    /**
     * Returns the comment text; a null comment prints as "null",
     * matching {@code String.format("%s", comment)}.
     *
     * @return the comment text
     */
    @Override
    public String toString() {
        return String.valueOf(this.comment);
    }
}
| OOP/Tracker/src/main/java/ru/matevosyan/models/Comments.java | Add comments.java
| OOP/Tracker/src/main/java/ru/matevosyan/models/Comments.java | Add comments.java |
|
Java | apache-2.0 | error: pathspec 'src/main/java/org/anarres/jdiagnostics/DiagnosticsFactory.java' did not match any file(s) known to git
| d787e31267fc586b9e06d1c600266f8852e37beb | 1 | shevek/jdiagnostics,shevek/jdiagnostics | package org.anarres.jdiagnostics;
/**
*
* @author shevek
*/
public class DiagnosticsFactory {
    /**
     * Builds a composite diagnostic query for a {@link ClassNotFoundException}:
     * it checks whether the named class exists and inspects the system,
     * thread-context, and jdiagnostics class loaders.
     *
     * @param e the exception whose message carries the missing class name
     * @return a composite query over the individual diagnostics
     */
    public static Query forException(ClassNotFoundException e) {
        CompositeQuery diagnostics = new CompositeQuery();
        // The exception message is the fully-qualified name of the missing class.
        diagnostics.add(new ClassExistsQuery(e.getMessage()));
        diagnostics.add(new ClassLoaderQuery("system", String.class.getClassLoader()));
        diagnostics.add(new ClassLoaderQuery("threadcontext", Thread.currentThread().getContextClassLoader()));
        diagnostics.add(new ClassLoaderQuery("jdiagnostics", DiagnosticsFactory.class.getClassLoader()));
        return diagnostics;
    }
}
| src/main/java/org/anarres/jdiagnostics/DiagnosticsFactory.java | Experimental DiagnosticsFactory class.
| src/main/java/org/anarres/jdiagnostics/DiagnosticsFactory.java | Experimental DiagnosticsFactory class. |
|
Java | apache-2.0 | error: pathspec 'app/src/main/java/iecs/fcu_navigate/database/MarkerDBHelper.java' did not match any file(s) known to git
| 7660f0238b35de3ebcd1047f41a8076a8f4b4993 | 1 | danny50610/FCU-Navigate | package iecs.fcu_navigate.database;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
/**
 * SQLiteOpenHelper for the marker database ("Marker.db").
 * Schema creation and migration are not implemented yet (work in progress).
 */
public class MarkerDBHelper extends SQLiteOpenHelper {
    // Bump this when the schema changes so onUpgrade can migrate existing installs.
    public static final int DATABASE_VERSION = 1;
    public static final String DATABASE_NAME = "Marker.db";
    public MarkerDBHelper(Context context) {
        super(context, DATABASE_NAME, null, DATABASE_VERSION);
    }
    @Override
    public void onCreate(SQLiteDatabase db) {
        // TODO: create marker tables here; intentionally empty for now (WIP).
    }
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        // TODO: no migrations defined yet; intentionally empty for now (WIP).
    }
}
| app/src/main/java/iecs/fcu_navigate/database/MarkerDBHelper.java | [WIP] 建立地標資料庫
| app/src/main/java/iecs/fcu_navigate/database/MarkerDBHelper.java | [WIP] 建立地標資料庫 |
|
Java | apache-2.0 | error: pathspec 'assembly/src/test/java/org/apache/activemq/config/IDERunner.java' did not match any file(s) known to git
| ac24a08b8b102fc90c34cb6d467c4dd7092a276c | 1 | chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq,chirino/activemq | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.config;
import org.apache.activemq.broker.BrokerFactory;
import org.apache.activemq.broker.BrokerService;
import org.apache.camel.util.FileUtil;
import java.io.File;
/**
* A helper class that can be used to start the full broker distro with default configuration
* in an IDE. It can be helpful for debugging/testing externally provided test cases.
*/
public class IDERunner {
    /**
     * Boots a broker from the distro's release configuration so the full
     * distribution can be started and debugged from an IDE.
     */
    public static void main(String[] args) throws Exception {
        // Point the broker at the release configuration while keeping runtime
        // data under target/.
        System.setProperty("activemq.home", "."); // not a valid home but ok for xml validation
        System.setProperty("activemq.base", ".");
        System.setProperty("activemq.data", "target/");
        System.setProperty("activemq.conf", "src/release/conf");
        // Start from a clean persistence store so repeated runs behave the same.
        FileUtil.removeDir(new File("target/kahadb"));
        BrokerService brokerService = BrokerFactory.createBroker("xbean:src/release/conf/activemq.xml");
        brokerService.start();
        brokerService.waitUntilStopped();
    }
}
| assembly/src/test/java/org/apache/activemq/config/IDERunner.java | IDERunner util class that can run full distro from IDE
| assembly/src/test/java/org/apache/activemq/config/IDERunner.java | IDERunner util class that can run full distro from IDE |
|
Java | apache-2.0 | error: pathspec 'grill-api/src/main/java/com/inmobi/grill/api/GrillConfConstants.java' did not match any file(s) known to git
| 786c344ebae6533cc5727736b96dcfa31a7c403a | 1 | adeelmahmood/lens,guptapuneet/lens,prongs/grill,kamaldeep-ebay/lens,sriksun/incubator-lens,RaghavendraSingh/lens,sushrutikhar/grill,archanah24/lens,kamaldeep-ebay/lens,adeelmahmood/lens,archanah24/lens,guptapuneet/lens,sriksun/incubator-lens,sushrutikhar/grill,RaghavendraSingh/lens,kamaldeep-ebay/lens,Flipkart/incubator-lens,rajubairishetti/incubator-lens,archanah24/lens,Flipkart/incubator-lens,sushilmohanty/incubator-lens,sriksun/incubator-lens,sushrutikhar/grill,adeelmahmood/lens,rajubairishetti/lens,archanah24/lens,rajubairishetti/lens,sriksun/incubator-lens,sushrutikhar/grill,adeelmahmood/lens,sushrutikhar/grill,sushilmohanty/incubator-lens,RaghavendraSingh/lens,rajubairishetti/incubator-lens,sushilmohanty/incubator-lens,RaghavendraSingh/lens,rajubairishetti/incubator-lens,kamaldeep-ebay/lens,RaghavendraSingh/lens,prongs/grill,guptapuneet/lens,guptapuneet/lens,sushilmohanty/incubator-lens,prongs/grill,Flipkart/incubator-lens,rajubairishetti/lens,sushilmohanty/incubator-lens,sriksun/incubator-lens,Flipkart/incubator-lens,archanah24/lens | package com.inmobi.grill.api;
/**
 * Central holder for Grill configuration property names and their defaults.
 */
public final class GrillConfConstants {

    /** Property controlling whether a query is prepared as part of explain. */
    public static final String PREPARE_ON_EXPLAIN = "grill.doprepare.on.explain";

    /** Default value for {@link #PREPARE_ON_EXPLAIN}. */
    public static final Boolean DEFAULT_PREPARE_ON_EXPLAIN = true;

    /** Property listing the engine driver classes to load. */
    public static final String ENGINE_DRIVER_CLASSES = "grill.drivers";

    /** Constants holder; not meant to be instantiated. */
    private GrillConfConstants() {
    }
}
| grill-api/src/main/java/com/inmobi/grill/api/GrillConfConstants.java | Add misisng class
| grill-api/src/main/java/com/inmobi/grill/api/GrillConfConstants.java | Add misisng class |
|
Java | apache-2.0 | error: pathspec 'src/main/java/org/apache/commons/digester3/binder/CallMethodBuilder.java' did not match any file(s) known to git
| bb8aeb11a9f833b95f08cbae309ced3b06f8ebdc | 1 | apache/commons-digester,apache/commons-digester,callMeDimit/commons-digester,callMeDimit/commons-digester,mohanaraosv/commons-digester,apache/commons-digester,callMeDimit/commons-digester,mohanaraosv/commons-digester,mohanaraosv/commons-digester | /* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.digester3.binder;
import java.util.Arrays;
import org.apache.commons.digester3.CallMethodRule;
/**
* Builder chained when invoking {@link LinkedRuleBuilder#callMethod(String)}.
*/
public final class CallMethodBuilder
    extends AbstractBackToLinkedRuleBuilder<CallMethodRule>
{
    // Name of the method the generated CallMethodRule will invoke.
    private final String methodName;
    // Used to resolve parameter type names passed as strings.
    private final ClassLoader classLoader;
    // Offset of the target object on the digester stack (0 = top).
    private int targetOffset;
    private int paramCount = 0;
    private Class<?>[] paramTypes = new Class<?>[]{};
    private boolean useExactMatch = false;
    public CallMethodBuilder( String keyPattern, String namespaceURI, RulesBinder mainBinder,
                              LinkedRuleBuilder mainBuilder, String methodName, ClassLoader classLoader )
    {
        super( keyPattern, namespaceURI, mainBinder, mainBuilder );
        this.methodName = methodName;
        this.classLoader = classLoader;
    }
    /**
     * Sets the location of the target object.
     *
     * Positive numbers are relative to the top of the digester object stack.
     * Negative numbers are relative to the bottom of the stack. Zero implies the top object on the stack.
     *
     * @param targetOffset location of the target object.
     * @return this builder instance
     */
    public CallMethodBuilder withTargetOffset( int targetOffset )
    {
        this.targetOffset = targetOffset;
        return this;
    }
    /**
     * Sets the Java class names that represent the parameter types of the method arguments.
     *
     * If you wish to use a primitive type, specify the corresponding Java wrapper class instead,
     * such as {@code java.lang.Boolean.TYPE} for a {@code boolean} parameter.
     *
     * @param paramTypeNames the Java class names that represent the parameter types of the method arguments
     * @return this builder instance
     */
    public CallMethodBuilder withParamTypes( String... paramTypeNames )
    {
        // NOTE(review): unlike withParamTypes(Class...), this overload does not
        // update paramCount - confirm whether that asymmetry is intentional.
        if ( paramTypeNames != null )
        {
            this.paramTypes = new Class[paramTypeNames.length];
            for ( int i = 0; i < paramTypeNames.length; i++ )
            {
                try
                {
                    this.paramTypes[i] = classLoader.loadClass( paramTypeNames[i] );
                }
                catch ( ClassNotFoundException e )
                {
                    // Report through the binder rather than throwing, so all
                    // binding errors can be collected and shown together.
                    this.reportError( String.format( "callMethod(\"%s\").withParamTypes(%s)", this.methodName,
                                                     Arrays.toString( paramTypeNames ) ),
                                      String.format( "class '%s' cannot be load", paramTypeNames[i] ) );
                }
            }
        }
        return this;
    }
    /**
     * Sets the Java classes that represent the parameter types of the method arguments.
     *
     * If you wish to use a primitive type, specify the corresponding Java wrapper class instead,
     * such as {@code java.lang.Boolean.TYPE} for a {@code boolean} parameter.
     *
     * @param paramTypes The Java classes that represent the parameter types of the method arguments
     * @return this builder instance
     */
    public CallMethodBuilder withParamTypes( Class<?>... paramTypes )
    {
        this.paramTypes = paramTypes;
        if ( paramTypes != null )
        {
            this.paramCount = paramTypes.length;
        }
        return this;
    }
    /**
     * Should <code>MethodUtils.invokeExactMethod</code> be used for the reflection.
     *
     * @param useExactMatch Flag to mark exact matching or not
     * @return this builder instance
     */
    public CallMethodBuilder useExactMatch( boolean useExactMatch )
    {
        this.useExactMatch = useExactMatch;
        return this;
    }
    /**
     * The number of parameters to collect, or zero for a single argument from the body of this element.
     *
     * @param paramCount The number of parameters to collect, or zero for a single argument
     *        from the body of this element.
     * @return this builder instance
     */
    public CallMethodBuilder withParamCount( int paramCount )
    {
        if ( paramCount < 0 )
        {
            this.reportError( String.format( "callMethod(\"%s\").withParamCount(int)", this.methodName ),
                              "negative parameters counter not allowed" );
        }
        this.paramCount = paramCount;
        if ( this.paramCount == 0 )
        {
            // Zero parameters means the element body is the single String argument,
            // unless a single explicit type was already configured.
            if ( this.paramTypes == null || this.paramTypes.length != 1 )
            {
                this.paramTypes = new Class<?>[] { String.class };
            }
        }
        else
        {
            // Default every collected parameter to String until types are set explicitly.
            this.paramTypes = new Class<?>[this.paramCount];
            for ( int i = 0; i < paramTypes.length; i++ )
            {
                this.paramTypes[i] = String.class;
            }
        }
        return this;
    }
    /**
     * Prepare the {@link CallMethodRule} to be invoked using the matching element body as argument.
     *
     * @return this builder instance
     */
    public CallMethodBuilder usingElementBodyAsArgument()
    {
        return withParamCount( 0 );
    }
    /**
     * {@inheritDoc}
     */
    @Override
    protected CallMethodRule createRule()
    {
        CallMethodRule callMethodRule = new CallMethodRule( targetOffset, methodName, paramCount, paramTypes );
        callMethodRule.setUseExactMatch( useExactMatch );
        return callMethodRule;
    }
}
| src/main/java/org/apache/commons/digester3/binder/CallMethodBuilder.java | first checkin of CallMethodBuilder class
git-svn-id: c3d1f7498fb08a2885afe49e111c402c6cd8f5f6@1103122 13f79535-47bb-0310-9956-ffa450edef68
| src/main/java/org/apache/commons/digester3/binder/CallMethodBuilder.java | first checkin of CallMethodBuilder class |
|
Java | apache-2.0 | error: pathspec 'src/test/java/com/google/research/bleth/services/DatabaseServiceTest.java' did not match any file(s) known to git
| d72c95a324254b7d21310dfbed5bb8cfaa3ca428 | 1 | googleinterns/BLETH,googleinterns/BLETH,googleinterns/BLETH | package com.google.research.bleth.services;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
import com.google.appengine.api.datastore.dev.LocalDatastoreService;
import com.google.appengine.repackaged.com.google.gson.Gson;
import com.google.appengine.tools.development.testing.LocalDatastoreServiceTestConfig;
import com.google.appengine.tools.development.testing.LocalServiceTestHelper;
import com.google.common.collect.ArrayTable;
import com.google.common.collect.Multimap;
import com.google.research.bleth.exceptions.BoardStateAlreadyExistsException;
import com.google.research.bleth.exceptions.ExceedingRoundException;
import com.google.research.bleth.simulator.AbstractSimulation;
import com.google.research.bleth.simulator.AwakenessStrategyFactory;
import com.google.research.bleth.simulator.Beacon;
import com.google.research.bleth.simulator.Board;
import com.google.research.bleth.simulator.EstimatedBoard;
import com.google.research.bleth.simulator.IAgent;
import com.google.research.bleth.simulator.Location;
import com.google.research.bleth.simulator.Observer;
import com.google.research.bleth.simulator.RandomMovementStrategy;
import com.google.research.bleth.simulator.RealBoard;
import com.google.research.bleth.simulator.StationaryMovementStrategy;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import java.util.ArrayList;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
 * Unit tests for {@code DatabaseService} board-state reads/writes, running
 * against an in-memory local datastore. Simulation builders and agents are
 * mocked so only the persistence layer is exercised.
 */
@RunWith(MockitoJUnitRunner.class)
public class DatabaseServiceTest {
    // In-memory datastore; SCATTERED id allocation mimics production behavior.
    private final LocalServiceTestHelper helper =
            new LocalServiceTestHelper(new LocalDatastoreServiceTestConfig()
                    .setAutoIdAllocationPolicy(LocalDatastoreService.AutoIdAllocationPolicy.SCATTERED));
    private static final Gson GSON = new Gson();
    private static final Location ZERO_ON_ZERO_COORDINATE = new Location(0, 0);
    private static final Location ONE_ON_ONE_COORDINATE = new Location(1, 1);
    private static final int BOARD_DIMENSION = 2;
    private static final int ZERO_ROUND = 0;
    private static final int FIRST_ROUND = 1;
    // Also used as a round index that is out of range (valid rounds are < max).
    private static final int MAX_NUMBER_OF_ROUNDS = 100;
    @Mock
    AbstractSimulation.Builder firstSimulationBuilder;
    @Mock
    AbstractSimulation.Builder secondSimulationBuilder;
    @Mock
    Beacon beacon;
    @Mock
    Observer observer;
    @Before
    public void setUp() {
        helper.setUp();
    }
    // Stub a complete, valid simulation configuration for the first simulation.
    @Before
    public void mockFirstSimulationBuilder() {
        Mockito.when(firstSimulationBuilder.getSimulationType()).thenReturn("Stalking");
        Mockito.when(firstSimulationBuilder.getMaxNumberOfRounds()).thenReturn(100);
        Mockito.when(firstSimulationBuilder.getBeaconsNum()).thenReturn(2);
        Mockito.when(firstSimulationBuilder.getObserversNum()).thenReturn(1);
        Mockito.when(firstSimulationBuilder.getRowNum()).thenReturn(2);
        Mockito.when(firstSimulationBuilder.getColNum()).thenReturn(2);
        Mockito.when(firstSimulationBuilder.getBeaconMovementStrategy()).thenReturn(new RandomMovementStrategy());
        Mockito.when(firstSimulationBuilder.getObserverMovementStrategy()).thenReturn(new StationaryMovementStrategy());
        Mockito.when(firstSimulationBuilder.getAwakenessStrategyType()).thenReturn(AwakenessStrategyFactory.Type.FIXED);
        Mockito.when(firstSimulationBuilder.getRadius()).thenReturn(3.5);
        Mockito.when(firstSimulationBuilder.getAwakenessCycle()).thenReturn(5);
        Mockito.when(firstSimulationBuilder.getAwakenessDuration()).thenReturn(1);
    }
    // Identical configuration for a second, distinct simulation id.
    @Before
    public void mockSecondSimulationBuilder() {
        Mockito.when(secondSimulationBuilder.getSimulationType()).thenReturn("Stalking");
        Mockito.when(secondSimulationBuilder.getMaxNumberOfRounds()).thenReturn(100);
        Mockito.when(secondSimulationBuilder.getBeaconsNum()).thenReturn(2);
        Mockito.when(secondSimulationBuilder.getObserversNum()).thenReturn(1);
        Mockito.when(secondSimulationBuilder.getRowNum()).thenReturn(2);
        Mockito.when(secondSimulationBuilder.getColNum()).thenReturn(2);
        Mockito.when(secondSimulationBuilder.getBeaconMovementStrategy()).thenReturn(new RandomMovementStrategy());
        Mockito.when(secondSimulationBuilder.getObserverMovementStrategy()).thenReturn(new StationaryMovementStrategy());
        Mockito.when(secondSimulationBuilder.getAwakenessStrategyType()).thenReturn(AwakenessStrategyFactory.Type.FIXED);
        Mockito.when(secondSimulationBuilder.getRadius()).thenReturn(3.5);
        Mockito.when(secondSimulationBuilder.getAwakenessCycle()).thenReturn(5);
        Mockito.when(secondSimulationBuilder.getAwakenessDuration()).thenReturn(1);
    }
    // One beacon and one observer, both with id 0, placed on boards in tests.
    @Before
    public void mockAgents() {
        Mockito.when(beacon.getId()).thenReturn(0);
        Mockito.when(beacon.getType()).thenReturn("Beacon");
        Mockito.when(observer.getId()).thenReturn(0);
        Mockito.when(observer.getType()).thenReturn("Observer");
    }
    @Test
    public void writeNonEmptyRealBoardThenReadRealBoard_shouldGetExpectedJson() {
        DatabaseService db = DatabaseService.getInstance();
        RealBoard realBoard = new RealBoard(BOARD_DIMENSION, BOARD_DIMENSION);
        realBoard.placeAgent(ZERO_ON_ZERO_COORDINATE, beacon);
        realBoard.placeAgent(ONE_ON_ONE_COORDINATE, observer);
        String firstSimulationId = db.writeMetadata(firstSimulationBuilder);
        String expectedBoardState = GSON.toJson(toStaticBoardState(realBoard));
        db.writeBoardState(firstSimulationId, ZERO_ROUND, realBoard);
        String boardState = db.readRealBoardState(firstSimulationId, ZERO_ROUND);
        assertThat(boardState).isEqualTo(expectedBoardState);
    }
    @Test
    public void writeNonEmptyEstimatedBoardThenReadEstimatedBoard_shouldGetExpectedJson() {
        DatabaseService db = DatabaseService.getInstance();
        EstimatedBoard estimatedBoard = new EstimatedBoard(BOARD_DIMENSION, BOARD_DIMENSION);
        estimatedBoard.placeAgent(ZERO_ON_ZERO_COORDINATE, beacon);
        estimatedBoard.placeAgent(ONE_ON_ONE_COORDINATE, observer);
        String firstSimulationId = db.writeMetadata(firstSimulationBuilder);
        String expectedBoardState = GSON.toJson(toStaticBoardState(estimatedBoard));
        db.writeBoardState(firstSimulationId, ZERO_ROUND, estimatedBoard);
        String boardState = db.readEstimatedBoardState(firstSimulationId, ZERO_ROUND);
        assertThat(boardState).isEqualTo(expectedBoardState);
    }
    // Real and estimated boards are stored independently: writing one must not
    // populate the other.
    @Test
    public void writeOnlyRealBoardThenReadEstimatedBoard_shouldGetEmptyBoard() {
        DatabaseService db = DatabaseService.getInstance();
        RealBoard realBoard = new RealBoard(BOARD_DIMENSION, BOARD_DIMENSION);
        realBoard.placeAgent(ZERO_ON_ZERO_COORDINATE, beacon);
        realBoard.placeAgent(ONE_ON_ONE_COORDINATE, observer);
        String firstSimulationId = db.writeMetadata(firstSimulationBuilder);
        String emptyBoardStateJson = GSON.toJson(createEmptyTable(BOARD_DIMENSION, BOARD_DIMENSION));
        db.writeBoardState(firstSimulationId, ZERO_ROUND, realBoard);
        assertThat(db.readEstimatedBoardState(firstSimulationId, ZERO_ROUND)).isEqualTo(emptyBoardStateJson);
    }
    // Board states are keyed by simulation id: another simulation's board is empty.
    @Test
    public void writeRealBoardThenReadRealBoardWithDifferentSimulationId_shouldGetEmptyBoard() {
        DatabaseService db = DatabaseService.getInstance();
        RealBoard realBoard = new RealBoard(BOARD_DIMENSION, BOARD_DIMENSION);
        realBoard.placeAgent(ZERO_ON_ZERO_COORDINATE, beacon);
        realBoard.placeAgent(ONE_ON_ONE_COORDINATE, observer);
        String firstSimulationId = db.writeMetadata(firstSimulationBuilder);
        String secondSimulationId = db.writeMetadata(secondSimulationBuilder);
        String emptyBoardStateJson = GSON.toJson(createEmptyTable(2, 2));
        db.writeBoardState(firstSimulationId, ZERO_ROUND, realBoard);
        assertThat(db.readRealBoardState(secondSimulationId, ZERO_ROUND)).isEqualTo(emptyBoardStateJson);
    }
    @Test
    public void writeEstimatedBoardThenReadEstimatedBoardWithDifferentExistingRound_shouldGetEmptyBoard() {
        DatabaseService db = DatabaseService.getInstance();
        EstimatedBoard estimatedBoard = new EstimatedBoard(BOARD_DIMENSION, BOARD_DIMENSION);
        estimatedBoard.placeAgent(ZERO_ON_ZERO_COORDINATE, beacon);
        estimatedBoard.placeAgent(ONE_ON_ONE_COORDINATE, observer);
        String firstSimulationId = db.writeMetadata(firstSimulationBuilder);
        String emptyBoardStateJson = GSON.toJson(createEmptyTable(BOARD_DIMENSION, BOARD_DIMENSION));
        db.writeBoardState(firstSimulationId, ZERO_ROUND, estimatedBoard);
        assertThat(db.readRealBoardState(firstSimulationId, FIRST_ROUND)).isEqualTo(emptyBoardStateJson);
    }
    // Reading a round index beyond the simulation's range yields null, not empty.
    @Test
    public void writeEstimatedBoardThenReadEstimatedBoardWithDifferentNonExistingRound_shouldGetNull() {
        DatabaseService db = DatabaseService.getInstance();
        EstimatedBoard estimatedBoard = new EstimatedBoard(BOARD_DIMENSION, BOARD_DIMENSION);
        estimatedBoard.placeAgent(ZERO_ON_ZERO_COORDINATE, beacon);
        estimatedBoard.placeAgent(ONE_ON_ONE_COORDINATE, observer);
        String firstSimulationId = db.writeMetadata(firstSimulationBuilder);
        db.writeBoardState(firstSimulationId, ZERO_ROUND, estimatedBoard);
        assertThat(db.readRealBoardState(firstSimulationId, MAX_NUMBER_OF_ROUNDS)).isNull();
    }
    @Test
    public void writeRealBoardWithNonExistingRound_shouldThrowException() {
        DatabaseService db = DatabaseService.getInstance();
        RealBoard realBoard = new RealBoard(BOARD_DIMENSION, BOARD_DIMENSION);
        realBoard.placeAgent(ZERO_ON_ZERO_COORDINATE, beacon);
        realBoard.placeAgent(ONE_ON_ONE_COORDINATE, observer);
        String firstSimulationId = db.writeMetadata(firstSimulationBuilder);
        assertThrows(ExceedingRoundException.class, () -> {
            db.writeBoardState(firstSimulationId, MAX_NUMBER_OF_ROUNDS, realBoard);
        });
    }
    @Test
    public void writeDuplicateSimulationIdAndRound_shouldThrowException() {
        DatabaseService db = DatabaseService.getInstance();
        RealBoard realBoard = new RealBoard(BOARD_DIMENSION, BOARD_DIMENSION);
        realBoard.placeAgent(ZERO_ON_ZERO_COORDINATE, beacon);
        realBoard.placeAgent(ONE_ON_ONE_COORDINATE, observer);
        String firstSimulationId = db.writeMetadata(firstSimulationBuilder);
        db.writeBoardState(firstSimulationId, ZERO_ROUND, realBoard);
        assertThrows(BoardStateAlreadyExistsException.class, () -> {
            db.writeBoardState(firstSimulationId, 0, realBoard);
        });
    }
    @After
    public void tearDown() {
        helper.tearDown();
    }
    // Converts a live board into the static row/col -> ["TypeId", ...] table
    // representation that the service serializes to JSON.
    private ArrayTable<Integer, Integer, ArrayList<String>> toStaticBoardState(Board board) {
        int rowNum = board.getRowNum();
        int colNum = board.getColNum();
        ArrayTable<Integer, Integer, ArrayList<String>> boardState = createEmptyTable(rowNum, colNum);
        Multimap<Location, IAgent> agentsOnBoard = board.agentsOnBoard();
        for (Location location : agentsOnBoard.keys()) {
            for (IAgent agent : agentsOnBoard.get(location)) {
                boardState.get(location.row, location.col).add(agent.getType() + agent.getId());
            }
        }
        return boardState;
    }
    // Builds a rowNum x colNum table whose every cell is an empty agent list.
    private ArrayTable<Integer, Integer, ArrayList<String>> createEmptyTable(int rowNum, int colNum) {
        ArrayTable<Integer, Integer, ArrayList<String>> table;
        table = ArrayTable.create(IntStream.range(0, rowNum).boxed().collect(Collectors.toList()),
                IntStream.range(0, colNum).boxed().collect(Collectors.toList()));
        for (int row = 0; row < rowNum; row++) {
            for (int col = 0; col < colNum; col++) {
                table.set(row, col, new ArrayList<>());
            }
        }
        return table;
    }
}
| src/test/java/com/google/research/bleth/services/DatabaseServiceTest.java | implement database service unit test class.
| src/test/java/com/google/research/bleth/services/DatabaseServiceTest.java | implement database service unit test class. |
|
Java | apache-2.0 | error: pathspec 'platform/util/src/com/intellij/util/indexing/impl/forward/MapForwardIndexAccessor.java' did not match any file(s) known to git
| c6d66ff92131c7e53330e91048a4c9f9292bbf45 | 1 | allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community | // Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.indexing.impl.forward;
import com.intellij.util.indexing.impl.InputDataDiffBuilder;
import com.intellij.util.indexing.impl.MapInputDataDiffBuilder;
import com.intellij.util.io.DataExternalizer;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Map;
/**
 * Forward-index accessor whose persisted data type is the key/value map itself,
 * so no conversion is required between indexed data and the stored form.
 */
public class MapForwardIndexAccessor<Key, Value, Input> extends AbstractForwardIndexAccessor<Key, Value, Map<Key, Value>, Input> {
    public MapForwardIndexAccessor(@NotNull DataExternalizer<Map<Key, Value>> externalizer) {
        super(externalizer);
    }
    // Diffs the previously stored map against new data for the given input id.
    @Override
    protected InputDataDiffBuilder<Key, Value> createDiffBuilder(int inputId, @Nullable Map<Key, Value> inputData) {
        return new MapInputDataDiffBuilder<>(inputId, inputData);
    }
    // Identity conversion: the map is already the stored data type.
    @Override
    protected Map<Key, Value> convertToDataType(@Nullable Map<Key, Value> map, @Nullable Input content) {
        return map;
    }
}
| platform/util/src/com/intellij/util/indexing/impl/forward/MapForwardIndexAccessor.java | forward index accessor for key-value map
| platform/util/src/com/intellij/util/indexing/impl/forward/MapForwardIndexAccessor.java | forward index accessor for key-value map |
|
Java | apache-2.0 | error: pathspec 'user-service/src/main/java/io/crowdcode/flaschenlager/user/UserServiceApplication.java' did not match any file(s) known to git
| 0cbc90bcea2eb2b3080ce4f07801f64f0341d66d | 1 | crowdcode-de/spring-cloud-performance-tuning,crowdcode-de/spring-cloud-performance-tuning | package io.crowdcode.flaschenlager.user;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
 * Spring Boot entry point for the user service.
 */
@SpringBootApplication
public class UserServiceApplication {
    public static void main(String[] args) {
        SpringApplication.run(UserServiceApplication.class, args);
    }
}
| user-service/src/main/java/io/crowdcode/flaschenlager/user/UserServiceApplication.java | add spring boot application to user-service module
| user-service/src/main/java/io/crowdcode/flaschenlager/user/UserServiceApplication.java | add spring boot application to user-service module |
|
Java | apache-2.0 | error: pathspec 'TeamworkApiDemo/app/src/main/java/com/vishnus1224/teamworkapidemo/model/UserConfig.java' did not match any file(s) known to git
| cb50f47055e6e47b26763584ba998f710d881a4a | 1 | vishnus1224/RxJavaTeamworkClient | package com.vishnus1224.teamworkapidemo.model;
import android.os.Parcel;
import android.os.Parcelable;
/**
* Config for the logged in user.
* Wraps the token and the site url.
* Created by Vishnu on 8/14/2016.
*/
public class UserConfig implements Parcelable {
    // API token authenticating the logged-in user.
    private final String apiToken;
    // Base site URL the token belongs to.
    private final String url;
    public UserConfig(String apiToken, String url) {
        this.apiToken = apiToken;
        this.url = url;
    }
    // Restores fields from a Parcel; read order must match writeToParcel
    // (apiToken first, then url).
    protected UserConfig(Parcel in) {
        apiToken = in.readString();
        url = in.readString();
    }
    public static final Creator<UserConfig> CREATOR = new Creator<UserConfig>() {
        @Override
        public UserConfig createFromParcel(Parcel in) {
            return new UserConfig(in);
        }
        @Override
        public UserConfig[] newArray(int size) {
            return new UserConfig[size];
        }
    };
    public String getUrl() {
        return url;
    }
    public String getApiToken() {
        return apiToken;
    }
    @Override
    public int describeContents() {
        // No special contents (e.g. file descriptors).
        return 0;
    }
    @Override
    public void writeToParcel(Parcel parcel, int i) {
        // Write order must match the Parcel constructor above.
        parcel.writeString(apiToken);
        parcel.writeString(url);
    }
}
| TeamworkApiDemo/app/src/main/java/com/vishnus1224/teamworkapidemo/model/UserConfig.java | configuration details of the logged in user
| TeamworkApiDemo/app/src/main/java/com/vishnus1224/teamworkapidemo/model/UserConfig.java | configuration details of the logged in user |
|
Java | apache-2.0 | error: pathspec 'examples/src/main/java/org/apache/pdfbox/examples/interactive/form/DetermineTextFitsField.java' did not match any file(s) known to git
| 1c362339fe6d23346e5864f570c388d53cb8089e | 1 | apache/pdfbox,kalaspuffar/pdfbox,kalaspuffar/pdfbox,apache/pdfbox | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.examples.interactive.form;
import java.io.File;
import java.io.IOException;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDResources;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.pdmodel.interactive.annotation.PDAnnotationWidget;
import org.apache.pdfbox.pdmodel.interactive.form.PDAcroForm;
import org.apache.pdfbox.pdmodel.interactive.form.PDField;
import org.apache.pdfbox.pdmodel.interactive.form.PDTextField;
/**
* Determine if text length fits the field.
*
* This sample builds on the form generated by @link CreateSimpleForm so you need to run that first.
*
*/
public class DetermineTextFitsField
{
    public static void main(String[] args) throws IOException
    {
        // try-with-resources guarantees the document is closed even when an
        // exception is thrown part-way through; the original version leaked
        // the PDDocument on any error before the final close() call.
        try (PDDocument document = PDDocument.load(new File("target/SimpleForm.pdf")))
        {
            PDAcroForm acroForm = document.getDocumentCatalog().getAcroForm();

            // Get the field and the first widget associated with it.
            // Note: there might be multiple widgets.
            PDField field = acroForm.getField("SampleField");
            PDAnnotationWidget widget = field.getWidgets().get(0);

            // Width of the field's box on the page.
            float widthOfField = widget.getRectangle().getWidth();

            // The default appearance string typically looks like "/Helv 12 Tf 0 g".
            // A robust solution would use PDFStreamParser; for this sample we
            // simply split on spaces: parts[0] is the font name ("/Helv"),
            // parts[1] the font size ("12").
            String defaultAppearance = ((PDTextField) field).getDefaultAppearance();
            String[] parts = defaultAppearance.split(" ");
            COSName fontName = COSName.getPDFName(parts[0].substring(1));
            float fontSize = Float.parseFloat(parts[1]);

            // Resolve the font resource: first from the widget's normal
            // appearance stream (present once a value has been set), falling
            // back to the AcroForm default resources otherwise.
            PDFont font = null;
            PDResources resources = widget.getNormalAppearanceStream().getResources();
            if (resources != null)
            {
                font = resources.getFont(fontName);
            }
            if (font == null)
            {
                font = acroForm.getDefaultResources().getFont(fontName);
            }

            String willFit = "short string";
            String willNotFit = "this is a very long string which will not fit the width of the widget";

            // getStringWidth() is scaled by 1/1000 of the font size, hence the
            // "* fontSize / 1000" to obtain the rendered width.
            float willFitWidth = font.getStringWidth(willFit) * fontSize / 1000;
            float willNotFitWidth = font.getStringWidth(willNotFit) * fontSize / 1000;

            // NOTE: run the JVM with -ea for these checks to take effect.
            assert willFitWidth < widthOfField;
            assert willNotFitWidth > widthOfField;
        }
    }
}
| examples/src/main/java/org/apache/pdfbox/examples/interactive/form/DetermineTextFitsField.java | PDFBOX-3389: add sample to determine if a text fits the width of a field
git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1749360 13f79535-47bb-0310-9956-ffa450edef68
| examples/src/main/java/org/apache/pdfbox/examples/interactive/form/DetermineTextFitsField.java | PDFBOX-3389: add sample to determine if a text fits the width of a field |
|
Java | bsd-2-clause | error: pathspec 'src/test/java/io/scif/Main.java' did not match any file(s) known to git
| 88dfea3a04d74c6b98ebe794a7997dee030c0940 | 1 | scifio/scifio | /*
* #%L
* SCIFIO library for reading and converting scientific file formats.
* %%
* Copyright (C) 2011 - 2016 Board of Regents of the University of
* Wisconsin-Madison
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package io.scif;
import io.scif.img.ImgIOException;
import io.scif.img.ImgOpener;
import io.scif.img.SCIFIOImgPlus;
import java.io.File;
import java.util.List;
import javax.swing.JFileChooser;
/**
* Sample main method for reading images using SCIFIO.
*
* @author Curtis Rueden
*/
public class Main {

    /**
     * Opens an image file chosen by the user and prints a summary of the
     * images it contains.
     *
     * @throws ImgIOException if the file cannot be opened as an image
     */
    public static void main(final String... args) throws ImgIOException {
        final JFileChooser fileChooser = new JFileChooser();
        if (fileChooser.showOpenDialog(null) != JFileChooser.APPROVE_OPTION) return;
        final File file = fileChooser.getSelectedFile();

        final SCIFIO scifio = new SCIFIO();
        try {
            final List<SCIFIOImgPlus<?>> imgs = //
                new ImgOpener(scifio.context()).openImgs(file.getAbsolutePath());
            System.out.println("Found " + imgs.size() + " images");
            for (final SCIFIOImgPlus<?> img : imgs) {
                System.out.println("\t" + img);
            }
        }
        finally {
            // Always release the context; the original version skipped
            // dispose() whenever openImgs() threw, leaking the context.
            scifio.context().dispose();
        }
    }
}
| src/test/java/io/scif/Main.java | Add a simple Main class
| src/test/java/io/scif/Main.java | Add a simple Main class |
|
Java | bsd-3-clause | 3fc98c3c924ce69d7e17d8d7308930b10028a3cd | 0 | reki2000/raven-java6,littleyang/raven-java,buckett/raven-java,reki2000/raven-java6,buckett/raven-java,galmeida/raven-java,galmeida/raven-java,littleyang/raven-java | package net.kencochrane.raven.exception;
/**
 * Signals that a DSN (Data Source Name) is malformed or otherwise unusable.
 */
public class InvalidDsnException extends RuntimeException {

    // Exceptions are Serializable; declare an explicit serialVersionUID so the
    // serialized form does not depend on compiler-generated defaults.
    private static final long serialVersionUID = 1L;

    /** Creates an exception with neither a detail message nor a cause. */
    public InvalidDsnException() {
    }

    /**
     * Creates an exception with a detail message.
     *
     * @param message description of the problem with the DSN
     */
    public InvalidDsnException(String message) {
        super(message);
    }

    /**
     * Creates an exception with a detail message and an underlying cause.
     *
     * @param message description of the problem with the DSN
     * @param cause   exception that triggered this one
     */
    public InvalidDsnException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Creates an exception wrapping an underlying cause.
     *
     * @param cause exception that triggered this one
     */
    public InvalidDsnException(Throwable cause) {
        super(cause);
    }
}
| raven/src/main/java/net/kencochrane/raven/exception/InvalidDsnException.java | package net.kencochrane.raven.exception;
/**
 * Signals that a DSN (Data Source Name) is malformed or otherwise unusable.
 */
public class InvalidDsnException extends RuntimeException {

    /** Creates an exception with neither a detail message nor a cause. */
    public InvalidDsnException() {
    }

    /**
     * Creates an exception with a detail message.
     *
     * @param message description of the problem with the DSN
     */
    public InvalidDsnException(String message) {
        super(message);
    }

    /**
     * Creates an exception with a detail message and an underlying cause.
     *
     * @param message description of the problem with the DSN
     * @param cause   exception that triggered this one
     */
    public InvalidDsnException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Creates an exception wrapping an underlying cause.
     *
     * @param cause exception that triggered this one
     */
    public InvalidDsnException(Throwable cause) {
        super(cause);
    }

    /**
     * Creates a fully configured exception.
     *
     * @param message            description of the problem with the DSN
     * @param cause              exception that triggered this one
     * @param enableSuppression  whether suppression is enabled
     * @param writableStackTrace whether the stack trace should be writable
     */
    public InvalidDsnException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}
| Remove Java7 specific method
| raven/src/main/java/net/kencochrane/raven/exception/InvalidDsnException.java | Remove Java7 specific method |